Fixing missing dependencies while building TSDB package for releasing to PyPI (#47)

* refactor: use relative imports to replace absolute ones;

* fix: install full dependencies while building TSDB package;

* fix: run build with --no-isolation;

* fix: install wheel before building the package;
WenjieDu committed Nov 6, 2023
1 parent 3d13d5c commit 9beff75
Showing 10 changed files with 77 additions and 25 deletions.
.github/workflows/publish_to_PyPI.yml (8 changes: 6 additions & 2 deletions)

@@ -23,6 +23,7 @@ permissions:
 jobs:
   PyPI-deployment:
     runs-on: ubuntu-latest
+
     steps:
       - uses: actions/checkout@v3
 
@@ -31,11 +32,14 @@ jobs:
         with:
           python-version: '3.10'
           check-latest: true
+          cache-dependency-path: |
+            requirements.txt
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install build
+          pip install build wheel
+          pip install -r requirements.txt
       - name: Fetch the test environment details
         run: |
@@ -44,7 +48,7 @@
       - name: Build package
         run: |
-          python -m build
+          python -m build --no-isolation
       - name: Publish the new package to PyPI
         uses: pypa/gh-action-pypi-publish@v1.8.7
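Note on the build change: `python -m build --no-isolation` reuses the already-prepared runner environment instead of creating an isolated one, which is why `wheel` and the packages from `requirements.txt` are installed up front. A hypothetical sketch of how such a build-time dependency arises, assuming a `setup.py` that imports the package to read its version (not necessarily TSDB's actual setup script):

```python
# Hypothetical setup.py (illustrative only, not a file changed in this commit).
# Importing the package here executes tsdb/__init__.py, so every module-level
# import it triggers must already be installed in the non-isolated build env.
from setuptools import find_packages, setup

from tsdb import __version__  # runs tsdb/__init__.py and its imports

setup(
    name="tsdb",
    version=__version__,
    packages=find_packages(exclude=["tests"]),
)
```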
tsdb/__init__.py (2 changes: 1 addition & 1 deletion)

@@ -24,7 +24,7 @@
 __version__ = "0.2"
 
 
-from tsdb.data_processing import (
+from .data_processing import (
     list,
     load,
     download_and_extract,
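The switch from `from tsdb.data_processing import ...` to `from .data_processing import ...` changes how the submodule is located: a relative import is resolved against the package that contains the importing module, while the absolute form goes through `sys.path` and can pick up a different copy of `tsdb` (for example an older installed release) when the build runs from the source tree. A minimal sketch of the mechanics, assuming an installed TSDB:

```python
import importlib

# "from .data_processing import load" inside tsdb/__init__.py resolves against
# the tsdb package itself; the absolute form is looked up on sys.path.
# With a single installed copy of tsdb, the two forms land on the same module:
relative = importlib.import_module(".data_processing", package="tsdb")
absolute = importlib.import_module("tsdb.data_processing")
assert relative is absolute
```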
tsdb/data_processing.py (10 changes: 5 additions & 5 deletions)

@@ -10,18 +10,18 @@
 import sys
 import warnings
 
-from tsdb.database import AVAILABLE_DATASETS, CACHED_DATASET_DIR
-from tsdb.loading_funcs import (
+from .database import AVAILABLE_DATASETS, CACHED_DATASET_DIR
+from .loading_funcs import (
     load_physionet2012,
     load_physionet2019,
     load_electricity,
     load_beijing_air_quality,
     load_ucr_uea_dataset,
     load_ais,
 )
-from tsdb.utils.downloading import download_and_extract
-from tsdb.utils.file import purge_path, pickle_load, pickle_dump
-from tsdb.utils.logging import logger
+from .utils.downloading import download_and_extract
+from .utils.file import purge_path, pickle_load, pickle_dump
+from .utils.logging import logger
 
 
 def list() -> list:
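The import refactor leaves the public surface of `tsdb.data_processing` untouched: `list`, `load`, and `download_and_extract` are still re-exported at the top level. A hedged usage sketch, assuming an installed TSDB (the dataset name is one registered in `tsdb/database.py`):

```python
import tsdb

# list() returns the names of all registered datasets;
# load() fetches a dataset on first use and then serves it from the cache.
print(tsdb.list()[:5])
data = tsdb.load("beijing_multisite_air_quality")
```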
tsdb/database.py (54 changes: 52 additions & 2 deletions)

@@ -32,12 +32,62 @@
     #
     # https://github.com/WenjieDu/TSDB/tree/main/dataset_profiles/beijing_multisite_air_quality
     "beijing_multisite_air_quality": "https://archive.ics.uci.edu/ml/machine-learning-databases/00501/"
-    "PRSA2017_Data_20130301-20170228.zip",
+    "PRSA2017_Data_20130301-20170228.zip",
     #
     # https://github.com/WenjieDu/TSDB/tree/main/dataset_profiles/vessel_ais
     "vessel_ais": "https://zenodo.org/record/8064564/files/parquets.zip",
 }
 
+
+# The list of raw data files to be downloaded
+MATR_LINKS = (
+    (
+        "https://data.matr.io/1/api/v1/file/5c86c0b5fa2ede00015ddf66/download",
+        "2017-05-12_batchdata_updated_struct_errorcorrect.mat",
+    ),
+    (
+        "https://data.matr.io/1/api/v1/file/5c86bf13fa2ede00015ddd82/download",
+        "2017-06-30_batchdata_updated_struct_errorcorrect.mat",
+    ),
+    (
+        "https://data.matr.io/1/api/v1/file/5c86bd64fa2ede00015ddbb2/download",
+        "2018-04-12_batchdata_updated_struct_errorcorrect.mat",
+    ),
+    (
+        "https://data.matr.io/1/api/v1/file/5dcef152110002c7215b2c90/download",
+        "2019-01-24_batchdata_updated_struct_errorcorrect.mat",
+    ),
+)
+
+HUST_LINKS = (
+    (
+        "https://data.mendeley.com/public-files/datasets/nsc7hnsg4s/"
+        "files/5ca0ac3e-d598-4d07-8dcb-879aa047e98b/file_downloaded",
+        "hust_data.zip",
+    ),
+)
+
+CALCE_LINKS = (
+    ("https://web.calce.umd.edu/batteries/data/CS2_33.zip", "CS2_33.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CS2_34.zip", "CS2_34.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CS2_35.zip", "CS2_35.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CS2_36.zip", "CS2_36.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CS2_37.zip", "CS2_37.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CS2_38.zip", "CS2_38.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CX2_16.zip", "CX2_16.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CX2_33.zip", "CX2_33.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CX2_35.zip", "CX2_35.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CX2_34.zip", "CX2_34.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CX2_36.zip", "CX2_36.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CX2_37.zip", "CX2_37.zip"),
+    ("https://web.calce.umd.edu/batteries/data/CX2_38.zip", "CX2_38.zip"),
+)
+
+
+RWTH_LINKS = (
+    ("https://publications.rwth-aachen.de/record/818642/files/Rawdata.zip", "raw.zip"),
+)
+
 # https://github.com/WenjieDu/TSDB/tree/main/dataset_profiles/ucr_uea_datasets
 # 128 UCR + 33 UEA + 2 old removed (NonInvasiveFatalECGThorax1 and 2) = 163
 _ucr_uea_datasets = [
@@ -210,7 +260,7 @@
 for i in _ucr_uea_datasets:
     UCR_UEA_DATASETS[
         "ucr_uea_" + i
-    ] = f"https://www.timeseriesclassification.com/aeon-toolkit/{i}.zip"
+    ] = f"https://www.timeseriesclassification.com/aeon-toolkit/{i}.zip"
 
 DATABASE = {**_DATABASE, **UCR_UEA_DATASETS}
 AVAILABLE_DATASETS = list(DATABASE.keys())
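The hunk above shows only fragments of the loop that registers the UCR/UEA archives, so here is a consolidated sketch of the assembly pattern, with an illustrative two-name subset standing in for the real 163-entry list:

```python
# Illustrative subset; the actual _ucr_uea_datasets list holds 163 names.
_ucr_uea_datasets = ["Adiac", "ArrowHead"]

UCR_UEA_DATASETS = {
    "ucr_uea_" + name: f"https://www.timeseriesclassification.com/aeon-toolkit/{name}.zip"
    for name in _ucr_uea_datasets
}

_DATABASE = {
    "vessel_ais": "https://zenodo.org/record/8064564/files/parquets.zip",
}

# Merge the hand-maintained table with the generated UCR/UEA entries and
# expose the key list as the set of available datasets.
DATABASE = {**_DATABASE, **UCR_UEA_DATASETS}
AVAILABLE_DATASETS = list(DATABASE.keys())
```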
tsdb/loading_funcs/__init__.py (14 changes: 6 additions & 8 deletions)

@@ -5,14 +5,12 @@
 # Created by Wenjie Du <wenjay.du@gmail.com>
 # License: BSD-3-Clause
 
-from tsdb.loading_funcs.beijing_multisite_air_quality import (
-    load_beijing_air_quality,
-)
-from tsdb.loading_funcs.electricity_load_diagrams import load_electricity
-from tsdb.loading_funcs.physionet_2012 import load_physionet2012
-from tsdb.loading_funcs.physionet_2019 import load_physionet2019
-from tsdb.loading_funcs.ucr_uea_datasets import load_ucr_uea_dataset
-from tsdb.loading_funcs.vessel_ais import load_ais
+from .beijing_multisite_air_quality import load_beijing_air_quality
+from .electricity_load_diagrams import load_electricity
+from .physionet_2012 import load_physionet2012
+from .physionet_2019 import load_physionet2019
+from .ucr_uea_datasets import load_ucr_uea_dataset
+from .vessel_ais import load_ais
 
 __all__ = [
     "load_beijing_air_quality",
tsdb/loading_funcs/beijing_multisite_air_quality.py (2 changes: 1 addition & 1 deletion)

@@ -12,7 +12,7 @@
 
 import pandas as pd
 
-from tsdb.utils.logging import logger
+from ..utils.logging import logger
 
 
 def load_beijing_air_quality(local_path):
tsdb/loading_funcs/physionet_2012.py (2 changes: 1 addition & 1 deletion)

@@ -13,7 +13,7 @@
 
 import pandas as pd
 
-from tsdb.utils.logging import logger
+from ..utils.logging import logger
 
 
 def load_physionet2012(local_path):
tsdb/loading_funcs/vessel_ais.py (2 changes: 1 addition & 1 deletion)

@@ -16,7 +16,7 @@
 import pandas as pd
 from pandas.errors import UnsupportedFunctionCall
 
-from tsdb.utils.logging import logger
+from ..utils.logging import logger
 
 
 def load_ais(local_path):
tsdb/utils/downloading.py (4 changes: 2 additions & 2 deletions)

@@ -12,8 +12,8 @@
 import warnings
 from typing import Optional
 
-from tsdb.database import DATABASE
-from tsdb.utils.logging import logger
+from ..database import DATABASE
+from .logging import logger
 
 
 def _download_and_extract(url: str, saving_path: str) -> Optional[str]:
tsdb/utils/file.py (4 changes: 2 additions & 2 deletions)

@@ -11,8 +11,8 @@
 import shutil
 from typing import Optional
 
-from tsdb.database import CACHED_DATASET_DIR
-from tsdb.utils.logging import logger
+from ..database import CACHED_DATASET_DIR
+from .logging import logger
 
 
 def pickle_dump(data: object, path: str) -> Optional[str]:
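The hunk shows the typed signature `pickle_dump(data: object, path: str)`; its counterpart `pickle_load` is imported elsewhere in this commit but its signature is not shown. A hedged round-trip sketch, assuming `pickle_load` takes only the file path:

```python
# Hedged example: pickle_dump's signature appears in the diff above;
# pickle_load is assumed to take only the path of the pickled file.
from tsdb.utils.file import pickle_dump, pickle_load

pickle_dump({"answer": 42}, "/tmp/tsdb_example.pkl")  # illustrative path
restored = pickle_load("/tmp/tsdb_example.pkl")
assert restored == {"answer": 42}
```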
