Commit a12aaaa: cleaning

Jhsmit committed Aug 8, 2023
1 parent 9f12bf6 commit a12aaaa
Showing 5 changed files with 14 additions and 14 deletions.

examples/load_from_yaml.py (3 additions, 7 deletions)
@@ -6,17 +6,13 @@

 test_pth = Path("../tests").resolve()
 data_pth = test_pth / "datasets"
-data_id = '20221007_1530_SecA_Krishnamurthy'
+data_id = "20221007_1530_SecA_Krishnamurthy"

 hdx_spec = yaml.safe_load((data_pth / data_id / "hdx_spec.yaml").read_text())
 metadata = yaml.safe_load((data_pth / data_id / "metadata.yaml").read_text())

 #%%

-dataset = HDXDataSet.from_spec(
-    hdx_spec,
-    data_dir=data_pth / data_id,
-    metadata=metadata
-)
+dataset = HDXDataSet.from_spec(hdx_spec, data_dir=data_pth / data_id, metadata=metadata)

-print(dataset.describe())
+print(dataset.describe())
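
For readers running the example, a minimal sketch of the full script after this commit. The import block is assumed (the hunk starts at line 6, so the diff does not show it); the remaining lines come from the new side of the hunk.

from pathlib import Path

import yaml

from hdxms_datasets import HDXDataSet  # assumed import; HDXDataSet is exported by the package (see __init__.py below)

test_pth = Path("../tests").resolve()
data_pth = test_pth / "datasets"
data_id = "20221007_1530_SecA_Krishnamurthy"

# Read the dataset specification and optional metadata from YAML.
hdx_spec = yaml.safe_load((data_pth / data_id / "hdx_spec.yaml").read_text())
metadata = yaml.safe_load((data_pth / data_id / "metadata.yaml").read_text())

# Build the dataset from the spec and print a short summary.
dataset = HDXDataSet.from_spec(hdx_spec, data_dir=data_pth / data_id, metadata=metadata)
print(dataset.describe())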

hdxms_datasets/__init__.py (6 additions, 1 deletion)
@@ -4,5 +4,10 @@
 from hdxms_datasets.config import cfg
 from hdxms_datasets.datasets import HDXDataSet, DataFile
 from hdxms_datasets.datavault import DataVault
-from hdxms_datasets.process import convert_temperature, convert_time, filter_peptides, parse_data_files
+from hdxms_datasets.process import (
+    convert_temperature,
+    convert_time,
+    filter_peptides,
+    parse_data_files,
+)
 from hdxms_datasets.reader import read_dynamx
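
The switch to a parenthesized import is layout-only; the set of names re-exported at the package top level does not change. A quick sanity check, assuming the package is installed:

# All of these are re-exported in hdxms_datasets/__init__.py, so they remain
# importable directly from the package root after this commit.
from hdxms_datasets import (
    DataFile,
    DataVault,
    HDXDataSet,
    cfg,
    convert_temperature,
    convert_time,
    filter_peptides,
    parse_data_files,
    read_dynamx,
)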

hdxms_datasets/datasets.py (3 additions, 1 deletion)
@@ -57,7 +57,9 @@ class HDXDataSet(object):
     _cache: dict[tuple[str, str], pd.DataFrame] = field(init=False, default_factory=dict)

     @classmethod
-    def from_spec(cls, hdx_spec: dict, data_dir: Path, data_id = Optional[str], metadata: Optional[dict] = None):
+    def from_spec(
+        cls, hdx_spec: dict, data_dir: Path, data_id=Optional[str], metadata: Optional[dict] = None
+    ):
         metadata = metadata or {}
         data_id = data_id or uuid.uuid4().hex
         data_files = parse_data_files(hdx_spec["data_files"], data_dir)
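
An aside on the reformatted signature: the pre-existing default data_id=Optional[str] (unchanged by this commit) assigns the typing object as the default value instead of annotating the parameter. A hedged sketch of the conventional form, using a hypothetical stand-in class rather than the real HDXDataSet:

from pathlib import Path
from typing import Optional
import uuid


class ExampleDataSet:  # hypothetical stand-in for HDXDataSet, for illustration only
    @classmethod
    def from_spec(
        cls,
        hdx_spec: dict,
        data_dir: Path,
        data_id: Optional[str] = None,  # annotate the type and default to None
        metadata: Optional[dict] = None,
    ):
        # As in the hunk above: generate an id only when the caller passes none.
        data_id = data_id or uuid.uuid4().hex
        ...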

hdxms_datasets/datavault.py (1 addition, 1 deletion)
@@ -146,5 +146,5 @@ def load_dataset(self, data_id: str) -> HDXDataSet:
             hdx_spec=hdx_spec,
             data_dir=self.cache_dir / data_id,
             data_id=data_id,
-            metadata=dataset_metadata
+            metadata=dataset_metadata,
         )
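
For context around this hunk, a hypothetical usage sketch of DataVault.load_dataset. Only load_dataset(data_id) and the cache_dir attribute appear in the diff; the constructor keyword is an assumption, and the dataset files are assumed to already be present under cache_dir / data_id.

from hdxms_datasets import DataVault

# cache_dir keyword is assumed; the constructor is not shown in this diff.
vault = DataVault(cache_dir="datasets_cache")

# load_dataset builds an HDXDataSet from the cached spec, forwarding data_id
# and the dataset metadata to HDXDataSet.from_spec as shown above.
dataset = vault.load_dataset("20221007_1530_SecA_Krishnamurthy")
print(dataset.describe())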

hdxms_datasets/process.py (1 addition, 4 deletions)
@@ -1,9 +1,8 @@
 from __future__ import annotations

 from pathlib import Path
-from typing import Literal, Optional, Union, TypeVar, TYPE_CHECKING
+from typing import Literal, Optional, Union, TYPE_CHECKING

-import numpy.typing as npt
 import pandas as pd

 from hdxms_datasets.config import cfg
@@ -15,8 +14,6 @@
 TIME_FACTORS = {"s": 1, "m": 60.0, "min": 60.0, "h": 3600, "d": 86400}
 TEMPERATURE_OFFSETS = {"c": 273.15, "celsius": 273.15, "k": 0.0, "kelvin": 0.0}

-A = TypeVar("A", npt.ArrayLike, pd.Series, pd.DataFrame)
-

 def convert_temperature(
     temperature_dict: dict, target_unit: str = "c"
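
The two constant tables kept in this hunk drive the module's unit handling. Purely as an illustration of how such lookup tables are applied, and not the library's implementation of convert_time or convert_temperature:

# Illustration only: standalone helpers using the same lookup tables.
TIME_FACTORS = {"s": 1, "m": 60.0, "min": 60.0, "h": 3600, "d": 86400}
TEMPERATURE_OFFSETS = {"c": 273.15, "celsius": 273.15, "k": 0.0, "kelvin": 0.0}


def to_seconds(value: float, unit: str) -> float:
    # Multiply by the factor for the source unit to express the time in seconds.
    return value * TIME_FACTORS[unit]


def to_kelvin(value: float, unit: str) -> float:
    # Add the offset for the source unit to express the temperature in Kelvin.
    return value + TEMPERATURE_OFFSETS[unit.lower()]


assert to_seconds(10, "min") == 600.0
assert to_kelvin(25.0, "c") == 298.15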
