diff --git a/.vscode/settings.json b/.vscode/settings.json index f580f74a5..28f99b8d9 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -13,18 +13,11 @@ "editor.insertSpaces": true, "editor.tabSize": 2 }, - "python.formatting.blackPath": "black", - "python.formatting.provider": "none", - "python.formatting.blackArgs": [ - "-l 120" - ], - "python.linting.mypyEnabled": true, "isort.args": [ "--profile", "black" ], "python.analysis.autoImportCompletions": true, "python.testing.pytestEnabled": true, - "python.linting.enabled": true, "python.analysis.typeCheckingMode": "basic" } \ No newline at end of file diff --git a/README.md b/README.md index 9f652f323..a4a6a653c 100644 --- a/README.md +++ b/README.md @@ -47,6 +47,9 @@ To run tests, first install the `test` extra package ``` pip install darwin-py[test] ``` +### Development + +See our development and QA environment installation recommendations [here](docs/DEV.md) --- @@ -167,7 +170,7 @@ Dataset example-team/test:0.1 downloaded at /directory/choosen/at/authentication The framework is designed to be usable as a standalone python library. Usage can be inferred from looking at the operations performed in `darwin/cli_functions.py`. A minimal example to download a dataset is provided below and a more extensive one can be found in -[darwin_demo.py](https://github.com/v7labs/darwin-py/blob/master/darwin_demo.py). +[./darwin_demo.py](https://github.com/v7labs/darwin-py/blob/master/darwin_demo.py). ```python from darwin.client import Client diff --git a/darwin/dataset/download_manager.py b/darwin/dataset/download_manager.py index f96003acb..1b9326e0c 100644 --- a/darwin/dataset/download_manager.py +++ b/darwin/dataset/download_manager.py @@ -94,7 +94,7 @@ def download_all_images_from_annotations( # Verify that the image is not already in the images folder unfiltered_files = images_path.rglob(f"*") if use_folders else images_path.glob(f"*") - existing_images = {image.stem: image for image in unfiltered_files if is_image_extension_allowed(image.suffix)} + existing_images = {image for image in unfiltered_files if is_image_extension_allowed(image.suffix)} annotations_to_download_path = [] for annotation_path in annotations_path.glob(f"*.{annotation_format}"): @@ -103,11 +103,11 @@ continue if not force_replace: - # Check collisions on image filename and json filename on the system - if annotation.filename in existing_images: - continue - if sanitize_filename(annotation_path.stem) in existing_images: + # Check the planned path for the image against the existing images + planned_image_path = images_path / Path(annotation.remote_path.lstrip('/\\')) / Path(annotation.filename) + if planned_image_path in existing_images: continue + annotations_to_download_path.append(annotation_path) if len(annotation.slots) > 1: force_slots = True @@ -119,10 +119,11 @@ if remove_extra: # Removes existing images for which there is no corresponding annotation annotations_downloaded_stem = [a.stem for a in annotations_path.glob(f"*.{annotation_format}")] - for existing_image in existing_images.values(): + for existing_image in existing_images: if existing_image.stem not in annotations_downloaded_stem: print(f"Removing {existing_image} as there is no corresponding annotation") existing_image.unlink() + # Create the generator with the partial functions download_functions: List = [] for annotation_path in
annotations_to_download_path: diff --git a/darwin/dataset/remote_dataset.py b/darwin/dataset/remote_dataset.py index 1033592ae..c84d81394 100644 --- a/darwin/dataset/remote_dataset.py +++ b/darwin/dataset/remote_dataset.py @@ -353,7 +353,8 @@ def pull( for error in errors: self.console.print(f"\t - {error}") - downloaded_file_count = len([f for f in self.local_images_path.rglob("*") if f.is_file()]) + downloaded_file_count = len([f for f in self.local_images_path.rglob("*") if f.is_file() and not f.name.startswith('.')]) + self.console.print(f"Total file count after download completed: {downloaded_file_count}.") return None, count diff --git a/darwin/exporter/formats/darwin.py b/darwin/exporter/formats/darwin.py index 0e7f33835..2e6193b4c 100644 --- a/darwin/exporter/formats/darwin.py +++ b/darwin/exporter/formats/darwin.py @@ -54,8 +54,7 @@ } if ( - annotation.annotation_class.annotation_type == "complex_polygon" - or annotation.annotation_class.annotation_type == "polygon" + annotation.annotation_class.annotation_type == "complex_polygon" or annotation.annotation_class.annotation_type == "polygon" ) and "bounding_box" in annotation.data: payload["bounding_box"] = annotation.data["bounding_box"] diff --git a/darwin/exporter/formats/darwin_1_0.py b/darwin/exporter/formats/darwin_1_0.py index 8cff79fd3..28744817d 100644 --- a/darwin/exporter/formats/darwin_1_0.py +++ b/darwin/exporter/formats/darwin_1_0.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Any, Dict, Iterable, List, Union +from typing import Iterable, List, Union import orjson as json @@ -34,14 +34,11 @@ def export(annotation_files: Iterable[AnnotationFile], output_dir: Path) -> None def _export_file(annotation_file: AnnotationFile, _: int, output_dir: Path) -> None: - try: filename = annotation_file.path.parts[-1] output_file_path = (output_dir / filename).with_suffix(".json") except Exception as e: - raise ExportException_CouldNotAssembleOutputPath( - f"Could not export file {annotation_file.path} to {output_dir}" - ) from e + raise ExportException_CouldNotAssembleOutputPath(f"Could not export file {annotation_file.path} to {output_dir}") from e try: output: DictFreeForm = _build_json(annotation_file) @@ -50,9 +47,7 @@ try: with open(output_file_path, "w") as f: - op = json.dumps(output, option=json.OPT_INDENT_2 | json.OPT_SERIALIZE_NUMPY | json.OPT_NON_STR_KEYS).decode( - "utf-8" - ) + op = json.dumps(output, option=json.OPT_INDENT_2 | json.OPT_SERIALIZE_NUMPY | json.OPT_NON_STR_KEYS).decode("utf-8") f.write(op) except Exception as e: raise ExportException_CouldNotWriteFile(f"Could not write output for {annotation_file.path}") from e @@ -170,12 +165,24 @@ def _build_image_annotation(annotation: Annotation, skip_slots: bool = False) -> def _build_legacy_annotation_data(annotation_class: AnnotationClass, data: DictFreeForm) -> DictFreeForm: - if annotation_class.annotation_type == "complex_polygon": - data["path"] = data["paths"] - del data["paths"] - return {"complex_polygon": data} - else: - return {annotation_class.annotation_type: data} + v1_data = {}
+ polygon_annotation_mappings = {"complex_polygon": "paths", "polygon": "path"} + + if annotation_class.annotation_type in polygon_annotation_mappings: + key = polygon_annotation_mappings[annotation_class.annotation_type] + v1_data[annotation_class.annotation_type] = {"path": data.get(key)} + + elif annotation_class.annotation_type == "tag": + v1_data["tag"] = {} + + elif annotation_class.annotation_type == "bounding_box": + v1_data[annotation_class.annotation_type] = data + + if "bounding_box" in data and annotation_class.annotation_type != "bounding_box": + # Polygons and complex polygons usually have attached bounding_box annotations + v1_data["bounding_box"] = data["bounding_box"] + + return v1_data def _build_metadata(annotation_file: AnnotationFile) -> DictFreeForm: diff --git a/darwin/future/core/client.py b/darwin/future/core/client.py index 8787170b5..7e6d4387b 100644 --- a/darwin/future/core/client.py +++ b/darwin/future/core/client.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Union, overload +from typing import Callable, Dict, Optional, overload from urllib.parse import urlparse import requests @@ -41,7 +41,10 @@ def validate_base_url(cls, v: str) -> str: if not v.endswith("/"): v += "/" check = urlparse(v) - assert check.scheme in {"http", "https"}, "base_url must start with http or https" + assert check.scheme in { + "http", + "https", + }, "base_url must start with http or https" assert check.netloc, "base_url must contain a domain" return v @@ -136,7 +139,9 @@ def __init__(self, config: DarwinConfig, retries: Optional[Retry] = None) -> Non self.config = config self.session = requests.Session() if not retries: - retries = Retry(total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504]) + retries = Retry( + total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504] + ) self._setup_session(retries) self._mappings = { "get": self.session.get, @@ -153,20 +158,32 @@ def _setup_session(self, retries: Retry) -> None: @property def headers(self) -> Dict[str, str]: - http_headers: Dict[str, str] = {"Content-Type": "application/json", "Accept": "application/json"} + http_headers: Dict[str, str] = { + "Content-Type": "application/json", + "Accept": "application/json", + } if self.config.api_key: http_headers["Authorization"] = f"ApiKey {self.config.api_key}" return http_headers @overload - def _generic_call(self, method: Callable[[str], requests.Response], endpoint: str) -> dict: + def _generic_call( + self, method: Callable[[str], requests.Response], endpoint: str + ) -> dict: ... @overload - def _generic_call(self, method: Callable[[str, dict], requests.Response], endpoint: str, payload: dict) -> dict: + def _generic_call( + self, + method: Callable[[str, dict], requests.Response], + endpoint: str, + payload: dict, + ) -> dict: ...
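Looping back to the `_build_legacy_annotation_data` rewrite in `darwin/exporter/formats/darwin_1_0.py` above: here is a minimal standalone sketch of the mapping it implements, to make the branch logic easier to review. The helper name `build_v1_data` and the sample data are illustrative only, not part of the codebase.

```python
from typing import Any, Dict

# Illustrative re-statement of the new v1 mapping: polygons keep their "path",
# complex polygons expose their "paths" list under the v1 "path" key, tags
# become empty payloads, and an attached bounding_box is carried alongside.
def build_v1_data(annotation_type: str, data: Dict[str, Any]) -> Dict[str, Any]:
    polygon_mappings = {"complex_polygon": "paths", "polygon": "path"}
    v1_data: Dict[str, Any] = {}
    if annotation_type in polygon_mappings:
        v1_data[annotation_type] = {"path": data.get(polygon_mappings[annotation_type])}
    elif annotation_type == "tag":
        v1_data["tag"] = {}
    elif annotation_type == "bounding_box":
        v1_data[annotation_type] = data
    if "bounding_box" in data and annotation_type != "bounding_box":
        # Polygons and complex polygons usually carry an attached bounding box.
        v1_data["bounding_box"] = data["bounding_box"]
    return v1_data

# A polygon with an attached bounding box keeps both keys in the v1 payload:
print(build_v1_data("polygon", {"path": [{"x": 0, "y": 0}], "bounding_box": {"x": 0, "y": 0, "w": 5, "h": 5}}))
# -> {'polygon': {'path': [{'x': 0, 'y': 0}]}, 'bounding_box': {'x': 0, 'y': 0, 'w': 5, 'h': 5}}
```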
- def _generic_call(self, method: Callable, endpoint: str, payload: Optional[dict] = None) -> JSONType: + def _generic_call( + self, method: Callable, endpoint: str, payload: Optional[dict] = None + ) -> JSONType: endpoint = self._sanitize_endpoint(endpoint) url = self.config.api_endpoint + endpoint if payload is not None: @@ -179,15 +196,21 @@ def _generic_call(self, method: Callable, endpoint: str, payload: Optional[dict] return response.json() - def _contain_qs_and_endpoint(self, endpoint: str, query_string: Optional[QueryString] = None) -> str: + def _contain_qs_and_endpoint( + self, endpoint: str, query_string: Optional[QueryString] = None + ) -> str: if not query_string: return endpoint assert "?" not in endpoint return endpoint + str(query_string) - def get(self, endpoint: str, query_string: Optional[QueryString] = None) -> JSONType: - return self._generic_call(self.session.get, self._contain_qs_and_endpoint(endpoint, query_string)) + def get( + self, endpoint: str, query_string: Optional[QueryString] = None + ) -> JSONType: + return self._generic_call( + self.session.get, self._contain_qs_and_endpoint(endpoint, query_string) + ) def put(self, endpoint: str, data: dict) -> JSONType: return self._generic_call(self.session.put, endpoint, data) @@ -195,8 +218,12 @@ def put(self, endpoint: str, data: dict) -> JSONType: def post(self, endpoint: str, data: dict) -> JSONType: return self._generic_call(self.session.post, endpoint, data) - def delete(self, endpoint: str, query_string: Optional[QueryString] = None) -> JSONType: - return self._generic_call(self.session.delete, self._contain_qs_and_endpoint(endpoint, query_string)) + def delete( + self, endpoint: str, query_string: Optional[QueryString] = None + ) -> JSONType: + return self._generic_call( + self.session.delete, self._contain_qs_and_endpoint(endpoint, query_string) + ) def patch(self, endpoint: str, data: dict) -> JSONType: return self._generic_call(self.session.patch, endpoint, data) diff --git a/darwin/future/core/datasets/__init__.py b/darwin/future/core/datasets/__init__.py index a5ccb19d5..ff1f72c59 100644 --- a/darwin/future/core/datasets/__init__.py +++ b/darwin/future/core/datasets/__init__.py @@ -1,4 +1,4 @@ -from darwin.future.core.datasets.create_dataset import * -from darwin.future.core.datasets.get_dataset import * -from darwin.future.core.datasets.list_datasets import * -from darwin.future.core.datasets.remove_dataset import * +from darwin.future.core.datasets.create_dataset import create_dataset +from darwin.future.core.datasets.get_dataset import get_dataset +from darwin.future.core.datasets.list_datasets import list_datasets +from darwin.future.core.datasets.remove_dataset import remove_dataset diff --git a/darwin/future/core/datasets/remove_dataset.py b/darwin/future/core/datasets/remove_dataset.py index 26abd7a08..86e21de1f 100644 --- a/darwin/future/core/datasets/remove_dataset.py +++ b/darwin/future/core/datasets/remove_dataset.py @@ -4,7 +4,9 @@ from darwin.future.exceptions import DatasetNotFound -def remove_dataset(api_client: ClientCore, id: int, team_slug: Optional[str] = None) -> int: +def remove_dataset( + api_client: ClientCore, id: int, team_slug: Optional[str] = None +) -> int: """ - Creates a new dataset for the given team + Deletes a dataset by id for the given team diff --git a/darwin/future/core/items/__init__.py b/darwin/future/core/items/__init__.py index 619a680fe..a56cb784d 100644 --- a/darwin/future/core/items/__init__.py +++ b/darwin/future/core/items/__init__.py @@ -1,2 +1,2 @@ -from darwin.future.core.items.get import * -from
darwin.future.core.items.move_items import * +from darwin.future.core.items.get import get_item_ids, get_item_ids_stage +from darwin.future.core.items.move_items import move_items_to_stage diff --git a/darwin/future/core/items/get.py b/darwin/future/core/items/get.py index 8ac7a7559..37bb6fd4c 100644 --- a/darwin/future/core/items/get.py +++ b/darwin/future/core/items/get.py @@ -5,7 +5,9 @@ from darwin.future.core.types.common import QueryString -def get_item_ids(api_client: ClientCore, team_slug: str, dataset_id: Union[str, int]) -> List[UUID]: +def get_item_ids( + api_client: ClientCore, team_slug: str, dataset_id: Union[str, int] +) -> List[UUID]: """ Returns a list of item ids for the dataset @@ -26,15 +28,24 @@ def get_item_ids(api_client: ClientCore, team_slug: str, dataset_id: Union[str, response = api_client.get( f"/v2/teams/{team_slug}/items/ids", - QueryString({"not_statuses": "archived,error", "sort[id]": "desc", "dataset_ids": str(dataset_id)}), + QueryString( + { + "not_statuses": "archived,error", + "sort[id]": "desc", + "dataset_ids": str(dataset_id), + } + ), ) - assert type(response) == dict + assert isinstance(response, dict) uuids = [UUID(uuid) for uuid in response["item_ids"]] return uuids def get_item_ids_stage( - api_client: ClientCore, team_slug: str, dataset_id: Union[int, str], stage_id: Union[UUID, str] + api_client: ClientCore, + team_slug: str, + dataset_id: Union[int, str], + stage_id: Union[UUID, str], ) -> List[UUID]: """ Returns a list of item ids for the stage @@ -57,8 +68,10 @@ def get_item_ids_stage( """ response = api_client.get( f"/v2/teams/{team_slug}/items/ids", - QueryString({"workflow_stage_ids": str(stage_id), "dataset_ids": str(dataset_id)}), + QueryString( + {"workflow_stage_ids": str(stage_id), "dataset_ids": str(dataset_id)} + ), ) - assert type(response) == dict + assert isinstance(response, dict) uuids = [UUID(uuid) for uuid in response["item_ids"]] return uuids diff --git a/darwin/future/core/items/move_items.py b/darwin/future/core/items/move_items.py index 32a39b86f..b28e9ab5b 100644 --- a/darwin/future/core/items/move_items.py +++ b/darwin/future/core/items/move_items.py @@ -6,7 +6,12 @@ def move_items_to_stage( - api_client: ClientCore, team_slug: str, workflow_id: UUID, dataset_id: int, stage_id: UUID, item_ids: List[UUID] + api_client: ClientCore, + team_slug: str, + workflow_id: UUID, + dataset_id: int, + stage_id: UUID, + item_ids: List[UUID], ) -> JSONType: """ Moves a list of items to a stage diff --git a/darwin/future/core/team/get_team.py b/darwin/future/core/team/get_team.py index 619c5e649..5570099ef 100644 --- a/darwin/future/core/team/get_team.py +++ b/darwin/future/core/team/get_team.py @@ -1,7 +1,6 @@ from typing import List, Optional, Tuple from darwin.future.core.client import ClientCore -from darwin.future.core.types.common import JSONType from darwin.future.data_objects.team import TeamCore, TeamMemberCore @@ -13,7 +12,9 @@ def get_team(client: ClientCore, team_slug: Optional[str] = None) -> TeamCore: return TeamCore.parse_obj(response) -def get_team_members(client: ClientCore) -> Tuple[List[TeamMemberCore], List[Exception]]: +def get_team_members( + client: ClientCore, +) -> Tuple[List[TeamMemberCore], List[Exception]]: response = client.get("/memberships") members = [] errors = [] diff --git a/darwin/future/core/types/__init__.py b/darwin/future/core/types/__init__.py index 55e5f844b..e52d13056 100644 --- a/darwin/future/core/types/__init__.py +++ b/darwin/future/core/types/__init__.py @@ -1 +1 @@ -from .common import 
* +from .common import JSONType, QueryString, TeamSlug diff --git a/darwin/future/core/types/common.py b/darwin/future/core/types/common.py index 297eb3d4b..d7a665d79 100644 --- a/darwin/future/core/types/common.py +++ b/darwin/future/core/types/common.py @@ -1,7 +1,5 @@ from typing import Any, Dict, List, Union -import pydantic -from pydantic import BaseModel from darwin.future.data_objects import validators as darwin_validators from darwin.future.data_objects.typing import UnknownType @@ -21,8 +19,12 @@ def __get_validators__(cls): # type: ignore @classmethod def validate(cls, v: str) -> "TeamSlug": - assert len(v) < cls.max_length, f"maximum length for team slug is {cls.max_length}" - assert len(v) > cls.min_length, f"minimum length for team slug is {cls.min_length}" + assert ( + len(v) < cls.max_length + ), f"maximum length for team slug is {cls.max_length}" + assert ( + len(v) > cls.min_length + ), f"minimum length for team slug is {cls.min_length}" if not isinstance(v, str): raise TypeError("string required") modified_value = darwin_validators.parse_name(v) diff --git a/darwin/future/core/types/query.py b/darwin/future/core/types/query.py index ac45d2903..fca744c4f 100644 --- a/darwin/future/core/types/query.py +++ b/darwin/future/core/types/query.py @@ -9,9 +9,7 @@ Generic, List, Optional, - Tuple, TypeVar, - overload, ) from darwin.future.core.client import ClientCore @@ -54,7 +52,9 @@ class QueryFilter(DefaultDarwin): def filter_attr(self, attr: Any) -> bool: # type: ignore caster: Callable[[str], Any] = type(attr) # type: ignore - param = caster(self.param) # attempt to cast the param to the type of the attribute + param = caster( + self.param + ) # attempt to cast the param to the type of the attribute if self.modifier is None: return attr == param elif self.modifier == Modifier.GREATER_EQUAL: @@ -75,7 +75,9 @@ def filter_attr(self, attr: Any) -> bool: # type: ignore @classmethod def _from_dict(cls, d: Dict[str, Any]) -> QueryFilter: # type: ignore if "name" not in d or "param" not in d: - raise InvalidQueryFilter(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {d}") + raise InvalidQueryFilter( + f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {d}" + ) modifier = Modifier(d["modifier"]) if "modifier" in d else None return QueryFilter(name=d["name"], param=str(d["param"]), modifier=modifier) @@ -95,7 +97,9 @@ def _from_arg(cls, arg: object) -> QueryFilter: elif isinstance(arg, dict): return cls._from_dict(arg) else: - raise InvalidQueryFilter(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {arg}") + raise InvalidQueryFilter( + f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {arg}" + ) @classmethod def _from_kwarg(cls, key: str, value: str) -> QueryFilter: @@ -117,9 +121,12 @@ class Query(Generic[T], ABC): """ def __init__( - self, client: ClientCore, filters: Optional[List[QueryFilter]] = None, meta_params: Optional[Param] = None + self, + client: ClientCore, + filters: Optional[List[QueryFilter]] = None, + meta_params: Optional[Param] = None, ): - self.meta_params: dict = meta_params or dict() + self.meta_params: dict = meta_params or {} self.client = client self.filters = filters or [] self.results: Optional[List[T]] = None @@ -130,12 +137,16 @@ def filter(self, filter: QueryFilter) -> Query[T]: def __add__(self, filter: QueryFilter) -> Query[T]: self._changed_since_last = True - return self.__class__(self.client, filters=[*self.filters, filter], meta_params=self.meta_params) + 
return self.__class__( + self.client, filters=[*self.filters, filter], meta_params=self.meta_params + ) def __sub__(self, filter: QueryFilter) -> Query[T]: self._changed_since_last = True return self.__class__( - self.client, filters=[f for f in self.filters if f != filter], meta_params=self.meta_params + self.client, + filters=[f for f in self.filters if f != filter], + meta_params=self.meta_params, ) def __iadd__(self, filter: QueryFilter) -> Query[T]: @@ -212,4 +223,6 @@ def first(self) -> Optional[T]: return self.results[0] def _generic_execute_filter(self, objects: List[T], filter: QueryFilter) -> List[T]: - return [m for m in objects if filter.filter_attr(getattr(m._element, filter.name))] + return [ + m for m in objects if filter.filter_attr(getattr(m._element, filter.name)) + ] diff --git a/darwin/future/core/utils/pathutils.py b/darwin/future/core/utils/pathutils.py index ccaebdeba..68f5734ec 100644 --- a/darwin/future/core/utils/pathutils.py +++ b/darwin/future/core/utils/pathutils.py @@ -1,6 +1,6 @@ import json from pathlib import Path -from typing import Any, Optional +from typing import Optional import yaml @@ -29,7 +29,9 @@ def attempt_open(path: Path) -> dict: return open_json(path, encoding) except Exception: pass - raise UnrecognizableFileEncoding(f"Unable to load file {path} with any encodings: {ENCODINGS}") + raise UnrecognizableFileEncoding( + f"Unable to load file {path} with any encodings: {ENCODINGS}" + ) def open_yaml(path: Path, encoding: Optional[str] = None) -> dict: diff --git a/darwin/future/core/workflows/__init__.py b/darwin/future/core/workflows/__init__.py index c2282b65a..cbecab1e8 100644 --- a/darwin/future/core/workflows/__init__.py +++ b/darwin/future/core/workflows/__init__.py @@ -1,3 +1,3 @@ -from darwin.future.core.workflows.get_workflow import * -from darwin.future.core.workflows.get_workflows import * -from darwin.future.core.workflows.list_workflows import * +from darwin.future.core.workflows.get_workflow import get_workflow +from darwin.future.core.workflows.get_workflows import get_workflows +from darwin.future.core.workflows.list_workflows import list_workflows diff --git a/darwin/future/core/workflows/get_workflows.py b/darwin/future/core/workflows/get_workflows.py index 8664d1f74..97805f131 100644 --- a/darwin/future/core/workflows/get_workflows.py +++ b/darwin/future/core/workflows/get_workflows.py @@ -6,7 +6,9 @@ from darwin.future.data_objects.workflow import WorkflowCore -def get_workflows(client: ClientCore, team_slug: Optional[str] = None) -> List[WorkflowCore]: +def get_workflows( + client: ClientCore, team_slug: Optional[str] = None +) -> List[WorkflowCore]: team_slug = team_slug or client.config.default_team response = client.get(f"/v2/teams/{team_slug}/workflows?worker=false") diff --git a/darwin/future/core/workflows/list_workflows.py b/darwin/future/core/workflows/list_workflows.py index e28e766d5..9b9cf8ec6 100644 --- a/darwin/future/core/workflows/list_workflows.py +++ b/darwin/future/core/workflows/list_workflows.py @@ -1,12 +1,13 @@ from typing import List, Optional, Tuple -from pydantic import ValidationError from darwin.future.core.client import ClientCore from darwin.future.data_objects.workflow import WorkflowCore, WorkflowListValidator -def list_workflows(client: ClientCore, team_slug: Optional[str] = None) -> Tuple[List[WorkflowCore], List[Exception]]: +def list_workflows( + client: ClientCore, team_slug: Optional[str] = None +) -> Tuple[List[WorkflowCore], List[Exception]]: """ Returns a list of workflows for the given 
team @@ -28,7 +29,9 @@ def list_workflows(client: ClientCore, team_slug: Optional[str] = None) -> Tuple team_slug = team_slug or client.config.default_team response = client.get(f"/v2/teams/{team_slug}/workflows?worker=false") list_of_workflows = WorkflowListValidator(list=response) # type: ignore - workflows = [WorkflowCore.parse_obj(workflow) for workflow in list_of_workflows.list] + workflows = [ + WorkflowCore.parse_obj(workflow) for workflow in list_of_workflows.list + ] except Exception as e: exceptions.append(e) diff --git a/darwin/future/data_objects/team.py b/darwin/future/data_objects/team.py index c766fd591..da697ec90 100644 --- a/darwin/future/data_objects/team.py +++ b/darwin/future/data_objects/team.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import List, Optional from pydantic import validator diff --git a/darwin/future/data_objects/workflow.py b/darwin/future/data_objects/workflow.py index 0af79c14f..47e4d2d11 100644 --- a/darwin/future/data_objects/workflow.py +++ b/darwin/future/data_objects/workflow.py @@ -59,7 +59,9 @@ class WFEdgeCore(DefaultDarwin): @root_validator(pre=True) def _one_or_both_must_exist(cls, values: dict) -> dict: if not values["source_stage_id"] and not values["target_stage_id"]: - raise ValueError("One or both of source_stage_id and target_stage_id must be defined") + raise ValueError( + "One or both of source_stage_id and target_stage_id must be defined" + ) return values diff --git a/darwin/future/exceptions.py b/darwin/future/exceptions.py index bfd6cd0b3..e68fd5e8f 100644 --- a/darwin/future/exceptions.py +++ b/darwin/future/exceptions.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional +from typing import List, Optional from darwin.future.data_objects.typing import KeyValuePairDict, UnknownType diff --git a/darwin/future/helpers/assertion.py b/darwin/future/helpers/assertion.py index 5cecf467c..04c5a71c5 100644 --- a/darwin/future/helpers/assertion.py +++ b/darwin/future/helpers/assertion.py @@ -1,6 +1,10 @@ from typing import Type -def assert_is(conditional: bool, message: str, exception_factory: Type[BaseException] = AssertionError) -> None: +def assert_is( + conditional: bool, + message: str, + exception_factory: Type[BaseException] = AssertionError, +) -> None: if not conditional: raise exception_factory(message) diff --git a/darwin/future/meta/client.py b/darwin/future/meta/client.py index 8d230dfb2..ceb4a182f 100644 --- a/darwin/future/meta/client.py +++ b/darwin/future/meta/client.py @@ -1,14 +1,12 @@ from __future__ import annotations from pathlib import Path -from typing import List, Optional +from typing import Optional from requests.adapters import Retry from darwin.future.core.client import ClientCore, DarwinConfig from darwin.future.meta.objects.team import Team -from darwin.future.meta.objects.workflow import Workflow -from darwin.future.meta.queries.workflow import WorkflowQuery class Client(ClientCore): diff --git a/darwin/future/meta/objects/base.py b/darwin/future/meta/objects/base.py index aa51789f1..317ed388e 100644 --- a/darwin/future/meta/objects/base.py +++ b/darwin/future/meta/objects/base.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Dict, Generic, List, Optional, TypeVar +from typing import Dict, Generic, Optional, TypeVar from darwin.future.core.client import ClientCore from darwin.future.pydantic_base import DefaultDarwin @@ -13,10 +13,12 @@ class MetaBase(Generic[R]): _element: R client: ClientCore - def __init__(self, 
client: ClientCore, element: R, meta_params: Optional[Param] = None) -> None: + def __init__( + self, client: ClientCore, element: R, meta_params: Optional[Param] = None + ) -> None: self.client = client self._element = element - self.meta_params = meta_params or dict() + self.meta_params = meta_params or {} def __str__(self) -> str: return str(self._element) diff --git a/darwin/future/meta/objects/dataset.py b/darwin/future/meta/objects/dataset.py index be870afaf..80c636473 100644 --- a/darwin/future/meta/objects/dataset.py +++ b/darwin/future/meta/objects/dataset.py @@ -1,16 +1,15 @@ from __future__ import annotations -from typing import List, Optional, Sequence, Tuple, Union +from typing import List, Optional, Sequence, Union from uuid import UUID from darwin.cli_functions import upload_data from darwin.dataset.upload_manager import LocalFile from darwin.datatypes import PathLike from darwin.future.core.client import ClientCore -from darwin.future.core.datasets import create_dataset, get_dataset, remove_dataset +from darwin.future.core.datasets import create_dataset, remove_dataset from darwin.future.core.items import get_item_ids from darwin.future.data_objects.dataset import DatasetCore -from darwin.future.exceptions import MissingDataset from darwin.future.helpers.assertion import assert_is from darwin.future.meta.objects.base import MetaBase @@ -48,15 +47,15 @@ def item_ids(self) -> List[UUID]: List[UUID]: A list of item ids """ assert self._element.id is not None - assert self.meta_params["team_slug"] is not None and type(self.meta_params["team_slug"]) == str - return get_item_ids(self.client, self.meta_params["team_slug"], str(self._element.id)) - - def get_dataset_by_id(self) -> DatasetCore: - # TODO: implement - raise NotImplementedError() + assert self.meta_params["team_slug"] is not None and isinstance( + self.meta_params["team_slug"], str + ) + return get_item_ids( + self.client, self.meta_params["team_slug"], str(self._element.id) + ) @classmethod - def create_dataset(cls, client: ClientCore, slug: str) -> Tuple[Optional[List[Exception]], Optional[DatasetCore]]: + def create_dataset(cls, client: ClientCore, slug: str) -> DatasetCore: """ Creates a new dataset for the given team @@ -71,101 +70,25 @@ def create_dataset(cls, client: ClientCore, slug: str) -> Tuple[Optional[List[Ex - A tuple containing a list of exceptions and the dataset created + The dataset created """ - exceptions = [] dataset: Optional[DatasetCore] = None - - try: - cls._validate_slug(slug) - dataset = create_dataset(client, slug) - except Exception as e: - exceptions.append(e) - - return exceptions or None, dataset + cls._validate_slug(slug) + dataset = create_dataset(client, slug) + return dataset def update_dataset(self) -> DatasetCore: # TODO: implement in IO-1018 raise NotImplementedError() - @classmethod - def delete_dataset(cls, client: ClientCore, dataset_id: Union[int, str]) -> Tuple[Optional[List[Exception]], int]: + def delete(self) -> int: """ - Deletes a dataset by id or slug - - Parameters - ---------- - dataset_id: Union[int, str] - The id or slug of the dataset to delete - - Returns - ------- - Tuple[Optional[List[Exception]], int] - A tuple containing a list of exceptions and the number of datasets deleted - """ - exceptions = [] - dataset_deleted = -1 - - try: - if isinstance(dataset_id, str): - dataset_deleted = cls._delete_by_slug(client, dataset_id) - else: - dataset_deleted = cls._delete_by_id(client, dataset_id) - - except Exception as e: - exceptions.append(e) - - return exceptions or None, dataset_deleted
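The change above flips `Dataset.create_dataset` from collecting errors into a returned tuple to returning the `DatasetCore` directly and letting failures raise. A hedged usage sketch of the new contract follows; `client` is assumed to be an already-configured `ClientCore`, the slug is made up, and the field names mirror those exercised in the tests later in this diff.

```python
from darwin.future.core.client import ClientCore
from darwin.future.meta.objects.dataset import Dataset

def create_or_report(client: ClientCore, slug: str) -> None:
    # Old contract (removed): exceptions, dataset = Dataset.create_dataset(client, slug)
    # New contract: the DatasetCore is returned directly and failures raise.
    try:
        core = Dataset.create_dataset(client, slug)
        print(f"Created dataset {core.slug} (id={core.id})")
    except Exception as err:
        # Slug validation failures from _validate_slug and HTTP errors both surface here.
        print(f"Could not create dataset '{slug}': {err}")
```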
- - @staticmethod - def _delete_by_slug(client: ClientCore, slug: str) -> int: - """ - (internal) Deletes a dataset by slug - - Parameters - ---------- - client: MetaClient - The client to use to make the request - - slug: str - The slug of the dataset to delete + Deletes the current dataset Returns ------- int - The dataset deleted + The id of the deleted dataset """ - assert_is(isinstance(client, ClientCore), "client must be a Core Client") - assert_is(isinstance(slug, str), "slug must be a string") - - dataset = get_dataset(client, slug) - if dataset and dataset.id: - dataset_deleted = remove_dataset(client, dataset.id) - else: - raise MissingDataset(f"Dataset with slug {slug} not found") - - return dataset_deleted - - @staticmethod - def _delete_by_id(client: ClientCore, dataset_id: int) -> int: - """ - (internal) Deletes a dataset by id - - Parameters - ---------- - client: Client - The client to use to make the request - - dataset_id: int - The id of the dataset to delete - - Returns - ------- - int - The dataset deleted - """ - assert_is(isinstance(client, ClientCore), "client must be a Client") - assert_is(isinstance(dataset_id, int), "dataset_id must be an integer") - - dataset_deleted = remove_dataset(client, dataset_id) + dataset_deleted = remove_dataset(self.client, self.id) return dataset_deleted @staticmethod @@ -187,7 +110,10 @@ def _validate_slug(slug: str) -> None: assert_is(len(slug_copy) > 0, "slug must not be empty") VALID_SLUG_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_" - assert_is(all(c in VALID_SLUG_CHARS for c in slug_copy), "slug must only contain valid characters") + assert_is( + all(c in VALID_SLUG_CHARS for c in slug_copy), + "slug must only contain valid characters", + ) def upload_files( self, @@ -200,7 +126,5 @@ def upload_files( preserve_folders: bool = False, verbose: bool = False, ) -> Dataset: - upload_data( - self._element.name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose # type: ignore - ) + upload_data(self._element.name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose) # type: ignore return self diff --git a/darwin/future/meta/objects/stage.py b/darwin/future/meta/objects/stage.py index 6ca58683e..747b1637b 100644 --- a/darwin/future/meta/objects/stage.py +++ b/darwin/future/meta/objects/stage.py @@ -17,20 +17,29 @@ class Stage(MetaBase[WFStageCore]): @property def item_ids(self) -> List[UUID]: - """_summary_ + """Item ids attached to the stage Returns: - _type_: _description_ + List[UUID]: List of item ids """ assert self._element.id is not None return get_item_ids_stage( - self.client, str(self.meta_params["team_slug"]), str(self.meta_params["dataset_id"]), self.id + self.client, + str(self.meta_params["team_slug"]), + str(self.meta_params["dataset_id"]), + self.id, ) def move_attached_files_to_stage(self, new_stage_id: UUID) -> Stage: - assert self.meta_params["team_slug"] is not None and type(self.meta_params["team_slug"]) == str - assert self.meta_params["workflow_id"] is not None and type(self.meta_params["workflow_id"]) == UUID - assert self.meta_params["dataset_id"] is not None and type(self.meta_params["dataset_id"]) == int + assert self.meta_params["team_slug"] is not None and isinstance( + self.meta_params["team_slug"], str + ) + assert self.meta_params["workflow_id"] is not None and isinstance( + self.meta_params["workflow_id"], UUID + ) + assert self.meta_params["dataset_id"] is not None and isinstance( + self.meta_params["dataset_id"], int + ) slug, w_id, d_id =
( self.meta_params["team_slug"], self.meta_params["workflow_id"], diff --git a/darwin/future/meta/objects/team.py b/darwin/future/meta/objects/team.py index 4783f4484..bbc169894 100644 --- a/darwin/future/meta/objects/team.py +++ b/darwin/future/meta/objects/team.py @@ -1,10 +1,13 @@ -from typing import List, Optional +from typing import List, Optional, Tuple, Union from darwin.future.core.client import ClientCore +from darwin.future.core.datasets import get_dataset, remove_dataset from darwin.future.core.team.get_team import get_team from darwin.future.data_objects.team import TeamCore +from darwin.future.exceptions import MissingDataset from darwin.future.helpers.assertion import assert_is from darwin.future.meta.objects.base import MetaBase +from darwin.future.meta.objects.dataset import Dataset from darwin.future.meta.queries.dataset import DatasetQuery from darwin.future.meta.queries.team_member import TeamMemberQuery from darwin.future.meta.queries.workflow import WorkflowQuery @@ -55,3 +58,91 @@ def workflows(self) -> WorkflowQuery: def __str__(self) -> str: return f"TeamMeta(name='{self.name}', slug='{self.slug}', id='{self.id}' - {len(self._element.members if self._element.members else [])} members)" + + @classmethod + def delete_dataset( + cls, client: ClientCore, dataset_id: Union[int, str] + ) -> Tuple[Optional[List[Exception]], int]: + """ + Deletes a dataset by id or slug + + Parameters + ---------- + dataset_id: Union[int, str] + The id or slug of the dataset to delete + + Returns + ------- + Tuple[Optional[List[Exception]], int] + A tuple containing a list of exceptions and the number of datasets deleted + """ + exceptions = [] + dataset_deleted = -1 + + try: + if isinstance(dataset_id, str): + dataset_deleted = cls._delete_dataset_by_slug(client, dataset_id) + else: + dataset_deleted = cls._delete_dataset_by_id(client, dataset_id) + + except Exception as e: + exceptions.append(e) + + return exceptions or None, dataset_deleted + + @staticmethod + def _delete_dataset_by_slug(client: ClientCore, slug: str) -> int: + """ + (internal) Deletes a dataset by slug + + Parameters + ---------- + client: ClientCore + The client to use to make the request + + slug: str + The slug of the dataset to delete + + Returns + ------- + int + The id of the deleted dataset + """ + assert_is(isinstance(client, ClientCore), "client must be a Core Client") + assert_is(isinstance(slug, str), "slug must be a string") + + dataset = get_dataset(client, slug) + if dataset and dataset.id: + dataset_deleted = remove_dataset(client, dataset.id) + else: + raise MissingDataset(f"Dataset with slug {slug} not found") + + return dataset_deleted + + @staticmethod + def _delete_dataset_by_id(client: ClientCore, dataset_id: int) -> int: + """ + (internal) Deletes a dataset by id + + Parameters + ---------- + client: ClientCore + The client to use to make the request + + dataset_id: int + The id of the dataset to delete + + Returns + ------- + int + The id of the deleted dataset + """ + assert_is(isinstance(client, ClientCore), "client must be a Client") + assert_is(isinstance(dataset_id, int), "dataset_id must be an integer") + + dataset_deleted = remove_dataset(client, dataset_id) + return dataset_deleted + + def create_dataset(self, slug: str) -> Dataset: + core = Dataset.create_dataset(self.client, slug) + return Dataset(self.client, core, meta_params={"team_slug": self.slug}) diff --git a/darwin/future/meta/objects/team_member.py b/darwin/future/meta/objects/team_member.py index 222e24454..6f87d4326 100644 ---
a/darwin/future/meta/objects/team_member.py +++ b/darwin/future/meta/objects/team_member.py @@ -1,6 +1,3 @@ -from typing import List, Optional - -from darwin.future.core.client import ClientCore from darwin.future.data_objects.team import TeamMemberCore from darwin.future.data_objects.team_member_role import TeamMemberRole from darwin.future.meta.objects.base import MetaBase diff --git a/darwin/future/meta/objects/workflow.py b/darwin/future/meta/objects/workflow.py index f28671243..af6458382 100644 --- a/darwin/future/meta/objects/workflow.py +++ b/darwin/future/meta/objects/workflow.py @@ -1,7 +1,5 @@ from __future__ import annotations -from enum import auto -from pathlib import Path from typing import List, Optional, Sequence, Union from uuid import UUID @@ -61,9 +59,7 @@ def upload_files( auto_push: bool = True, ) -> Workflow: assert self._element.dataset is not None - upload_data( - self.datasets[0].name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose # type: ignore - ) + upload_data(self.datasets[0].name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose) # type: ignore if auto_push: self.push_from_dataset_stage() return self diff --git a/darwin/future/meta/queries/dataset.py b/darwin/future/meta/queries/dataset.py index daf3dc24e..c87b8d915 100644 --- a/darwin/future/meta/queries/dataset.py +++ b/darwin/future/meta/queries/dataset.py @@ -3,7 +3,7 @@ from typing import List from darwin.future.core.datasets import list_datasets -from darwin.future.core.types.query import Param, Query, QueryFilter +from darwin.future.core.types.query import Query, QueryFilter from darwin.future.meta.objects.dataset import Dataset @@ -23,7 +23,9 @@ def _collect(self) -> List[Dataset]: if exceptions: # TODO: print and or raise exceptions, tbd how we want to handle this pass - datasets_meta = [Dataset(self.client, dataset) for dataset in datasets] + datasets_meta = [ + Dataset(self.client, dataset, self.meta_params) for dataset in datasets + ] if not self.filters: self.filters = [] @@ -32,7 +34,9 @@ def _collect(self) -> List[Dataset]: return datasets_meta - def _execute_filters(self, datasets: List[Dataset], filter: QueryFilter) -> List[Dataset]: + def _execute_filters( + self, datasets: List[Dataset], filter: QueryFilter + ) -> List[Dataset]: """Executes filtering on the local list of datasets, applying special logic for role filtering otherwise calls the parent method for general filtering on the values of the datasets diff --git a/darwin/future/meta/queries/stage.py b/darwin/future/meta/queries/stage.py index 3c8bd86db..7211bb5d5 100644 --- a/darwin/future/meta/queries/stage.py +++ b/darwin/future/meta/queries/stage.py @@ -3,7 +3,7 @@ from typing import List from uuid import UUID -from darwin.future.core.types.query import Param, Query, QueryFilter +from darwin.future.core.types.query import Query, QueryFilter from darwin.future.core.workflows import get_workflow from darwin.future.meta.objects.stage import Stage @@ -16,7 +16,9 @@ def _collect(self) -> List[Stage]: meta_params = self.meta_params workflow, exceptions = get_workflow(self.client, str(workflow_id)) assert workflow is not None - stages = [Stage(self.client, s, meta_params=meta_params) for s in workflow.stages] + stages = [ + Stage(self.client, s, meta_params=meta_params) for s in workflow.stages + ] if not self.filters: self.filters = [] for filter in self.filters: @@ -35,5 +37,9 @@ def _execute_filter(self, stages: List[Stage], filter: QueryFilter) -> List[Stag 
List[Stage]: Filtered subset of stages """ if filter.name == "role": - return [s for s in stages if s._element is not None and filter.filter_attr(s._element.type.value)] + return [ + s + for s in stages + if s._element is not None and filter.filter_attr(s._element.type.value) + ] return super()._generic_execute_filter(stages, filter) diff --git a/darwin/future/meta/queries/team_member.py b/darwin/future/meta/queries/team_member.py index 0fa192208..f6eaba922 100644 --- a/darwin/future/meta/queries/team_member.py +++ b/darwin/future/meta/queries/team_member.py @@ -3,7 +3,7 @@ from typing import List from darwin.future.core.team.get_team import get_team_members -from darwin.future.core.types.query import Param, Query, QueryFilter +from darwin.future.core.types.query import Query, QueryFilter from darwin.future.meta.objects.team_member import TeamMember @@ -27,7 +27,9 @@ def _collect(self) -> List[TeamMember]: return members_meta - def _execute_filter(self, members: List[TeamMember], filter: QueryFilter) -> List[TeamMember]: + def _execute_filter( + self, members: List[TeamMember], filter: QueryFilter + ) -> List[TeamMember]: """Executes filtering on the local list of members, applying special logic for role filtering otherwise calls the parent method for general filtering on the values of the members @@ -41,6 +43,10 @@ def _execute_filter(self, members: List[TeamMember], filter: QueryFilter) -> Lis List[TeamMember]: Filtered subset of members """ if filter.name == "role": - return [m for m in members if m._element is not None and filter.filter_attr(m._element.role.value)] + return [ + m + for m in members + if m._element is not None and filter.filter_attr(m._element.role.value) + ] else: return super()._generic_execute_filter(members, filter) diff --git a/darwin/future/meta/queries/workflow.py b/darwin/future/meta/queries/workflow.py index 552a49dfa..75c3a6442 100644 --- a/darwin/future/meta/queries/workflow.py +++ b/darwin/future/meta/queries/workflow.py @@ -3,7 +3,7 @@ from uuid import UUID from darwin.exceptions import DarwinException -from darwin.future.core.types.query import Param, Query, QueryFilter +from darwin.future.core.types.query import Query, QueryFilter from darwin.future.core.workflows import list_workflows from darwin.future.data_objects.workflow import WFStageCore from darwin.future.helpers.exception_handler import handle_exception @@ -26,7 +26,10 @@ def _collect(self) -> List[Workflow]: if exceptions: handle_exception(exceptions) raise DarwinException from exceptions[0] - workflows = [Workflow(self.client, workflow, self.meta_params) for workflow in workflows_core] + workflows = [ + Workflow(self.client, workflow, self.meta_params) + for workflow in workflows_core + ] if not self.filters: return workflows @@ -35,7 +38,9 @@ def _collect(self) -> List[Workflow]: return workflows - def _execute_filters(self, workflows: List[Workflow], filter: QueryFilter) -> List[Workflow]: + def _execute_filters( + self, workflows: List[Workflow], filter: QueryFilter + ) -> List[Workflow]: if filter.name == "id": id_to_find = UUID(filter.param) return [w for w in workflows if w.id == id_to_find] @@ -45,25 +50,35 @@ def _execute_filters(self, workflows: List[Workflow], filter: QueryFilter) -> Li return [ w for w in workflows - if w._element is not None and self._date_compare(w._element.inserted_at, start_date) + if w._element is not None + and self._date_compare(w._element.inserted_at, start_date) ] if filter.name == "inserted_at_end": end_date = datetime.fromisoformat(filter.param) return [ - 
w for w in workflows if w._element is not None and self._date_compare(end_date, w._element.inserted_at) + w + for w in workflows + if w._element is not None + and self._date_compare(end_date, w._element.inserted_at) ] if filter.name == "updated_at_start": start_date = datetime.fromisoformat(filter.param) return [ - w for w in workflows if w._element is not None and self._date_compare(w._element.updated_at, start_date) + w + for w in workflows + if w._element is not None + and self._date_compare(w._element.updated_at, start_date) ] if filter.name == "updated_at_end": end_date = datetime.fromisoformat(filter.param) return [ - w for w in workflows if w._element is not None and self._date_compare(end_date, w._element.updated_at) + w + for w in workflows + if w._element is not None + and self._date_compare(end_date, w._element.updated_at) ] if filter.name == "dataset_id": @@ -78,14 +93,20 @@ def _execute_filters(self, workflows: List[Workflow], filter: QueryFilter) -> Li if filter.name == "dataset_name": datasets_to_find_name: List[str] = [str(s) for s in filter.param.split(",")] - return [w for w in workflows if w._element is not None and str(w._element.dataset) in datasets_to_find_name] + return [ + w + for w in workflows + if w._element is not None + and str(w._element.dataset) in datasets_to_find_name + ] if filter.name == "has_stages": - stages_to_find = [s for s in filter.param.split(",")] + stages_to_find = list(filter.param.split(",")) return [ w for w in workflows - if w._element is not None and self._stages_contains(w._element.stages, stages_to_find) + if w._element is not None + and self._stages_contains(w._element.stages, stages_to_find) ] return self._generic_execute_filter(workflows, filter) @@ -95,6 +116,8 @@ def _date_compare(cls, date1: datetime, date2: datetime) -> bool: return date1.astimezone(timezone.utc) >= date2.astimezone(timezone.utc) @classmethod - def _stages_contains(cls, stages: List[WFStageCore], stages_to_find: List[str]) -> bool: + def _stages_contains( + cls, stages: List[WFStageCore], stages_to_find: List[str] + ) -> bool: stage_ids = [str(s.id) for s in stages] return any(stage_to_find in stage_ids for stage_to_find in stages_to_find) diff --git a/darwin/future/tests/core/datasets/fixtures.py b/darwin/future/tests/core/datasets/fixtures.py index bbf044840..0f9489544 100644 --- a/darwin/future/tests/core/datasets/fixtures.py +++ b/darwin/future/tests/core/datasets/fixtures.py @@ -81,4 +81,7 @@ def happy_get_client() -> ClientCore: @fixture def sad_client_pydantic() -> ClientCore: - return MagicMock(ClientCore, side_effect=ValidationError(["error1", "error2", "error3"], model=BaseModel)) + return MagicMock( + ClientCore, + side_effect=ValidationError(["error1", "error2", "error3"], model=BaseModel), + ) diff --git a/darwin/future/tests/core/datasets/test_create_dataset.py b/darwin/future/tests/core/datasets/test_create_dataset.py index fd323fed1..26630ac44 100644 --- a/darwin/future/tests/core/datasets/test_create_dataset.py +++ b/darwin/future/tests/core/datasets/test_create_dataset.py @@ -1,5 +1,3 @@ -from typing import Union - import responses from pytest import raises from requests import HTTPError @@ -12,7 +10,9 @@ from .fixtures import * # noqa: F401, F403 -def test_it_creates_a_dataset(basic_dataset: DatasetCore, base_client: ClientCore) -> None: +def test_it_creates_a_dataset( + basic_dataset: DatasetCore, base_client: ClientCore +) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.POST, @@ -26,7 +26,9 @@ def 
test_it_creates_a_dataset(basic_dataset: DatasetCore, base_client: ClientCor assert dataset.slug == "1337" -def test_it_raises_an_error_on_http_error(basic_dataset: DatasetCore, base_client: ClientCore) -> None: +def test_it_raises_an_error_on_http_error( + basic_dataset: DatasetCore, base_client: ClientCore +) -> None: with raises(HTTPError): with responses.RequestsMock() as rsps: rsps.add( diff --git a/darwin/future/tests/core/datasets/test_list_datasets.py b/darwin/future/tests/core/datasets/test_list_datasets.py index 641b7331f..a4c7ac4a2 100644 --- a/darwin/future/tests/core/datasets/test_list_datasets.py +++ b/darwin/future/tests/core/datasets/test_list_datasets.py @@ -11,7 +11,9 @@ from .fixtures import * -def test_it_lists_datasets(base_client: ClientCore, basic_list_of_datasets: List[DatasetCore]) -> None: +def test_it_lists_datasets( + base_client: ClientCore, basic_list_of_datasets: List[DatasetCore] +) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, @@ -29,7 +31,9 @@ def test_it_lists_datasets(base_client: ClientCore, basic_list_of_datasets: List assert datasets[0].slug == "1337" -def test_it_returns_an_error_if_the_client_returns_an_http_error(base_client: ClientCore) -> None: +def test_it_returns_an_error_if_the_client_returns_an_http_error( + base_client: ClientCore, +) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, diff --git a/darwin/future/tests/core/fixtures.py b/darwin/future/tests/core/fixtures.py index f929d191c..0d4782134 100644 --- a/darwin/future/tests/core/fixtures.py +++ b/darwin/future/tests/core/fixtures.py @@ -8,7 +8,6 @@ from darwin.future.data_objects.dataset import DatasetCore from darwin.future.data_objects.team import TeamCore, TeamMemberCore from darwin.future.data_objects.team_member_role import TeamMemberRole -from darwin.future.data_objects.workflow import WFTypeCore @pytest.fixture @@ -128,7 +127,9 @@ def transform_dataset(dataset_json_dict: dict, id: int) -> dict: dataset["id"] = id dataset["slug"] = f"{dataset['slug']}-{id}" dataset["name"] = f"{dataset['name']} {id}" - dataset["releases"] = [{"name": "release2"}] if id % 2 == 0 else [{"name": "release1"}] + dataset["releases"] = ( + [{"name": "release2"}] if id % 2 == 0 else [{"name": "release1"}] + ) return dataset diff --git a/darwin/future/tests/core/items/fixtures.py b/darwin/future/tests/core/items/fixtures.py index 9f15db778..856abc6bd 100644 --- a/darwin/future/tests/core/items/fixtures.py +++ b/darwin/future/tests/core/items/fixtures.py @@ -8,14 +8,17 @@ def UUIDs() -> List[UUID]: return [uuid4() for i in range(10)] + @pytest.fixture def UUIDs_str(UUIDs: List[UUID]) -> List[str]: return [str(uuid) for uuid in UUIDs] + @pytest.fixture def stage_id() -> UUID: return uuid4() + @pytest.fixture def workflow_id() -> UUID: - return uuid4() \ No newline at end of file + return uuid4() diff --git a/darwin/future/tests/core/items/test_get_items.py b/darwin/future/tests/core/items/test_get_items.py index 0bee40afd..e2af43eba 100644 --- a/darwin/future/tests/core/items/test_get_items.py +++ b/darwin/future/tests/core/items/test_get_items.py @@ -9,12 +9,14 @@ from darwin.future.tests.core.items.fixtures import * -def test_get_item_ids(UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore) -> None: +def test_get_item_ids( + UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore +) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, base_client.config.api_endpoint - + 
f"v2/teams/default-team/items/ids?not_statuses=archived,error&sort[id]=desc&dataset_ids=1337", + + "v2/teams/default-team/items/ids?not_statuses=archived,error&sort[id]=desc&dataset_ids=1337", json={"item_ids": UUIDs_str}, status=200, ) @@ -22,7 +24,9 @@ def test_get_item_ids(UUIDs: List[UUID], UUIDs_str: List[str], base_client: Clie assert item_ids == UUIDs -def test_get_item_ids_stage(UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore) -> None: +def test_get_item_ids_stage( + UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore +) -> None: stage_id = str(uuid4()) with responses.RequestsMock() as rsps: rsps.add( diff --git a/darwin/future/tests/core/items/test_move_items.py b/darwin/future/tests/core/items/test_move_items.py index e3ea7bea6..f9c357c76 100644 --- a/darwin/future/tests/core/items/test_move_items.py +++ b/darwin/future/tests/core/items/test_move_items.py @@ -1,5 +1,5 @@ from typing import Dict, List -from uuid import UUID, uuid4 +from uuid import UUID import pytest import responses @@ -37,5 +37,9 @@ def test_move_items( json={"success": UUIDs_str}, status=200, ) - item_ids = move_items_to_stage(base_client, "default-team", workflow_id, 1337, stage_id, UUIDs) - assert rsps.assert_call_count(base_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1) + move_items_to_stage( + base_client, "default-team", workflow_id, 1337, stage_id, UUIDs + ) + assert rsps.assert_call_count( + base_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1 + ) diff --git a/darwin/future/tests/core/test_client.py b/darwin/future/tests/core/test_client.py index 2ee03ad2c..6b5195698 100644 --- a/darwin/future/tests/core/test_client.py +++ b/darwin/future/tests/core/test_client.py @@ -1,4 +1,3 @@ -import unittest from pathlib import Path import pytest @@ -6,7 +5,7 @@ from pydantic import ValidationError from requests import HTTPError -from darwin.future.core.client import ClientCore, DarwinConfig, TeamsConfig +from darwin.future.core.client import ClientCore, DarwinConfig from darwin.future.exceptions import DarwinException, NotFound, Unauthorized from darwin.future.tests.core.fixtures import * from darwin.future.tests.fixtures import * @@ -35,7 +34,7 @@ def test_config_base_url(base_config: DarwinConfig) -> None: @pytest.mark.parametrize("base_url", ["test_url.com", "ftp://test_url.com", ""]) def test_invalid_config_url_validation(base_url: str, tmp_path: Path) -> None: with pytest.raises(ValidationError): - config = DarwinConfig( + DarwinConfig( api_key="test_key", datasets_dir=tmp_path, api_endpoint="http://test_url.com/api/", @@ -92,7 +91,9 @@ def test_client(base_client: ClientCore) -> None: "status_code, exception", [(401, Unauthorized), (404, NotFound)], ) -def test_client_raises_darwin(status_code: int, exception: DarwinException, base_client: ClientCore) -> None: +def test_client_raises_darwin( + status_code: int, exception: DarwinException, base_client: ClientCore +) -> None: endpoint = base_client.config.api_endpoint + "test_endpoint" with responses.RequestsMock() as rsps: rsps.add(responses.GET, endpoint, json={"test": "test"}, status=status_code) diff --git a/darwin/future/tests/core/test_query.py b/darwin/future/tests/core/test_query.py index 5008c195f..77fd612ab 100644 --- a/darwin/future/tests/core/test_query.py +++ b/darwin/future/tests/core/test_query.py @@ -34,14 +34,18 @@ def test_team() -> TeamCore: def test_query_instantiated( - base_client: ClientCore, basic_filters: List[Query.QueryFilter], non_abc_query: Type[Query.Query] + 
base_client: ClientCore, + basic_filters: List[Query.QueryFilter], + non_abc_query: Type[Query.Query], ) -> None: q = non_abc_query(base_client, basic_filters) assert q.filters == basic_filters def test_query_filter_functionality( - base_client: ClientCore, basic_filters: List[Query.QueryFilter], non_abc_query: Type[Query.Query] + base_client: ClientCore, + basic_filters: List[Query.QueryFilter], + non_abc_query: Type[Query.Query], ) -> None: q = non_abc_query(base_client) for f in basic_filters: @@ -99,7 +103,8 @@ def test_query_filter_filters(mod: Optional[str], param: Any, check: Any, expect def test_QF_from_asteriks() -> None: # Builds with dictionary args QF = Query.QueryFilter._from_args( - {"name": "test", "param": "test"}, {"name": "test2", "param": "test2", "modifier": "!="} + {"name": "test", "param": "test"}, + {"name": "test2", "param": "test2", "modifier": "!="}, ) assert len(QF) == 2 assert QF[0].name == "test" @@ -133,7 +138,9 @@ def test_query_first(non_abc_query: Type[Query.Query], base_client: ClientCore) assert first == 1 -def test_query_collect_one(non_abc_query: Type[Query.Query], base_client: ClientCore) -> None: +def test_query_collect_one( + non_abc_query: Type[Query.Query], base_client: ClientCore +) -> None: query = non_abc_query(base_client) query.results = [1, 2, 3] with pytest.raises(MoreThanOneResultFound): diff --git a/darwin/future/tests/core/types/test_querystring.py b/darwin/future/tests/core/types/test_querystring.py index 11a3a0bce..bf38d054a 100644 --- a/darwin/future/tests/core/types/test_querystring.py +++ b/darwin/future/tests/core/types/test_querystring.py @@ -10,7 +10,7 @@ def test_querystring_happy_path() -> None: query_string_2 = QueryString({"foo": "bar", "baz": "qux"}) assert str(query_string_2) == "?foo=bar&baz=qux" - query_string_3 = QueryString(dict()) + query_string_3 = QueryString({}) assert str(query_string_3) == "?" 
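Distilling the `QueryString` assertions surrounding this point into one place, here is a small usage sketch of the contract these tests pin down; it uses only the import path and behaviour shown in this diff.

```python
from darwin.future.core.types.common import QueryString

qs = QueryString({"foo": "bar"})
assert str(qs) == "?foo=bar"        # renders ready to append to an endpoint
assert qs.value == {"foo": "bar"}   # the raw dict stays accessible
assert str(QueryString({})) == "?"  # an empty dict still yields the "?" prefix

# ClientCore.get/delete rely on this via _contain_qs_and_endpoint, which simply
# concatenates endpoint + str(query_string).
```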
assert query_string.value == {"foo": "bar"} diff --git a/darwin/future/tests/core/workflows/test_get_workflow.py b/darwin/future/tests/core/workflows/test_get_workflow.py index c0f443635..f63119092 100644 --- a/darwin/future/tests/core/workflows/test_get_workflow.py +++ b/darwin/future/tests/core/workflows/test_get_workflow.py @@ -1,4 +1,3 @@ -import pytest import responses from pydantic import ValidationError from requests import HTTPError @@ -11,7 +10,9 @@ @responses.activate -def test_get_workflow(base_client: ClientCore, base_single_workflow_object: JSONType) -> None: +def test_get_workflow( + base_client: ClientCore, base_single_workflow_object: JSONType +) -> None: # Mocking the response using responses library response_data = base_single_workflow_object workflow_id = "1" @@ -31,7 +32,9 @@ def test_get_workflow(base_client: ClientCore, base_single_workflow_object: JSON @responses.activate -def test_get_workflow_with_team_slug(base_client: ClientCore, base_single_workflow_object: JSONType) -> None: +def test_get_workflow_with_team_slug( + base_client: ClientCore, base_single_workflow_object: JSONType +) -> None: # Mocking the response using responses library team_slug = "team-slug" workflow_id = "1" diff --git a/darwin/future/tests/core/workflows/test_get_workflows.py b/darwin/future/tests/core/workflows/test_get_workflows.py index 0c6fb751e..9400c3658 100644 --- a/darwin/future/tests/core/workflows/test_get_workflows.py +++ b/darwin/future/tests/core/workflows/test_get_workflows.py @@ -33,7 +33,9 @@ def test_get_workflows(base_client: ClientCore, base_workflows_object: str) -> N @responses.activate -def test_get_workflows_with_team_slug(base_client: ClientCore, base_workflows_object: JSONType) -> None: +def test_get_workflows_with_team_slug( + base_client: ClientCore, base_workflows_object: JSONType +) -> None: # Mocking the response using responses library team_slug = "team-slug" response_data = base_workflows_object diff --git a/darwin/future/tests/core/workflows/test_list_workflows.py b/darwin/future/tests/core/workflows/test_list_workflows.py index c959cea1d..23daf0ace 100644 --- a/darwin/future/tests/core/workflows/test_list_workflows.py +++ b/darwin/future/tests/core/workflows/test_list_workflows.py @@ -1,6 +1,5 @@ from typing import List -import pytest import responses from pydantic import ValidationError from requests import HTTPError @@ -35,7 +34,9 @@ def test_list_workflows(base_client: ClientCore, base_workflows_object: str) -> @responses.activate -def test_list_workflows_with_team_slug(base_client: ClientCore, base_workflows_object: JSONType) -> None: +def test_list_workflows_with_team_slug( + base_client: ClientCore, base_workflows_object: JSONType +) -> None: # Mocking the response using responses library team_slug = "team-slug" response_data = base_workflows_object diff --git a/darwin/future/tests/data_objects/fixtures.py b/darwin/future/tests/data_objects/fixtures.py index 8f0849e3a..4e0a56a7d 100644 --- a/darwin/future/tests/data_objects/fixtures.py +++ b/darwin/future/tests/data_objects/fixtures.py @@ -1,10 +1,7 @@ -import json from pathlib import Path -from typing import List import pytest -from darwin.future.data_objects.workflow import WFStageCore, WorkflowCore test_data_path: Path = Path(__file__).parent / "workflow" / "data" valid_stage_json = test_data_path / "stage.json" diff --git a/darwin/future/tests/data_objects/test_general_darwin_objects.py b/darwin/future/tests/data_objects/test_general_darwin_objects.py index 0a9382d39..3247fb927 100644 --- 
a/darwin/future/tests/data_objects/test_general_darwin_objects.py +++ b/darwin/future/tests/data_objects/test_general_darwin_objects.py @@ -1,5 +1,3 @@ -import unittest - import pytest from pydantic import BaseModel, ValidationError @@ -19,11 +17,11 @@ def test_integrated_parsing_works_with_raw(basic_combined: dict) -> None: def test_broken_obj_raises(broken_combined: dict) -> None: - with pytest.raises(ValidationError) as e_info: - broken = TeamCore.parse_obj(broken_combined) + with pytest.raises(ValidationError): + TeamCore.parse_obj(broken_combined) @pytest.mark.parametrize("test_object", [TeamCore, DatasetCore, ReleaseCore]) def test_empty_obj_raises(test_object: BaseModel) -> None: - with pytest.raises(ValidationError) as e_info: - broken = test_object.parse_obj({}) + with pytest.raises(ValidationError): + test_object.parse_obj({}) diff --git a/darwin/future/tests/data_objects/test_team.py b/darwin/future/tests/data_objects/test_team.py index 1e17bed9e..44008880d 100644 --- a/darwin/future/tests/data_objects/test_team.py +++ b/darwin/future/tests/data_objects/test_team.py @@ -1,5 +1,3 @@ -import unittest - import pytest import responses from pydantic import ValidationError @@ -11,7 +9,9 @@ from darwin.future.tests.fixtures import * -def test_get_team_returns_valid_team(base_client: ClientCore, base_team_json: dict, base_team: TeamCore) -> None: +def test_get_team_returns_valid_team( + base_client: ClientCore, base_team_json: dict, base_team: TeamCore +) -> None: slug = "test-slug" endpoint = base_client.config.api_endpoint + f"teams/{slug}" with responses.RequestsMock() as rsps: @@ -21,21 +21,30 @@ def test_get_team_returns_valid_team(base_client: ClientCore, base_team_json: di assert team == base_team -def test_get_team_fails_on_incorrect_input(base_client: ClientCore, base_team: TeamCore) -> None: +def test_get_team_fails_on_incorrect_input( + base_client: ClientCore, base_team: TeamCore +) -> None: slug = "test-slug" endpoint = base_client.config.api_endpoint + f"teams/{slug}" with responses.RequestsMock() as rsps: rsps.add(responses.GET, endpoint, json={}) with pytest.raises(ValidationError): - team = get_team(base_client, slug) + get_team(base_client, slug) -def test_get_team_members_returns_valid_list(base_client: ClientCore, base_team_member_json: dict) -> None: - synthetic_list = [TeamMemberCore.parse_obj(base_team_member_json), TeamMemberCore.parse_obj(base_team_member_json)] +def test_get_team_members_returns_valid_list( + base_client: ClientCore, base_team_member_json: dict +) -> None: + synthetic_list = [ + TeamMemberCore.parse_obj(base_team_member_json), + TeamMemberCore.parse_obj(base_team_member_json), + ] endpoint = base_client.config.api_endpoint + "memberships" with responses.RequestsMock() as rsps: - rsps.add(responses.GET, endpoint, json=[base_team_member_json, base_team_member_json]) + rsps.add( + responses.GET, endpoint, json=[base_team_member_json, base_team_member_json] + ) members, errors = get_team_members(base_client) assert len(members) == 2 @@ -43,7 +52,9 @@ def test_get_team_members_returns_valid_list(base_client: ClientCore, base_team_ assert members == synthetic_list -def test_get_team_members_fails_on_incorrect_input(base_client: ClientCore, base_team_member_json: dict) -> None: +def test_get_team_members_fails_on_incorrect_input( + base_client: ClientCore, base_team_member_json: dict +) -> None: endpoint = base_client.config.api_endpoint + "memberships" with responses.RequestsMock() as rsps: rsps.add(responses.GET, endpoint, 
json=[base_team_member_json, {}]) @@ -55,11 +66,14 @@ def test_get_team_members_fails_on_incorrect_input(base_client: ClientCore, base assert isinstance(members[0], TeamMemberCore) -def test_team_from_client(base_client: ClientCore, base_team_json: dict, base_team: TeamCore) -> None: +def test_team_from_client( + base_client: ClientCore, base_team_json: dict, base_team: TeamCore +) -> None: with responses.RequestsMock() as rsps: rsps.add( responses.GET, - base_client.config.api_endpoint + f"teams/{base_client.config.default_team}", + base_client.config.api_endpoint + + f"teams/{base_client.config.default_team}", json=base_team_json, ) diff --git a/darwin/future/tests/data_objects/workflow/test_wfdataset.py b/darwin/future/tests/data_objects/workflow/test_wfdataset.py index 0126853dd..32e783b5b 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfdataset.py +++ b/darwin/future/tests/data_objects/workflow/test_wfdataset.py @@ -54,4 +54,6 @@ def test_sad_paths() -> None: working_dataset[key] = InvalidValueForTest() WFDatasetCore.parse_obj(working_dataset) - assert str(excinfo.value).startswith(f"1 validation error for WFDatasetCore\n{key}") + assert str(excinfo.value).startswith( + f"1 validation error for WFDatasetCore\n{key}" + ) diff --git a/darwin/future/tests/data_objects/workflow/test_wfedge.py b/darwin/future/tests/data_objects/workflow/test_wfedge.py index c3d0c7b8c..c20b8c5a9 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfedge.py +++ b/darwin/future/tests/data_objects/workflow/test_wfedge.py @@ -1,6 +1,5 @@ from pathlib import Path -import pytest from darwin.future.data_objects.workflow import WFEdgeCore diff --git a/darwin/future/tests/data_objects/workflow/test_wfstage.py b/darwin/future/tests/data_objects/workflow/test_wfstage.py index a03613aaf..3416478c1 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfstage.py +++ b/darwin/future/tests/data_objects/workflow/test_wfstage.py @@ -1,5 +1,4 @@ from json import loads -from pathlib import Path from uuid import UUID import pytest diff --git a/darwin/future/tests/data_objects/workflow/test_wfstage_config.py b/darwin/future/tests/data_objects/workflow/test_wfstage_config.py index 7a5ad2ef3..53ea142b8 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfstage_config.py +++ b/darwin/future/tests/data_objects/workflow/test_wfstage_config.py @@ -1,6 +1,5 @@ from pathlib import Path -import pytest from darwin.future.data_objects.workflow import WFStageConfigCore diff --git a/darwin/future/tests/data_objects/workflow/test_wfuser.py b/darwin/future/tests/data_objects/workflow/test_wfuser.py index 8619a73e7..d69026ad5 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfuser.py +++ b/darwin/future/tests/data_objects/workflow/test_wfuser.py @@ -1,6 +1,5 @@ from pathlib import Path -import pytest from darwin.future.data_objects.workflow import WFUserCore diff --git a/darwin/future/tests/meta/fixtures.py b/darwin/future/tests/meta/fixtures.py index 0a78cc9e0..2132a511b 100644 --- a/darwin/future/tests/meta/fixtures.py +++ b/darwin/future/tests/meta/fixtures.py @@ -1,4 +1,4 @@ -from pytest import fixture, raises +from pytest import fixture from darwin.future.core.client import DarwinConfig from darwin.future.meta.client import Client diff --git a/darwin/future/tests/meta/objects/fixtures.py b/darwin/future/tests/meta/objects/fixtures.py index fd84327a0..a4a75c14f 100644 --- a/darwin/future/tests/meta/objects/fixtures.py +++ b/darwin/future/tests/meta/objects/fixtures.py @@ -1,12 +1,13 @@ from 
typing import List from uuid import UUID -from pytest import fixture, raises +from pytest import fixture from darwin.future.core.client import ClientCore +from darwin.future.data_objects.dataset import DatasetCore from darwin.future.data_objects.team import TeamCore from darwin.future.data_objects.workflow import WFStageCore, WorkflowCore -from darwin.future.meta.objects import stage +from darwin.future.meta.objects.dataset import Dataset from darwin.future.meta.objects.stage import Stage from darwin.future.meta.objects.team import Team from darwin.future.meta.objects.workflow import Workflow @@ -24,15 +25,24 @@ def base_meta_team(base_client: ClientCore, base_team: TeamCore) -> Team: @fixture -def base_meta_workflow(base_client: ClientCore, base_workflow: WorkflowCore) -> Workflow: +def base_meta_workflow( + base_client: ClientCore, base_workflow: WorkflowCore +) -> Workflow: return Workflow(base_client, base_workflow) @fixture -def base_meta_stage(base_client: ClientCore, base_stage: WFStageCore, base_UUID: UUID) -> Stage: +def base_meta_stage( + base_client: ClientCore, base_stage: WFStageCore, base_UUID: UUID +) -> Stage: return Stage(base_client, base_stage) @fixture def base_meta_stage_list(base_meta_stage: Stage, base_UUID: UUID) -> List[Stage]: return [base_meta_stage] + + +@fixture +def base_meta_dataset(base_client: ClientCore, base_dataset: DatasetCore) -> Dataset: + return Dataset(base_client, base_dataset, meta_params={"team_slug": "test_team"}) diff --git a/darwin/future/tests/meta/objects/test_datasetmeta.py b/darwin/future/tests/meta/objects/test_datasetmeta.py index 7d3088b38..47c3c71e4 100644 --- a/darwin/future/tests/meta/objects/test_datasetmeta.py +++ b/darwin/future/tests/meta/objects/test_datasetmeta.py @@ -1,27 +1,14 @@ import string -from typing import Generator -from unittest.mock import Mock, patch -from pytest import fixture, mark, raises +from pytest import mark, raises +from requests import HTTPError from responses import RequestsMock from darwin.future.core.client import DarwinConfig from darwin.future.meta.client import Client from darwin.future.meta.objects.dataset import Dataset from darwin.future.tests.core.fixtures import * - - -@fixture -def _delete_by_slug_mock() -> Generator: - with patch.object(Dataset, "_delete_by_slug") as mock: - yield mock - - -@fixture -def _delete_by_id_mock() -> Generator: - with patch.object(Dataset, "_delete_by_id") as mock: - yield mock - +from darwin.future.tests.meta.objects.fixtures import * # `datasets` tests # TODO datasets tests @@ -31,23 +18,20 @@ def _delete_by_id_mock() -> Generator: # `create_dataset` tests -def test_create_dataset_returns_exceptions_thrown(base_config: DarwinConfig) -> None: +def test_create_dataset_raises_HTTPError(base_config: DarwinConfig) -> None: valid_client = Client(base_config) valid_slug = "test_dataset" base_url = base_config.base_url + "api/datasets" - with RequestsMock() as rsps: + with RequestsMock() as rsps, raises(HTTPError): rsps.add(rsps.POST, base_url, status=500) - - exceptions, dataset_created = Dataset.create_dataset(valid_client, valid_slug) - - assert exceptions is not None - assert "500 Server Error" in str(exceptions[0]) - assert dataset_created is None + Dataset.create_dataset(valid_client, valid_slug) -def test_create_dataset_returns_dataset_created_if_dataset_created(base_config: DarwinConfig) -> None: +def test_create_dataset_returns_dataset_created_if_dataset_created( + base_config: DarwinConfig, +) -> None: valid_client = Client(base_config) valid_slug = 
"test_dataset" @@ -61,9 +45,8 @@ def test_create_dataset_returns_dataset_created_if_dataset_created(base_config: status=201, ) - exceptions, dataset_created = Dataset.create_dataset(valid_client, valid_slug) + dataset_created = Dataset.create_dataset(valid_client, valid_slug) - assert exceptions is None assert dataset_created is not None assert dataset_created.id == 1 assert dataset_created.name == "test dataset" @@ -74,128 +57,41 @@ def test_create_dataset_returns_dataset_created_if_dataset_created(base_config: # TODO update_dataset tests -# `delete_dataset` tests -def test_delete_dataset_returns_exceptions_thrown( - base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock -) -> None: - _delete_by_slug_mock.side_effect = Exception("test exception") - - valid_client = Client(base_config) - - exceptions, dataset_deleted = Dataset.delete_dataset(valid_client, "test_dataset") - - assert exceptions is not None - assert str(exceptions[0]) == "test exception" - assert dataset_deleted == -1 - - assert _delete_by_slug_mock.call_count == 1 - assert _delete_by_id_mock.call_count == 0 - - -def test_delete_dataset_calls_delete_by_slug_as_appropriate( - base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock -) -> None: - valid_client = Client(base_config) - - exceptions, _ = Dataset.delete_dataset(valid_client, "test_dataset") - - assert exceptions is None - assert _delete_by_slug_mock.call_count == 1 - assert _delete_by_id_mock.call_count == 0 - - -def test_delete_dataset_calls_delete_by_id_as_appropriate( - base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock +@mark.parametrize( + "invalid_slug", + [ + "", + " ", + "test dataset", + *[f"dataset_{c}" for c in string.punctuation if c not in ["-", "_", "."]], + ], +) +def test_validate_slugh_raises_exception_if_passed_invalid_inputs( + invalid_slug: str, ) -> None: - valid_client = Client(base_config) - - exceptions, _ = Dataset.delete_dataset(valid_client, 1) - - assert exceptions is None - assert _delete_by_slug_mock.call_count == 0 - assert _delete_by_id_mock.call_count == 1 - - -# Test `_delete_by_slug` -def test_delete_by_slug_raises_exception_if_not_passed_str_and_client(base_config: DarwinConfig) -> None: - valid_client = Client(base_config) - valid_slug = "test_dataset" - invalid_client = "client" - invalid_slug = 1 - - with raises(AssertionError): - Dataset._delete_by_slug(valid_client, invalid_slug) # type: ignore - with raises(AssertionError): - Dataset._delete_by_slug(invalid_client, valid_slug) # type: ignore - - -def test_delete_by_slug__returns_dataset_deleted_if_dataset_found(base_config: DarwinConfig) -> None: - valid_client = Client(base_config) - valid_slug = "test_dataset" - - base_url = base_config.base_url + "api/datasets" - - with RequestsMock() as rsps: - rsps.add( - rsps.GET, - base_url + "?id=test_dataset", - json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, - status=200, - ) - rsps.add( - rsps.PUT, - base_url + "/1/archive", - json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, - status=200, - ) - dataset_deleted = Dataset._delete_by_slug(valid_client, valid_slug) - - assert dataset_deleted == 1 - - -# Test `_delete_by_id` -def test_delete_by_id_raises_exception_if_not_passed_int_and_client(base_config: DarwinConfig) -> None: - valid_client = Client(base_config) - valid_id = 1 - invalid_client = "client" - invalid_id = "1" + Dataset._validate_slug(invalid_slug) - with raises(AssertionError): - 
Dataset._delete_by_id(valid_client, invalid_id) # type: ignore - with raises(AssertionError): - Dataset._delete_by_id(invalid_client, valid_id) # type: ignore +def test_validate_slug_returns_none_if_passed_valid_slug() -> None: + valid_slug = "test-dataset" + assert Dataset._validate_slug(valid_slug) is None -def test_delete_by_id_returns_dataset_deleted_if_dataset_found(base_config: DarwinConfig) -> None: - valid_client = Client(base_config) - valid_id = 1 +def test_delete(base_meta_dataset: Dataset, base_config: DarwinConfig) -> None: base_url = base_config.base_url + "api/datasets" - with RequestsMock() as rsps: rsps.add( rsps.PUT, - base_url + "/1/archive", - json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, + base_url + f"/{base_meta_dataset.id}/archive", + json={ + "id": base_meta_dataset.id, + "name": "Test Dataset", + "slug": "test_dataset", + }, status=200, ) - dataset_deleted = Dataset._delete_by_id(valid_client, valid_id) + dataset_deleted = base_meta_dataset.delete() assert dataset_deleted == 1 - - -@mark.parametrize( - "invalid_slug", - ["", " ", "test dataset", *[f"dataset_{c}" for c in string.punctuation if c not in ["-", "_", "."]]], -) -def test_validate_slugh_raises_exception_if_passed_invalid_inputs(invalid_slug: str) -> None: - with raises(AssertionError): - Dataset._validate_slug(invalid_slug) - - -def test_validate_slug_returns_none_if_passed_valid_slug() -> None: - valid_slug = "test-dataset" - - assert Dataset._validate_slug(valid_slug) is None diff --git a/darwin/future/tests/meta/objects/test_stagemeta.py b/darwin/future/tests/meta/objects/test_stagemeta.py index 99f4dfef5..9a1d24c7d 100644 --- a/darwin/future/tests/meta/objects/test_stagemeta.py +++ b/darwin/future/tests/meta/objects/test_stagemeta.py @@ -2,11 +2,8 @@ from uuid import UUID import responses -from pytest import fixture, mark, raises -from responses import RequestsMock -from sklearn import base +from pytest import fixture -from darwin.future.core.client import DarwinConfig from darwin.future.data_objects.workflow import WFStageCore, WFTypeCore from darwin.future.meta.client import Client from darwin.future.meta.objects.stage import Stage @@ -22,17 +19,29 @@ def uuid_str() -> str: @fixture def base_WFStage(uuid_str: str) -> WFStageCore: - return WFStageCore(id=UUID(uuid_str), name="test-stage", type=WFTypeCore.ANNOTATE, assignable_users=[], edges=[]) + return WFStageCore( + id=UUID(uuid_str), + name="test-stage", + type=WFTypeCore.ANNOTATE, + assignable_users=[], + edges=[], + ) @fixture -def stage_meta(base_meta_client: Client, base_WFStage: WFStageCore, workflow_id: UUID) -> Stage: +def stage_meta( + base_meta_client: Client, base_WFStage: WFStageCore, workflow_id: UUID +) -> Stage: return Stage( - base_meta_client, base_WFStage, {"team_slug": "default-team", "dataset_id": 1337, "workflow_id": workflow_id} + base_meta_client, + base_WFStage, + {"team_slug": "default-team", "dataset_id": 1337, "workflow_id": workflow_id}, ) -def test_item_ids(base_meta_client: Client, stage_meta: Stage, UUIDs_str: List[str], UUIDs: List[UUID]) -> None: +def test_item_ids( + base_meta_client: Client, stage_meta: Stage, UUIDs_str: List[str], UUIDs: List[UUID] +) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, @@ -63,7 +72,10 @@ def test_move_attached_files_to_stage( status=200, ) stage_meta.move_attached_files_to_stage(stage_meta.id) - assert rsps.assert_call_count(base_meta_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1) + assert rsps.assert_call_count( + 
base_meta_client.config.api_endpoint + "v2/teams/default-team/items/stage", + 1, + ) assert rsps.assert_call_count( base_meta_client.config.api_endpoint + f"v2/teams/default-team/items/ids?workflow_stage_ids={str(stage_meta.id)}&dataset_ids=1337", diff --git a/darwin/future/tests/meta/objects/test_teammeta.py b/darwin/future/tests/meta/objects/test_teammeta.py index 06370b4d7..8b010820c 100644 --- a/darwin/future/tests/meta/objects/test_teammeta.py +++ b/darwin/future/tests/meta/objects/test_teammeta.py @@ -1,21 +1,181 @@ +from typing import Generator from unittest.mock import Mock, patch -import responses from pytest import fixture, raises +from responses import RequestsMock -from darwin.future.core.client import ClientCore +from darwin.future.core.client import ClientCore, DarwinConfig +from darwin.future.data_objects.dataset import DatasetCore from darwin.future.data_objects.team import TeamMemberCore +from darwin.future.meta.client import Client +from darwin.future.meta.objects.dataset import Dataset from darwin.future.meta.objects.team import Team from darwin.future.tests.core.fixtures import * from darwin.future.tests.meta.objects.fixtures import * +@fixture +def _delete_by_slug_mock() -> Generator: + with patch.object(Team, "_delete_dataset_by_slug") as mock: + yield mock + + +@fixture +def _delete_by_id_mock() -> Generator: + with patch.object(Team, "_delete_dataset_by_id") as mock: + yield mock + + def test_team_meta_collects_members( - base_meta_team: Team, base_client: ClientCore, base_team_member: TeamMemberCore, base_team_member_json: dict + base_meta_team: Team, + base_client: ClientCore, + base_team_member: TeamMemberCore, + base_team_member_json: dict, ) -> None: - with responses.RequestsMock() as rsps: + with RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" - rsps.add(responses.GET, endpoint, json=[base_team_member_json]) + rsps.add(rsps.GET, endpoint, json=[base_team_member_json]) members = base_meta_team.members._collect() assert len(members) == 1 assert members[0]._element == base_team_member + + +# `delete_dataset` tests +def test_delete_dataset_returns_exceptions_thrown( + base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock +) -> None: + _delete_by_slug_mock.side_effect = Exception("test exception") + + valid_client = Client(base_config) + + exceptions, dataset_deleted = Team.delete_dataset(valid_client, "test_dataset") + + assert exceptions is not None + assert str(exceptions[0]) == "test exception" + assert dataset_deleted == -1 + + assert _delete_by_slug_mock.call_count == 1 + assert _delete_by_id_mock.call_count == 0 + + +def test_delete_dataset_calls_delete_by_slug_as_appropriate( + base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock +) -> None: + valid_client = Client(base_config) + + exceptions, _ = Team.delete_dataset(valid_client, "test_dataset") + + assert exceptions is None + assert _delete_by_slug_mock.call_count == 1 + assert _delete_by_id_mock.call_count == 0 + + +def test_delete_dataset_calls_delete_by_id_as_appropriate( + base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock +) -> None: + valid_client = Client(base_config) + + exceptions, _ = Team.delete_dataset(valid_client, 1) + + assert exceptions is None + assert _delete_by_slug_mock.call_count == 0 + assert _delete_by_id_mock.call_count == 1 + + +def test_delete_by_slug__returns_dataset_deleted_if_dataset_found( + base_config: DarwinConfig, +) -> None: + valid_client = 
Client(base_config) + valid_slug = "test_dataset" + + base_url = base_config.base_url + "api/datasets" + + with RequestsMock() as rsps: + rsps.add( + rsps.GET, + base_url + "?id=test_dataset", + json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, + status=200, + ) + rsps.add( + rsps.PUT, + base_url + "/1/archive", + json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, + status=200, + ) + dataset_deleted = Team._delete_dataset_by_slug(valid_client, valid_slug) + + assert dataset_deleted == 1 + + +def test_delete_by_id_returns_dataset_deleted_if_dataset_found( + base_config: DarwinConfig, +) -> None: + valid_client = Client(base_config) + valid_id = 1 + + base_url = base_config.base_url + "api/datasets" + + with RequestsMock() as rsps: + rsps.add( + rsps.PUT, + base_url + "/1/archive", + json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, + status=200, + ) + dataset_deleted = Team._delete_dataset_by_id(valid_client, valid_id) + + assert dataset_deleted == 1 + + +# Test `_delete_by_id` +def test_delete_by_id_raises_exception_if_not_passed_int_and_client( + base_config: DarwinConfig, +) -> None: + valid_client = Client(base_config) + valid_id = 1 + invalid_client = "client" + invalid_id = "1" + + with raises(AssertionError): + Team._delete_dataset_by_id(valid_client, invalid_id) # type: ignore + + with raises(AssertionError): + Team._delete_dataset_by_id(invalid_client, valid_id) # type: ignore + + +# Test `_delete_by_slug` +def test_delete_by_slug_raises_exception_if_not_passed_str_and_client( + base_config: DarwinConfig, +) -> None: + valid_client = Client(base_config) + valid_slug = "test_dataset" + invalid_client = "client" + invalid_slug = 1 + + with raises(AssertionError): + Team._delete_dataset_by_slug(valid_client, invalid_slug) # type: ignore + + with raises(AssertionError): + Team._delete_dataset_by_slug(invalid_client, valid_slug) # type: ignore + + +def test_create_dataset(base_meta_team: Team, base_config: DarwinConfig) -> None: + base_url = base_config.base_url + "api/datasets" + valid_slug = "test_dataset" + valid_name = "test dataset" + with RequestsMock() as rsps: + rsps.add( + rsps.POST, + base_url, + json={"id": 1, "name": valid_name, "slug": valid_slug}, + status=201, + ) + + dataset_created = base_meta_team.create_dataset(valid_slug) + assert dataset_created is not None + assert isinstance(dataset_created, Dataset) + assert isinstance(dataset_created._element, DatasetCore) + assert dataset_created.id == 1 + assert dataset_created.name == valid_name + assert dataset_created.slug == valid_slug diff --git a/darwin/future/tests/meta/queries/test_dataset.py b/darwin/future/tests/meta/queries/test_dataset.py index 3d3e163d9..d06e123c6 100644 --- a/darwin/future/tests/meta/queries/test_dataset.py +++ b/darwin/future/tests/meta/queries/test_dataset.py @@ -1,21 +1,21 @@ import responses -from pytest import fixture, mark from darwin.future.core.client import ClientCore -from darwin.future.data_objects.dataset import DatasetCore from darwin.future.meta.objects.dataset import Dataset from darwin.future.meta.queries.dataset import DatasetQuery from darwin.future.tests.core.fixtures import * -def test_dataset_collects_basic(base_client: ClientCore, base_datasets_json: dict) -> None: +def test_dataset_collects_basic( + base_client: ClientCore, base_datasets_json: dict +) -> None: query = DatasetQuery(base_client) with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, 
json=base_datasets_json) datasets = query._collect() assert len(datasets) == 2 - assert all([isinstance(dataset, Dataset) for dataset in datasets]) + assert all(isinstance(dataset, Dataset) for dataset in datasets) def test_datasetquery_only_passes_back_correctly_formed_objects( @@ -31,9 +31,13 @@ def test_datasetquery_only_passes_back_correctly_formed_objects( assert isinstance(datasets[0], Dataset) -def test_dataset_filters_name(base_client: ClientCore, base_datasets_json: dict) -> None: +def test_dataset_filters_name( + base_client: ClientCore, base_datasets_json: dict +) -> None: with responses.RequestsMock() as rsps: - query = DatasetQuery(base_client).where({"name": "name", "param": "test dataset 1"}) + query = DatasetQuery(base_client).where( + {"name": "name", "param": "test dataset 1"} + ) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json) datasets = query._collect() @@ -53,9 +57,13 @@ def test_dataset_filters_id(base_client: ClientCore, base_datasets_json: dict) - assert datasets[0]._element.slug == "test-dataset-1" -def test_dataset_filters_slug(base_client: ClientCore, base_datasets_json: dict) -> None: +def test_dataset_filters_slug( + base_client: ClientCore, base_datasets_json: dict +) -> None: with responses.RequestsMock() as rsps: - query = DatasetQuery(base_client).where({"name": "slug", "param": "test-dataset-1"}) + query = DatasetQuery(base_client).where( + {"name": "slug", "param": "test-dataset-1"} + ) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json) datasets = query._collect() @@ -64,9 +72,13 @@ def test_dataset_filters_slug(base_client: ClientCore, base_datasets_json: dict) assert datasets[0]._element.slug == "test-dataset-1" -def test_dataset_filters_releases(base_client: ClientCore, base_datasets_json_with_releases: dict) -> None: +def test_dataset_filters_releases( + base_client: ClientCore, base_datasets_json_with_releases: dict +) -> None: with responses.RequestsMock() as rsps: - query = DatasetQuery(base_client).where({"name": "releases", "param": "release1"}) + query = DatasetQuery(base_client).where( + {"name": "releases", "param": "release1"} + ) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json_with_releases) @@ -76,7 +88,9 @@ def test_dataset_filters_releases(base_client: ClientCore, base_datasets_json_wi assert datasets_odd_ids[0]._element.slug == "test-dataset-1" assert datasets_odd_ids[1]._element.slug == "test-dataset-3" - query2 = DatasetQuery(base_client).where({"name": "releases", "param": "release2"}) + query2 = DatasetQuery(base_client).where( + {"name": "releases", "param": "release2"} + ) datasets_even_ids = query2._collect() assert len(datasets_even_ids) == 2 diff --git a/darwin/future/tests/meta/queries/test_stage.py b/darwin/future/tests/meta/queries/test_stage.py index a9cc61267..1d7ad0721 100644 --- a/darwin/future/tests/meta/queries/test_stage.py +++ b/darwin/future/tests/meta/queries/test_stage.py @@ -1,5 +1,3 @@ -from typing import List - import pytest import responses @@ -13,18 +11,22 @@ @pytest.fixture def filled_query(base_client: ClientCore, base_workflow_meta: Workflow) -> StageQuery: - return StageQuery(base_client, meta_params={"workflow_id": str(base_workflow_meta.id)}) + return StageQuery( + base_client, meta_params={"workflow_id": str(base_workflow_meta.id)} + ) @pytest.fixture -def base_workflow_meta(base_client: ClientCore, 
base_single_workflow_object: dict) -> Workflow: +def base_workflow_meta( + base_client: ClientCore, base_single_workflow_object: dict +) -> Workflow: return Workflow(base_client, WorkflowCore.parse_obj(base_single_workflow_object)) @pytest.fixture def multi_stage_workflow_object(base_single_workflow_object: dict) -> dict: stage = base_single_workflow_object["stages"][0] - types = [t for t in WFTypeCore.__members__.values()] * 3 + types = list(WFTypeCore.__members__.values()) * 3 stages = [] for i, t in enumerate(types): temp = stage.copy() @@ -41,11 +43,16 @@ def test_WFTypes_accept_unknonwn() -> None: def test_stage_collects_basic( - filled_query: StageQuery, base_single_workflow_object: dict, base_workflow_meta: Workflow + filled_query: StageQuery, + base_single_workflow_object: dict, + base_workflow_meta: Workflow, ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: - endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" + endpoint = ( + filled_query.client.config.api_endpoint + + f"v2/teams/default-team/workflows/{UUID}" + ) rsps.add(responses.GET, endpoint, json=base_single_workflow_object) stages = filled_query._collect() assert len(stages) == len(base_workflow_meta.stages) @@ -53,11 +60,16 @@ def test_stage_collects_basic( def test_stage_filters_basic( - filled_query: StageQuery, multi_stage_workflow_object: dict, base_workflow_meta: Workflow + filled_query: StageQuery, + multi_stage_workflow_object: dict, + base_workflow_meta: Workflow, ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: - endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" + endpoint = ( + filled_query.client.config.api_endpoint + + f"v2/teams/default-team/workflows/{UUID}" + ) rsps.add(responses.GET, endpoint, json=multi_stage_workflow_object) stages = filled_query.where({"name": "name", "param": "stage1"})._collect() assert len(stages) == 1 @@ -65,13 +77,19 @@ def test_stage_filters_basic( assert stages[0]._element.name == "stage1" -@pytest.mark.parametrize("wf_type", [t for t in WFTypeCore.__members__.values()]) +@pytest.mark.parametrize("wf_type", list(WFTypeCore.__members__.values())) def test_stage_filters_WFType( - wf_type: WFTypeCore, filled_query: StageQuery, multi_stage_workflow_object: dict, base_workflow_meta: Workflow + wf_type: WFTypeCore, + filled_query: StageQuery, + multi_stage_workflow_object: dict, + base_workflow_meta: Workflow, ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: - endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" + endpoint = ( + filled_query.client.config.api_endpoint + + f"v2/teams/default-team/workflows/{UUID}" + ) rsps.add(responses.GET, endpoint, json=multi_stage_workflow_object) stages = filled_query.where({"name": "type", "param": wf_type.value})._collect() assert len(stages) == 3 diff --git a/darwin/future/tests/meta/queries/test_team_member.py b/darwin/future/tests/meta/queries/test_team_member.py index 19b19a2e2..6fe314e58 100644 --- a/darwin/future/tests/meta/queries/test_team_member.py +++ b/darwin/future/tests/meta/queries/test_team_member.py @@ -10,7 +10,9 @@ from darwin.future.tests.core.fixtures import * -def test_team_member_collects_basic(base_client: ClientCore, base_team_members_json: List[dict]) -> None: +def test_team_member_collects_basic( + base_client: ClientCore, base_team_members_json: List[dict] +) -> None: query = TeamMemberQuery(base_client) 
with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" @@ -20,7 +22,9 @@ def test_team_member_collects_basic(base_client: ClientCore, base_team_members_j assert isinstance(members[0], TeamMember) -def test_team_member_only_passes_back_correct(base_client: ClientCore, base_team_member_json: dict) -> None: +def test_team_member_only_passes_back_correct( + base_client: ClientCore, base_team_member_json: dict +) -> None: query = TeamMemberQuery(base_client) with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" @@ -30,13 +34,15 @@ def test_team_member_only_passes_back_correct(base_client: ClientCore, base_team assert isinstance(members[0], TeamMember) -@pytest.mark.parametrize("role", [role for role in TeamMemberRole]) +@pytest.mark.parametrize("role", list(TeamMemberRole)) def test_team_member_filters_role( role: TeamMemberRole, base_client: ClientCore, base_team_members_json: List[dict] ) -> None: with responses.RequestsMock() as rsps: # Test equal - query = TeamMemberQuery(base_client).where({"name": "role", "param": role.value}) + query = TeamMemberQuery(base_client).where( + {"name": "role", "param": role.value} + ) endpoint = base_client.config.api_endpoint + "memberships" rsps.add(responses.GET, endpoint, json=base_team_members_json) members = query._collect() @@ -45,7 +51,9 @@ def test_team_member_filters_role( # Test not equal rsps.reset() - query = TeamMemberQuery(base_client).where({"name": "role", "param": role.value, "modifier": "!="}) + query = TeamMemberQuery(base_client).where( + {"name": "role", "param": role.value, "modifier": "!="} + ) rsps.add(responses.GET, endpoint, json=base_team_members_json) members = query._collect() assert len(members) == len(TeamMemberRole) - 1 @@ -53,7 +61,9 @@ def test_team_member_filters_role( assert member._element.role != role -def test_team_member_filters_general(base_client: ClientCore, base_team_members_json: List[dict]) -> None: +def test_team_member_filters_general( + base_client: ClientCore, base_team_members_json: List[dict] +) -> None: for idx in range(len(base_team_members_json)): base_team_members_json[idx]["id"] = idx + 1 @@ -73,7 +83,9 @@ def test_team_member_filters_general(base_client: ClientCore, base_team_members_ members = ( TeamMemberQuery(base_client) .where({"name": "id", "param": 1, "modifier": ">"}) - .where({"name": "id", "param": len(base_team_members_json), "modifier": "<"}) + .where( + {"name": "id", "param": len(base_team_members_json), "modifier": "<"} + ) ._collect() ) diff --git a/darwin/future/tests/meta/queries/test_workflow.py b/darwin/future/tests/meta/queries/test_workflow.py index 71f77c802..b9761f786 100644 --- a/darwin/future/tests/meta/queries/test_workflow.py +++ b/darwin/future/tests/meta/queries/test_workflow.py @@ -4,8 +4,6 @@ import responses from darwin.future.core.client import ClientCore -from darwin.future.core.types.query import Modifier -from darwin.future.data_objects.workflow import WorkflowCore from darwin.future.meta.objects.workflow import Workflow from darwin.future.meta.queries.workflow import WorkflowQuery from darwin.future.tests.core.fixtures import * @@ -20,20 +18,28 @@ def workflows_query_endpoint(team: str) -> str: @responses.activate -def test_workflowquery_collects_basic(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_collects_basic( + base_client: 
ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []) workflows = query._collect() assert len(workflows) == 3 - assert all([isinstance(workflow, Workflow) for workflow in workflows]) + assert all(isinstance(workflow, Workflow) for workflow in workflows) @responses.activate -def test_workflowquery_filters_uuid(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_uuid( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -49,8 +55,12 @@ def test_workflowquery_filters_uuid(base_client: ClientCore, base_filterable_wor @responses.activate -def test_workflowquery_filters_inserted_at(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_inserted_at( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) start = "2021-06-01T15:00:00.000+00:00" @@ -80,8 +90,12 @@ def test_workflowquery_filters_inserted_at(base_client: ClientCore, base_filtera @responses.activate -def test_workflowquery_filters_updated_at(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_updated_at( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) start = "2021-06-04T15:00:00.000+00:00" @@ -111,8 +125,12 @@ def test_workflowquery_filters_updated_at(base_client: ClientCore, base_filterab @responses.activate -def test_workflowquery_filters_dataset_id(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_dataset_id( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -131,7 +149,9 @@ def test_workflowquery_filters_dataset_id(base_client: ClientCore, base_filterab def test_workflowquery_filters_dataset_id_multiple_ids( base_client: ClientCore, base_filterable_workflows: dict ) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, 
json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -148,8 +168,12 @@ def test_workflowquery_filters_dataset_id_multiple_ids( @responses.activate -def test_workflowquery_filters_dataset_name(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_dataset_name( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -168,7 +192,9 @@ def test_workflowquery_filters_dataset_name(base_client: ClientCore, base_filter def test_workflowquery_filters_dataset_name_mutliple_names( base_client: ClientCore, base_filterable_workflows: dict ) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -185,8 +211,12 @@ def test_workflowquery_filters_dataset_name_mutliple_names( @responses.activate -def test_workflowquery_filters_stages(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_stages( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -202,8 +232,12 @@ def test_workflowquery_filters_stages(base_client: ClientCore, base_filterable_w @responses.activate -def test_workflowquery_filters_stages_multiple(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_stages_multiple( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( diff --git a/darwin/future/tests/meta/test_client.py b/darwin/future/tests/meta/test_client.py index fb3b74b16..aa86bd480 100644 --- a/darwin/future/tests/meta/test_client.py +++ b/darwin/future/tests/meta/test_client.py @@ -1,6 +1,3 @@ -import unittest - -import pytest import responses from darwin.future.core.client import DarwinConfig @@ -14,12 +11,18 @@ def test_creates_from_api_key() -> None: with responses.RequestsMock() as rsps: base_api_endpoint = DarwinConfig._default_api_endpoint() - rsps.add(responses.GET, base_api_endpoint + "users/token_info", json={"selected_team": {"slug": "test-team"}}) + rsps.add( + responses.GET, + base_api_endpoint + "users/token_info", + json={"selected_team": {"slug": "test-team"}}, + ) client = Client.from_api_key(api_key="test") assert client.config.default_team == "test-team" -def test_team_property(base_meta_client: Client, base_team: TeamCore, base_team_json: 
dict) -> None:
+def test_team_property(
+    base_meta_client: Client, base_team: TeamCore, base_team_json: dict
+) -> None:
     client = base_meta_client
     endpoint = client.config.api_endpoint + f"teams/{client.config.default_team}"
     with responses.RequestsMock() as rsps:
diff --git a/darwin/version/__init__.py b/darwin/version/__init__.py
index eb467a33f..8f694aac8 100644
--- a/darwin/version/__init__.py
+++ b/darwin/version/__init__.py
@@ -1 +1 @@
-__version__ = "0.8.44"
+__version__ = "0.8.45"
diff --git a/deploy/_filter_files.py b/deploy/_filter_files.py
index ce2480e7a..c1777b0df 100755
--- a/deploy/_filter_files.py
+++ b/deploy/_filter_files.py
@@ -11,7 +11,7 @@ def main(argv: List[str]) -> None:
     if file_extension.startswith("."):
         file_extension = file_extension[1:]
 
-    files_out = [file for file in files_in if file.endswith(f".{file_extension}") and "darwin/future" in file]
+    files_out = [file for file in files_in if file.endswith(f".{file_extension}") and 'future' in file]
 
     print(" ".join(files_out))
 
diff --git a/deploy/format_lint.sh b/deploy/format_lint.sh
index 23270d5e6..a40b74e48 100755
--- a/deploy/format_lint.sh
+++ b/deploy/format_lint.sh
@@ -35,6 +35,8 @@ if [ "$ACTION" == "format" ]; then
     pipinstall ruff
 elif [ "$ACTION" == "typecheck" ]; then
     pipinstall mypy
+    pipinstall types-requests
+    pipinstall types-pyYAML
 else
     echo "Action must be format, typecheck, or lint"
     exit 1
diff --git a/docs/DEV.md b/docs/DEV.md
new file mode 100644
index 000000000..81b9daacb
--- /dev/null
+++ b/docs/DEV.md
@@ -0,0 +1,55 @@
+# Development Environment
+This is not the only way to develop on darwin-py, but it is an easy, configurable way to manage things like underlying dependencies and Python versions.
+## Shell environment
+No particular shell is required; [zsh](https://github.com/ohmyzsh/ohmyzsh/wiki/Installing-ZSH) + [oh my zsh](https://ohmyz.sh/) is a commonly used setup. Whatever environment you use, make sure to install the aliases and PATH exports that the tools below require for your particular shell. This is particularly pertinent for poetry, which has an extra step that it prints to the console but doesn't include on its webpage.
+## Pyenv
+Pyenv manages system Python versions; install instructions can be found [here](https://github.com/pyenv/pyenv).
+After installing pyenv, install a Python version that is compatible with darwin-py (3.8-3.10 as of writing):
+
+`pyenv install 3.10`
+
+If the command `pyenv` isn't recognized, it hasn't been added to your shell environment config file (.zshrc, .bashrc, etc.) correctly.
+## Poetry
+Poetry manages project-level dependencies and local Python versions. Install instructions are [here](https://python-poetry.org/docs/). Make sure to follow the printed instructions and add the path to your shell environment; if `poetry --version` doesn't work after installation, your PATH hasn't been updated.
+
+Set two config settings for poetry once you have it set up and recognized as a command:
+1. Set poetry to use the local version of Python, to be used in conjunction with pyenv later
+
+   - `poetry config virtualenvs.prefer-active-python true`
+
+2. Tell poetry to create a local copy of Python inside a .venv directory when it is called to manage a project
+
+   - `poetry config virtualenvs.in-project true`
+## New Folder Setup
+To start from scratch and get a development/QA environment set up, run through the steps below (summarized first as a single command sketch).
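+A minimal end-to-end sketch, assuming pyenv and poetry are installed and configured as above, and using Python 3.10 as an example version:
+
+```
+# clone darwin-py and move into the repo
+git clone https://github.com/v7labs/darwin-py.git
+cd darwin-py
+
+# pin the interpreter for this folder, then create the in-project .venv and install dependencies
+pyenv local 3.10
+poetry shell
+poetry install
+```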
+This process means you will have a fresh Python install with only the dependencies required by darwin-py, uncorrupted by other packages installed on the system Python:
+- clone the darwin-py repo
+- navigate to the downloaded repo
+- set pyenv to use a local version: `pyenv local <version>`, e.g. `pyenv local 3.10`
+- create a local environment with poetry: `poetry shell`
+- install dependencies: `poetry install`
+
+Pyenv and poetry are used in conjunction here: pyenv tells the system that whenever `python` is called in a folder that has been set with `pyenv local <version>`, that local version should be used. Poetry is then set to prefer that local version of Python and to create a per-project copy of it to use: it copies `<version>` into a local .venv folder and installs dependencies there. If a fresh environment is required, run `rm -rf .venv` while inside the project folder, set a new pyenv version if needed, and re-run the poetry commands.
+
+## Subsequent Uses
+Once a folder is set up, it can easily be reused:
+- navigate to the folder
+- run `poetry shell`
+- execute any commands as normal, e.g. `python -m darwin.cli ...`
+- once complete, close the terminal or manually exit the shell via `exit`
+
+You can also run poetry commands without being in a shell environment by prepending `poetry run ...`, for example:
+
+`poetry run python -m darwin.cli`
+
+## Useful Aliases
+Aliases can be helpful for testing and development. Add them to your shell configuration file (.bashrc, .zshrc, etc.) for ease of use:
+```
+DARWIN_PY_DEV="$HOME/Development/darwin-py"
+alias dpy="poetry run python -m darwin.cli"
+alias dpyt="poetry run python -m pytest -W ignore::DeprecationWarning"
+alias dpydb="poetry run python -m debugpy --listen 5678 --wait-for-client $DARWIN_PY_DEV/darwin/cli.py"
+```
+
+- dpy -> quick way to run darwin
+- dpyt -> calls pytest
+- dpydb -> creates a remote-attach debugging instance for vscode to attach to
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index 4e27775fc..a980110e9 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,9 +1,35 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
+ +[[package]] +name = "albumentations" +version = "1.3.1" +description = "Fast image augmentation library and easy to use wrapper around other libraries" +category = "main" +optional = true +python-versions = ">=3.7" +files = [ + {file = "albumentations-1.3.1-py3-none-any.whl", hash = "sha256:6b641d13733181d9ecdc29550e6ad580d1bfa9d25e2213a66940062f25e291bd"}, + {file = "albumentations-1.3.1.tar.gz", hash = "sha256:a6a38388fe546c568071e8c82f414498e86c9ed03c08b58e7a88b31cf7a244c6"}, +] + +[package.dependencies] +numpy = ">=1.11.1" +opencv-python-headless = ">=4.1.1" +PyYAML = "*" +qudida = ">=0.0.4" +scikit-image = ">=0.16.1" +scipy = ">=1.1.0" + +[package.extras] +develop = ["imgaug (>=0.4.0)", "pytest"] +imgaug = ["imgaug (>=0.4.0)"] +tests = ["pytest"] [[package]] name = "argcomplete" version = "2.1.2" description = "Bash tab completion for argparse" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -22,6 +48,7 @@ test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -43,6 +70,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "black" version = "22.12.0" description = "The uncompromising code formatter." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -79,6 +107,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -90,6 +119,7 @@ files = [ name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -189,6 +219,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -204,6 +235,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -215,6 +247,7 @@ files = [ name = "connected-components-3d" version = "3.12.3" description = "Connected components on 2D and 3D images. Supports multiple labels." 
+category = "main" optional = true python-versions = ">=3.7,<4.0" files = [ @@ -262,6 +295,7 @@ numpy = "*" name = "debugpy" version = "1.7.0" description = "An implementation of the Debug Adapter Protocol for Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -293,6 +327,7 @@ files = [ name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" +category = "main" optional = false python-versions = "*" files = [ @@ -307,6 +342,7 @@ packaging = "*" name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -321,6 +357,7 @@ test = ["pytest (>=6)"] name = "humanize" version = "4.6.0" description = "Python humanize utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -338,6 +375,7 @@ tests = ["freezegun", "pytest", "pytest-cov"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -345,10 +383,43 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +[[package]] +name = "imageio" +version = "2.31.5" +description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." +category = "main" +optional = true +python-versions = ">=3.8" +files = [ + {file = "imageio-2.31.5-py3-none-any.whl", hash = "sha256:97f68e12ba676f2f4b541684ed81f7f3370dc347e8321bc68ee34d37b2dbac9f"}, + {file = "imageio-2.31.5.tar.gz", hash = "sha256:d8e53f9cd4054880276a3dac0a28c85ba7874084856a55a0294a8ae6ed7f3a8e"}, +] + +[package.dependencies] +numpy = "*" +pillow = ">=8.3.2" + +[package.extras] +all-plugins = ["astropy", "av", "imageio-ffmpeg", "psutil", "tifffile"] +all-plugins-pypy = ["av", "imageio-ffmpeg", "psutil", "tifffile"] +build = ["wheel"] +dev = ["black", "flake8", "fsspec[github]", "pytest", "pytest-cov"] +docs = ["numpydoc", "pydata-sphinx-theme", "sphinx (<6)"] +ffmpeg = ["imageio-ffmpeg", "psutil"] +fits = ["astropy"] +full = ["astropy", "av", "black", "flake8", "fsspec[github]", "gdal", "imageio-ffmpeg", "itk", "numpydoc", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "sphinx (<6)", "tifffile", "wheel"] +gdal = ["gdal"] +itk = ["itk"] +linting = ["black", "flake8"] +pyav = ["av"] +test = ["fsspec[github]", "pytest", "pytest-cov"] +tifffile = ["tifffile"] + [[package]] name = "importlib-metadata" version = "5.2.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -369,6 +440,7 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -387,6 +459,7 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -398,6 +471,7 @@ files = [ name = "isort" version = "5.11.5" description = "A Python utility / library to sort Python imports." 
+category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -415,6 +489,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "joblib" version = "1.3.2" description = "Lightweight pipelining with Python functions" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -426,6 +501,7 @@ files = [ name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -445,10 +521,27 @@ typing-extensions = {version = "*", markers = "python_version < \"3.8\""} format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +[[package]] +name = "lazy-loader" +version = "0.3" +description = "lazy_loader" +category = "main" +optional = true +python-versions = ">=3.7" +files = [ + {file = "lazy_loader-0.3-py3-none-any.whl", hash = "sha256:1e9e76ee8631e264c62ce10006718e80b2cfc74340d17d1031e0f84af7478554"}, + {file = "lazy_loader-0.3.tar.gz", hash = "sha256:3b68898e34f5b2a29daaaac172c6555512d0f32074f147e2254e4a6d9d838f37"}, +] + +[package.extras] +lint = ["pre-commit (>=3.3)"] +test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] + [[package]] name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -474,6 +567,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -485,6 +579,7 @@ files = [ name = "mpire" version = "2.8.0" description = "A Python package for easy multiprocessing, but faster than multiprocessing" +category = "main" optional = false python-versions = "*" files = [ @@ -505,38 +600,39 @@ testing = ["dataclasses", "multiprocess", "multiprocess (>=0.70.15)", "numpy", " [[package]] name = "mypy" -version = "1.5.1" +version = "1.6.0" description = "Optional static typing for Python" +category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, - {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, - {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, - {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, - {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, - {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, - {file = 
"mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, - {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, - {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, - {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, - {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, - {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, - {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, - {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, - {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, - {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, - {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, - {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, - {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, + {file = "mypy-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:091f53ff88cb093dcc33c29eee522c087a438df65eb92acd371161c1f4380ff0"}, + {file = "mypy-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb7ff4007865833c470a601498ba30462b7374342580e2346bf7884557e40531"}, + {file = "mypy-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49499cf1e464f533fc45be54d20a6351a312f96ae7892d8e9f1708140e27ce41"}, + {file = "mypy-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c192445899c69f07874dabda7e931b0cc811ea055bf82c1ababf358b9b2a72c"}, + {file = "mypy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:3df87094028e52766b0a59a3e46481bb98b27986ed6ded6a6cc35ecc75bb9182"}, + {file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"}, + {file = 
"mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"}, + {file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"}, + {file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"}, + {file = "mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"}, + {file = "mypy-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:971104bcb180e4fed0d7bd85504c9036346ab44b7416c75dd93b5c8c6bb7e28f"}, + {file = "mypy-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab98b8f6fdf669711f3abe83a745f67f50e3cbaea3998b90e8608d2b459fd566"}, + {file = "mypy-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a69db3018b87b3e6e9dd28970f983ea6c933800c9edf8c503c3135b3274d5ad"}, + {file = "mypy-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dccd850a2e3863891871c9e16c54c742dba5470f5120ffed8152956e9e0a5e13"}, + {file = "mypy-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8598307150b5722854f035d2e70a1ad9cc3c72d392c34fffd8c66d888c90f17"}, + {file = "mypy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fea451a3125bf0bfe716e5d7ad4b92033c471e4b5b3e154c67525539d14dc15a"}, + {file = "mypy-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e28d7b221898c401494f3b77db3bac78a03ad0a0fff29a950317d87885c655d2"}, + {file = "mypy-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b7a99275a61aa22256bab5839c35fe8a6887781862471df82afb4b445daae6"}, + {file = "mypy-1.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7469545380dddce5719e3656b80bdfbb217cfe8dbb1438532d6abc754b828fed"}, + {file = "mypy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7807a2a61e636af9ca247ba8494031fb060a0a744b9fee7de3a54bed8a753323"}, + {file = "mypy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2dad072e01764823d4b2f06bc7365bb1d4b6c2f38c4d42fade3c8d45b0b4b67"}, + {file = "mypy-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b19006055dde8a5425baa5f3b57a19fa79df621606540493e5e893500148c72f"}, + {file = "mypy-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31eba8a7a71f0071f55227a8057468b8d2eb5bf578c8502c7f01abaec8141b2f"}, + {file = "mypy-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e0db37ac4ebb2fee7702767dfc1b773c7365731c22787cb99f507285014fcaf"}, + {file = "mypy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:c69051274762cccd13498b568ed2430f8d22baa4b179911ad0c1577d336ed849"}, + {file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"}, + {file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"}, ] [package.dependencies] @@ -553,6 +649,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
+category = "main" optional = true python-versions = ">=3.5" files = [ @@ -560,10 +657,30 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "networkx" +version = "3.1" +description = "Python package for creating and manipulating graphs and networks" +category = "main" +optional = true +python-versions = ">=3.8" +files = [ + {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, + {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, +] + +[package.extras] +default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] +developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] + [[package]] name = "nibabel" version = "5.1.0" description = "Access a multitude of neuroimaging data formats" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -594,6 +711,7 @@ zstd = ["pyzstd (>=0.14.3)"] name = "numpy" version = "1.21.6" description = "NumPy is the fundamental package for array computing with Python." +category = "main" optional = false python-versions = ">=3.7,<3.11" files = [ @@ -634,6 +752,7 @@ files = [ name = "nvidia-cublas-cu11" version = "11.10.3.66" description = "CUBLAS native runtime libraries" +category = "main" optional = true python-versions = ">=3" files = [ @@ -649,6 +768,7 @@ wheel = "*" name = "nvidia-cuda-nvrtc-cu11" version = "11.7.99" description = "NVRTC native runtime libraries" +category = "main" optional = true python-versions = ">=3" files = [ @@ -665,6 +785,7 @@ wheel = "*" name = "nvidia-cuda-runtime-cu11" version = "11.7.99" description = "CUDA Runtime native Libraries" +category = "main" optional = true python-versions = ">=3" files = [ @@ -680,6 +801,7 @@ wheel = "*" name = "nvidia-cudnn-cu11" version = "8.5.0.96" description = "cuDNN runtime libraries" +category = "main" optional = true python-versions = ">=3" files = [ @@ -695,6 +817,7 @@ wheel = "*" name = "opencv-python-headless" version = "4.8.1.78" description = "Wrapper package for OpenCV python bindings." +category = "main" optional = true python-versions = ">=3.6" files = [ @@ -721,6 +844,7 @@ numpy = [ name = "orjson" version = "3.9.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -790,6 +914,7 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -801,6 +926,7 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." 
+category = "main" optional = true python-versions = ">=3.7" files = [ @@ -812,6 +938,7 @@ files = [ name = "pillow" version = "9.5.0" description = "Python Imaging Library (Fork)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -891,6 +1018,7 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -902,6 +1030,7 @@ files = [ name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -920,6 +1049,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -938,6 +1068,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -990,6 +1121,7 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1004,6 +1136,7 @@ plugins = ["importlib-metadata"] name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1040,6 +1173,7 @@ files = [ name = "pytest" version = "7.4.2" description = "pytest: simple powerful testing with Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1063,6 +1197,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-rerunfailures" version = "12.0" description = "pytest plugin to re-run tests to eliminate flaky failures" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1079,6 +1214,7 @@ pytest = ">=6.2" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1089,10 +1225,49 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pywavelets" +version = "1.4.1" +description = "PyWavelets, wavelet transform module" +category = "main" +optional = true +python-versions = ">=3.8" +files = [ + {file = "PyWavelets-1.4.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:d854411eb5ee9cb4bc5d0e66e3634aeb8f594210f6a1bed96dbed57ec70f181c"}, + {file = "PyWavelets-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:231b0e0b1cdc1112f4af3c24eea7bf181c418d37922a67670e9bf6cfa2d544d4"}, + {file = "PyWavelets-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:754fa5085768227c4f4a26c1e0c78bc509a266d9ebd0eb69a278be7e3ece943c"}, + {file = "PyWavelets-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da7b9c006171be1f9ddb12cc6e0d3d703b95f7f43cb5e2c6f5f15d3233fcf202"}, + {file = "PyWavelets-1.4.1-cp310-cp310-win32.whl", hash = "sha256:67a0d28a08909f21400cb09ff62ba94c064882ffd9e3a6b27880a111211d59bd"}, + {file 
= "PyWavelets-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91d3d393cffa634f0e550d88c0e3f217c96cfb9e32781f2960876f1808d9b45b"}, + {file = "PyWavelets-1.4.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:64c6bac6204327321db30b775060fbe8e8642316e6bff17f06b9f34936f88875"}, + {file = "PyWavelets-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f19327f2129fb7977bc59b966b4974dfd72879c093e44a7287500a7032695de"}, + {file = "PyWavelets-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad987748f60418d5f4138db89d82ba0cb49b086e0cbb8fd5c3ed4a814cfb705e"}, + {file = "PyWavelets-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:875d4d620eee655346e3589a16a73790cf9f8917abba062234439b594e706784"}, + {file = "PyWavelets-1.4.1-cp311-cp311-win32.whl", hash = "sha256:7231461d7a8eb3bdc7aa2d97d9f67ea5a9f8902522818e7e2ead9c2b3408eeb1"}, + {file = "PyWavelets-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:daf0aa79842b571308d7c31a9c43bc99a30b6328e6aea3f50388cd8f69ba7dbc"}, + {file = "PyWavelets-1.4.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:ab7da0a17822cd2f6545626946d3b82d1a8e106afc4b50e3387719ba01c7b966"}, + {file = "PyWavelets-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:578af438a02a86b70f1975b546f68aaaf38f28fb082a61ceb799816049ed18aa"}, + {file = "PyWavelets-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb5ca8d11d3f98e89e65796a2125be98424d22e5ada360a0dbabff659fca0fc"}, + {file = "PyWavelets-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:058b46434eac4c04dd89aeef6fa39e4b6496a951d78c500b6641fd5b2cc2f9f4"}, + {file = "PyWavelets-1.4.1-cp38-cp38-win32.whl", hash = "sha256:de7cd61a88a982edfec01ea755b0740e94766e00a1ceceeafef3ed4c85c605cd"}, + {file = "PyWavelets-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:7ab8d9db0fe549ab2ee0bea61f614e658dd2df419d5b75fba47baa761e95f8f2"}, + {file = "PyWavelets-1.4.1-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:23bafd60350b2b868076d976bdd92f950b3944f119b4754b1d7ff22b7acbf6c6"}, + {file = "PyWavelets-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0e56cd7a53aed3cceca91a04d62feb3a0aca6725b1912d29546c26f6ea90426"}, + {file = "PyWavelets-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030670a213ee8fefa56f6387b0c8e7d970c7f7ad6850dc048bd7c89364771b9b"}, + {file = "PyWavelets-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71ab30f51ee4470741bb55fc6b197b4a2b612232e30f6ac069106f0156342356"}, + {file = "PyWavelets-1.4.1-cp39-cp39-win32.whl", hash = "sha256:47cac4fa25bed76a45bc781a293c26ac63e8eaae9eb8f9be961758d22b58649c"}, + {file = "PyWavelets-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:88aa5449e109d8f5e7f0adef85f7f73b1ab086102865be64421a3a3d02d277f4"}, + {file = "PyWavelets-1.4.1.tar.gz", hash = "sha256:6437af3ddf083118c26d8f97ab43b0724b956c9f958e9ea788659f6a2834ba93"}, +] + +[package.dependencies] +numpy = ">=1.17.3" + [[package]] name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" files = [ @@ -1116,6 +1291,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1124,6 +1300,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1131,8 +1308,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1149,6 +1333,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1156,15 +1341,35 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "qudida" +version = "0.0.4" +description = "QUick and DIrty Domain Adaptation" +category = "main" +optional = true +python-versions = ">=3.5.0" +files = [ + {file = "qudida-0.0.4-py3-none-any.whl", hash = "sha256:4519714c40cd0f2e6c51e1735edae8f8b19f4efe1f33be13e9d644ca5f736dd6"}, + {file = "qudida-0.0.4.tar.gz", hash = "sha256:db198e2887ab0c9aa0023e565afbff41dfb76b361f85fd5e13f780d75ba18cc8"}, +] + +[package.dependencies] +numpy = ">=0.18.0" +opencv-python-headless = ">=4.0.1" +scikit-learn = ">=0.19.1" +typing-extensions = "*" + [[package]] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1186,6 +1391,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "responses" version = "0.22.0" description = "A utility library for mocking out the `requests` Python library." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1207,6 +1413,7 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rich" version = "13.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -1222,10 +1429,89 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "ruff" +version = "0.0.292" +description = "An extremely fast Python linter, written in Rust." 
+category = "main" +optional = true +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, + {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, + {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, + {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, + {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, + {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, +] + +[[package]] +name = "scikit-image" +version = "0.21.0" +description = "Image processing in Python" +category = "main" +optional = true +python-versions = ">=3.8" +files = [ + {file = "scikit_image-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:978ac3302252155a8556cdfe067bad2d18d5ccef4e91c2f727bc564ed75566bc"}, + {file = "scikit_image-0.21.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:82c22e008527e5ee26ab08e3ce919998ef164d538ff30b9e5764b223cfda06b1"}, + {file = "scikit_image-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd29d2631d3e975c377066acfc1f4cb2cc95e2257cf70e7fedfcb96441096e88"}, + {file = "scikit_image-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6c12925ceb9f3aede555921e26642d601b2d37d1617002a2636f2cb5178ae2f"}, + {file = "scikit_image-0.21.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:1f538d4de77e4f3225d068d9ea2965bed3f7dda7f457a8f89634fa22ffb9ad8c"}, + {file = "scikit_image-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec9bab6920ac43037d7434058b67b5778d42c60f67b8679239f48a471e7ed6f8"}, + {file = "scikit_image-0.21.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:a54720430dba833ffbb6dedd93d9f0938c5dd47d20ab9ba3e4e61c19d95f6f19"}, + {file = "scikit_image-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e40dd102da14cdadc09210f930b4556c90ff8f99cd9d8bcccf9f73a86c44245"}, + {file = "scikit_image-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff5719c7eb99596a39c3e1d9b564025bae78ecf1da3ee6842d34f6965b5f1474"}, + {file = "scikit_image-0.21.0-cp311-cp311-win_amd64.whl", hash = "sha256:146c3824253eee9ff346c4ad10cb58376f91aefaf4a4bb2fe11aa21691f7de76"}, + {file = "scikit_image-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e1b09f81a99c9c390215929194847b3cd358550b4b65bb6e42c5393d69cb74a"}, + {file = "scikit_image-0.21.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:9f7b5fb4a22f0d5ae0fa13beeb887c925280590145cd6d8b2630794d120ff7c7"}, + {file = "scikit_image-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4814033717f0b6491fee252facb9df92058d6a72ab78dd6408a50f3915a88b8"}, + {file = "scikit_image-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0d6ed6502cca0c9719c444caafa0b8cda0f9e29e01ca42f621a240073284be"}, + {file = "scikit_image-0.21.0-cp38-cp38-win_amd64.whl", hash = "sha256:9194cb7bc21215fde6c1b1e9685d312d2aa8f65ea9736bc6311126a91c860032"}, + {file = "scikit_image-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54df1ddc854f37a912d42bc724e456e86858107e94048a81a27720bc588f9937"}, + {file = "scikit_image-0.21.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:c01e3ab0a1fabfd8ce30686d4401b7ed36e6126c9d4d05cb94abf6bdc46f7ac9"}, + {file = "scikit_image-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ef5d8d1099317b7b315b530348cbfa68ab8ce32459de3c074d204166951025c"}, + {file = "scikit_image-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b1e96c59cab640ca5c5b22c501524cfaf34cbe0cb51ba73bd9a9ede3fb6e1d"}, + {file = "scikit_image-0.21.0-cp39-cp39-win_amd64.whl", hash = "sha256:9cffcddd2a5594c0a06de2ae3e1e25d662745a26f94fda31520593669677c010"}, + {file = "scikit_image-0.21.0.tar.gz", hash = "sha256:b33e823c54e6f11873ea390ee49ef832b82b9f70752c8759efd09d5a4e3d87f0"}, +] + +[package.dependencies] +imageio = ">=2.27" +lazy_loader = ">=0.2" +networkx = ">=2.8" +numpy = ">=1.21.1" +packaging = ">=21" +pillow = ">=9.0.1" +PyWavelets = ">=1.1.1" +scipy = ">=1.8" +tifffile = ">=2022.8.12" + +[package.extras] +build = ["Cython (>=0.29.32)", "build", "meson-python (>=0.13)", "ninja", "numpy (>=1.21.1)", "packaging (>=21)", "pythran", "setuptools (>=67)", "spin (==0.3)", "wheel"] +data = ["pooch (>=1.6.0)"] +default = ["PyWavelets (>=1.1.1)", "imageio (>=2.27)", "lazy_loader (>=0.2)", "networkx (>=2.8)", "numpy (>=1.21.1)", "packaging (>=21)", "pillow (>=9.0.1)", "scipy (>=1.8)", "tifffile (>=2022.8.12)"] +developer = ["pre-commit", "rtoml"] +docs = ["dask[array] (>=2022.9.2)", "ipykernel", "ipywidgets", "kaleido", "matplotlib (>=3.5)", "myst-parser", "numpydoc (>=1.5)", "pandas (>=1.5)", "plotly (>=5.10)", "pooch (>=1.6)", "pydata-sphinx-theme (>=0.13)", "pytest-runner", "scikit-learn (>=0.24.0)", "seaborn (>=0.11)", "sphinx (>=5.0)", 
"sphinx-copybutton", "sphinx-gallery (>=0.11)", "sphinx_design (>=0.3)", "tifffile (>=2022.8.12)"] +optional = ["SimpleITK", "astropy (>=5.0)", "cloudpickle (>=0.2.1)", "dask[array] (>=2021.1.0)", "matplotlib (>=3.5)", "pooch (>=1.6.0)", "pyamg", "scikit-learn (>=0.24.0)"] +test = ["asv", "matplotlib (>=3.5)", "pooch (>=1.6.0)", "pytest (>=7.0)", "pytest-cov (>=2.11.0)", "pytest-faulthandler", "pytest-localserver"] + [[package]] name = "scikit-learn" version = "1.3.1" description = "A set of python modules for machine learning and data mining" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1268,6 +1554,7 @@ tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc ( name = "scipy" version = "1.10.1" description = "Fundamental algorithms for scientific computing in Python" +category = "main" optional = true python-versions = "<3.12,>=3.8" files = [ @@ -1306,6 +1593,7 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1322,6 +1610,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "threadpoolctl" version = "3.2.0" description = "threadpoolctl" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1329,10 +1618,29 @@ files = [ {file = "threadpoolctl-3.2.0.tar.gz", hash = "sha256:c96a0ba3bdddeaca37dc4cc7344aafad41cdb8c313f74fdfe387a867bba93355"}, ] +[[package]] +name = "tifffile" +version = "2023.7.10" +description = "Read and write TIFF files" +category = "main" +optional = true +python-versions = ">=3.8" +files = [ + {file = "tifffile-2023.7.10-py3-none-any.whl", hash = "sha256:94dfdec321ace96abbfe872a66cfd824800c099a2db558443453eebc2c11b304"}, + {file = "tifffile-2023.7.10.tar.gz", hash = "sha256:c06ec460926d16796eeee249a560bcdddf243daae36ac62af3c84a953cd60b4a"}, +] + +[package.dependencies] +numpy = "*" + +[package.extras] +all = ["defusedxml", "fsspec", "imagecodecs (>=2023.1.23)", "lxml", "matplotlib", "zarr"] + [[package]] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1344,6 +1652,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1355,6 +1664,7 @@ files = [ name = "torch" version = "1.13.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -1395,6 +1705,7 @@ opt-einsum = ["opt-einsum (>=3.3)"] name = "torchvision" version = "0.14.1" description = "image and video datasets and models for torch deep learning" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1421,7 +1732,7 @@ files = [ [package.dependencies] numpy = "*" -pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" +pillow = ">=5.3.0,<8.3.0 || >=8.4.0" requests = "*" torch = "1.13.1" typing-extensions = "*" @@ -1433,6 +1744,7 @@ scipy = ["scipy"] name = "tqdm" version = "4.66.1" description = "Fast, Extensible Progress Meter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1453,6 +1765,7 @@ telegram = ["requests"] name = "typed-ast" version = "1.5.5" description = "a fork 
of Python 2 and 3 ast modules with type comment support" +category = "main" optional = true python-versions = ">=3.6" files = [ @@ -1503,6 +1816,7 @@ files = [ name = "types-pyyaml" version = "6.0.12.12" description = "Typing stubs for PyYAML" +category = "main" optional = false python-versions = "*" files = [ @@ -1512,13 +1826,14 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.8" +version = "2.31.0.9" description = "Typing stubs for requests" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "types-requests-2.31.0.8.tar.gz", hash = "sha256:e1b325c687b3494a2f528ab06e411d7092cc546cc9245c000bacc2fca5ae96d4"}, - {file = "types_requests-2.31.0.8-py3-none-any.whl", hash = "sha256:39894cbca3fb3d032ed8bdd02275b4273471aa5668564617cc1734b0a65ffdf8"}, + {file = "types-requests-2.31.0.9.tar.gz", hash = "sha256:3bb11188795cc3aa39f9635032044ee771009370fb31c3a06ae952b267b6fcd7"}, + {file = "types_requests-2.31.0.9-py3-none-any.whl", hash = "sha256:140e323da742a0cd0ff0a5a83669da9ffcebfaeb855d367186b2ec3985ba2742"}, ] [package.dependencies] @@ -1528,6 +1843,7 @@ urllib3 = ">=2" name = "types-toml" version = "0.10.8.7" description = "Typing stubs for toml" +category = "main" optional = true python-versions = "*" files = [ @@ -1539,6 +1855,7 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1550,6 +1867,7 @@ files = [ name = "upolygon" version = "0.1.10" description = "Collection of fast polygon operations for DL" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1571,6 +1889,7 @@ numpy = "*" name = "urllib3" version = "2.0.6" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1588,6 +1907,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "wheel" version = "0.41.2" description = "A built-package format for Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1602,6 +1922,7 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1614,13 +1935,13 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -dev = ["black", "debugpy", "isort", "mypy", "pytest", "pytest-rerunfailures", "responses"] +dev = ["black", "debugpy", "isort", "mypy", "pytest", "pytest-rerunfailures", "responses", "ruff"] medical = ["connected-components-3d", "nibabel"] -ml = ["scikit-learn", "torch", "torchvision"] +ml = ["albumentations", "scikit-learn", "torch", "torchvision"] ocv = ["opencv-python-headless"] test = ["pytest", "responses"] [metadata] lock-version = "2.0" python-versions = ">=3.7.0,<3.11" -content-hash = "9865d21e7a68ded971e8b79d8d7c3b3ec669bb97eadce284fda60b892540ad2d" +content-hash = "4d7aeadf6e4cc865f1a031b4d68ca1da234f1a7829215af4e21256bfb0ba4b1b" diff --git a/pyproject.toml b/pyproject.toml index d0d834d87..75362bb10 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "darwin-py" -version = "0.8.44" +version = "0.8.45" description = "Library and command line interface for darwin.v7labs.com" homepage = "https://docs.v7labs.com/reference/getting-started-2" documentation = "https://darwin-py-sdk.v7labs.com/index.html" @@ -46,6 +46,7 @@ no_implicit_optional = true warn_return_any = false warn_unreachable = true pretty = true +implicit_reexport = true [tool.pydantic-mypy] init_forbid_extra = true @@ -53,21 +54,18 @@ init_typed = true warn_required_dynamic_aliases = true warn_untyped_fields = true -[tool.black] -line-length = 160 - [tool.ruff] select = ["E", "F", "C"] ignore = ["E203", "E402"] -line-length = 160 +line-length = 88 [tool.ruff.per-file-ignores] -"__init__.py" = ["E402"] -"path/to/file.py" = ["E402"] -"**/{tests,docs,tools}/*" = ["E402"] +"__init__.py" = ["E402", "F401"] +"**/{tests,docs,tools}/*" = ["E402", "F403"] + [tool.flake8] -max-line-length = 160 +max-line-length = 88 ignore = ["E203", "W503", "E402"] [tool.poetry.dependencies] @@ -88,6 +86,7 @@ toml = "^0.10.2" nibabel = { version = "^5.0.0", python = ">=3.8.1", optional = true } connected-components-3d = { version = "^3.10.3", optional = true } scikit-learn = { version = "^1.2.0", python = ">=3.8.1,<3.11", optional = true } +albumentations = { version = "^1.3.1", python = ">=3.8", optional = true } torch = { version = "^1.13.1", optional = true } torchvision = { version = "^0.14.1", optional = true } black = { version = "^22.12.0", optional = true } @@ -96,7 +95,7 @@ mypy = { version = "^1.5", optional = true, python = ">=3.8" } responses = { version = "^0.22.0", optional = true } pytest = { version = "^7.2.1", optional = true } debugpy = { version = "^1.6.5", optional = true } -types-requests = { version = "^2.28.11.8" } +types-requests = "^2.28.11.8" mpire = { version = 
"^2.7.0" } tqdm = "^4.64.1" types-pyyaml = "^6.0.12.9" @@ -104,6 +103,7 @@ python-dotenv = { version = "^1.0.0", python = ">3.8" } opencv-python-headless = { version = "^4.8.0.76", optional = true } pyyaml = "^6.0.1" pytest-rerunfailures = { version = "^12.0", optional = true } +ruff = { version = "^0.0.292", optional = true } [tool.poetry.extras] dev = [ @@ -116,6 +116,7 @@ dev = [ "pytest", "flake8-pyproject", "pytest-rerunfailures", + "ruff", ] test = ["responses", "pytest", "flake8-pyproject"] ml = ["torch", "torchvision", "scikit-learn", "albumentations"] diff --git a/tests/darwin/exporter/formats/export_darwin_1_0_test.py b/tests/darwin/exporter/formats/export_darwin_1_0_test.py index a997c62b3..4bca09c28 100644 --- a/tests/darwin/exporter/formats/export_darwin_1_0_test.py +++ b/tests/darwin/exporter/formats/export_darwin_1_0_test.py @@ -1,6 +1,5 @@ from pathlib import Path -import pytest import darwin.datatypes as dt from darwin.exporter.formats.darwin_1_0 import _build_json @@ -8,9 +7,7 @@ class TestBuildJson: def test_empty_annotation_file(self): - annotation_file = dt.AnnotationFile( - path=Path("test.json"), filename="test.json", annotation_classes=[], annotations=[] - ) + annotation_file = dt.AnnotationFile(path=Path("test.json"), filename="test.json", annotation_classes=[], annotations=[]) assert _build_json(annotation_file) == { "image": { @@ -59,6 +56,7 @@ def test_complete_annotation_file(self): {"x": 531.6440000000002, "y": 428.4196}, {"x": 529.8140000000002, "y": 426.5896}, ] + annotation_class = dt.AnnotationClass(name="test", annotation_type="polygon") annotation = dt.Annotation(annotation_class=annotation_class, data={"path": polygon_path}, subs=[]) @@ -128,3 +126,145 @@ def test_complex_polygon(self): "annotations": [{"complex_polygon": {"path": polygon_path}, "name": "test", "slot_names": []}], "dataset": "None", } + + def test_polygon_annotation_file_with_bbox(self): + polygon_path = [ + {"x": 534.1440000000002, "y": 429.0896}, + {"x": 531.6440000000002, "y": 428.4196}, + {"x": 529.8140000000002, "y": 426.5896}, + ] + + bounding_box = {"x": 557.66, "y": 428.98, "w": 160.76, "h": 315.3} + + annotation_class = dt.AnnotationClass(name="test", annotation_type="polygon") + annotation = dt.Annotation(annotation_class=annotation_class, data={"path": polygon_path, "bounding_box": bounding_box}, subs=[]) + + annotation_file = dt.AnnotationFile( + path=Path("test.json"), + filename="test.json", + annotation_classes=[annotation_class], + annotations=[annotation], + image_height=1080, + image_width=1920, + image_url="https://darwin.v7labs.com/image.jpg", + ) + + assert _build_json(annotation_file) == { + "image": { + "seq": None, + "width": 1920, + "height": 1080, + "filename": "test.json", + "original_filename": "test.json", + "url": "https://darwin.v7labs.com/image.jpg", + "thumbnail_url": None, + "path": None, + "workview_url": None, + }, + "annotations": [{"polygon": {"path": polygon_path}, "name": "test", "slot_names": [], "bounding_box": bounding_box}], + "dataset": "None", + } + + def test_complex_polygon_with_bbox(self): + polygon_path = [ + [{"x": 230.06, "y": 174.04}, {"x": 226.39, "y": 170.36}, {"x": 224.61, "y": 166.81}], + [{"x": 238.98, "y": 171.69}, {"x": 236.97, "y": 174.04}, {"x": 238.67, "y": 174.04}], + [ + {"x": 251.75, "y": 169.77}, + {"x": 251.75, "y": 154.34}, + {"x": 251.08, "y": 151.84}, + {"x": 249.25, "y": 150.01}, + ], + ] + + bounding_box = {"x": 557.66, "y": 428.98, "w": 160.76, "h": 315.3} + + annotation_class = dt.AnnotationClass(name="test", 
annotation_type="complex_polygon") + annotation = dt.Annotation(annotation_class=annotation_class, data={"paths": polygon_path, "bounding_box": bounding_box}, subs=[]) + + annotation_file = dt.AnnotationFile( + path=Path("test.json"), + filename="test.json", + annotation_classes=[annotation_class], + annotations=[annotation], + image_height=1080, + image_width=1920, + image_url="https://darwin.v7labs.com/image.jpg", + ) + + assert _build_json(annotation_file) == { + "image": { + "seq": None, + "width": 1920, + "height": 1080, + "filename": "test.json", + "original_filename": "test.json", + "url": "https://darwin.v7labs.com/image.jpg", + "thumbnail_url": None, + "path": None, + "workview_url": None, + }, + "annotations": [{"complex_polygon": {"path": polygon_path}, "name": "test", "slot_names": [], "bounding_box": bounding_box}], + "dataset": "None", + } + + def test_bounding_box(self): + bounding_box_data = {"x": 100, "y": 150, "w": 50, "h": 30} + annotation_class = dt.AnnotationClass(name="bbox_test", annotation_type="bounding_box") + annotation = dt.Annotation(annotation_class=annotation_class, data=bounding_box_data, subs=[]) + + annotation_file = dt.AnnotationFile( + path=Path("test.json"), + filename="test.json", + annotation_classes=[annotation_class], + annotations=[annotation], + image_height=1080, + image_width=1920, + image_url="https://darwin.v7labs.com/image.jpg", + ) + + assert _build_json(annotation_file) == { + "image": { + "seq": None, + "width": 1920, + "height": 1080, + "filename": "test.json", + "original_filename": "test.json", + "url": "https://darwin.v7labs.com/image.jpg", + "thumbnail_url": None, + "path": None, + "workview_url": None, + }, + "annotations": [{"bounding_box": bounding_box_data, "name": "bbox_test", "slot_names": []}], + "dataset": "None", + } + + def test_tags(self): + tag_data = "sample_tag" + annotation_class = dt.AnnotationClass(name="tag_test", annotation_type="tag") + annotation = dt.Annotation(annotation_class=annotation_class, data=tag_data, subs=[]) + + annotation_file = dt.AnnotationFile( + path=Path("test.json"), + filename="test.json", + annotation_classes=[annotation_class], + annotations=[annotation], + image_height=1080, + image_width=1920, + image_url="https://darwin.v7labs.com/image.jpg", + ) + assert _build_json(annotation_file) == { + "image": { + "seq": None, + "width": 1920, + "height": 1080, + "filename": "test.json", + "original_filename": "test.json", + "url": "https://darwin.v7labs.com/image.jpg", + "thumbnail_url": None, + "path": None, + "workview_url": None, + }, + "annotations": [{"tag": {}, "name": "tag_test", "slot_names": []}], + "dataset": "None", + } diff --git a/tests/darwin/torch/transform_test.py b/tests/darwin/torch/transform_test.py new file mode 100644 index 000000000..099fbf241 --- /dev/null +++ b/tests/darwin/torch/transform_test.py @@ -0,0 +1,101 @@ +import pytest +import torch +from albumentations import BboxParams, Compose, HorizontalFlip, Resize +from PIL import Image + +from darwin.torch.transforms import AlbumentationsTransform + +# Sample data +SAMPLE_IMAGE = Image.new("RGB", (100, 100)) +SAMPLE_ANNOTATION = { + "boxes": torch.tensor([[25, 25, 75, 75]]), + "labels": torch.tensor([1]), + "area": torch.tensor([2500.0]), + "iscrowd": torch.tensor([0]), +} + +SAMPLE_ANNOTATION_OOB = { + "boxes": torch.tensor([[25, 25, 105, 105]]), # Out of bounds + "labels": torch.tensor([1]), + "area": torch.tensor([2500.0]), + "iscrowd": torch.tensor([0]), +} + +SAMPLE_ANNOTATION_WITH_MASKS = { + **SAMPLE_ANNOTATION, + 
"masks": torch.ones((1, 100, 100)), +} + +EXAMPLE_TRANSFORM = Compose( + [HorizontalFlip(p=1)], + bbox_params=BboxParams(format="coco", label_fields=["labels"]), +) +EXAMPLE_TRANSFORM_RESIZE = Compose( + [Resize(50, 50)], + bbox_params=BboxParams(format="coco", label_fields=["labels"]), +) + + +class TestAlbumentationsTransform: + def test_init(self): + transformations = EXAMPLE_TRANSFORM + at = AlbumentationsTransform(transformations) + assert isinstance(at, AlbumentationsTransform) + + def test_from_path_invalid(self): + with pytest.raises(ValueError): + AlbumentationsTransform.from_path("invalid/path/to/config.yml") + + def test_from_dict_invalid(self): + with pytest.raises(ValueError): + AlbumentationsTransform.from_dict({"invalid": "config"}) + + def test_transformations(self): + transformations = EXAMPLE_TRANSFORM + at = AlbumentationsTransform(transformations) + image, annotation = at(SAMPLE_IMAGE, SAMPLE_ANNOTATION) + assert annotation["boxes"][0, 0] != SAMPLE_ANNOTATION["boxes"][0, 0] + + def test_transformations_resize(self): + transformations = EXAMPLE_TRANSFORM_RESIZE + at = AlbumentationsTransform(transformations) + image, annotation = at(SAMPLE_IMAGE, SAMPLE_ANNOTATION) + assert image.shape[:2] == (50, 50) # We only check the height and width + + def test_boxes_out_of_bounds(self): + transformations = EXAMPLE_TRANSFORM + at = AlbumentationsTransform(transformations) + with pytest.raises(ValueError): + _, annotation = at(SAMPLE_IMAGE, SAMPLE_ANNOTATION_OOB) # Expecting the ValueError due to out of bounds + + def test_transform_with_masks(self): + transformations = EXAMPLE_TRANSFORM + at = AlbumentationsTransform(transformations) + _, annotation = at(SAMPLE_IMAGE, SAMPLE_ANNOTATION_WITH_MASKS) + assert "masks" in annotation + assert annotation["masks"].shape[0] == 1 + + def test_area_calculation_with_masks(self): + transformations = EXAMPLE_TRANSFORM + at = AlbumentationsTransform(transformations) + _, annotation = at(SAMPLE_IMAGE, SAMPLE_ANNOTATION_WITH_MASKS) + assert annotation["area"] == torch.sum(annotation["masks"]) + + def test_area_calculation_without_masks(self): + transformations = EXAMPLE_TRANSFORM + at = AlbumentationsTransform(transformations) + _, annotation = at(SAMPLE_IMAGE, SAMPLE_ANNOTATION) + area = annotation["boxes"][0, 2] * annotation["boxes"][0, 3] + + assert torch.isclose(annotation["area"], area.unsqueeze(0), atol=1e-5) # Using isclose for floating point comparison + + def test_iscrowd_unchanged(self): + transformations = EXAMPLE_TRANSFORM + at = AlbumentationsTransform(transformations) + _, annotation = at(SAMPLE_IMAGE, SAMPLE_ANNOTATION) + assert "iscrowd" in annotation + assert annotation["iscrowd"] == SAMPLE_ANNOTATION["iscrowd"] + + +if __name__ == "__main__": + pytest.run()