From 6f755d50192ccf2123352ab6d4f7fae92bdc3a07 Mon Sep 17 00:00:00 2001
From: Owen
Date: Tue, 17 Oct 2023 11:39:27 +0100
Subject: [PATCH 1/2] HOUSEKEEPING: Increase version to 0.8.45

---
 darwin/version/__init__.py | 2 +-
 pyproject.toml             | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/darwin/version/__init__.py b/darwin/version/__init__.py
index eb467a33f..8f694aac8 100644
--- a/darwin/version/__init__.py
+++ b/darwin/version/__init__.py
@@ -1 +1 @@
-__version__ = "0.8.44"
+__version__ = "0.8.45"
diff --git a/pyproject.toml b/pyproject.toml
index 89e73b70d..e2ac46574 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

 [tool.poetry]
 name = "darwin-py"
-version = "0.8.44"
+version = "0.8.45"
 description = "Library and command line interface for darwin.v7labs.com"
 homepage = "https://docs.v7labs.com/reference/getting-started-2"
 documentation = "https://darwin-py-sdk.v7labs.com/index.html"

From e9583077081a7419b0d04c8fa23d5b2318c088b8 Mon Sep 17 00:00:00 2001
From: Nathan Perkins
Date: Tue, 17 Oct 2023 13:09:13 +0100
Subject: [PATCH 2/2] [IO-1454][IO-1455][IO-1456][IO-1458] Streamlining of
 Team/Dataset deletion and creation methods (#685)

* dogfooding changes for dataset/team objects
* WIP ruff fixes
* WIP ruff fixes
* black fixes
* wip ruff
* typecheck change
* wip ruff
* comment
* comment
* ruff
* ruff
* black defaults
* black defaults
* vscode settings update
* ruff addition to dev
---
 .vscode/settings.json                         |   6 +-
 darwin/future/core/client.py                  |  51 ++++--
 darwin/future/core/datasets/__init__.py       |   8 +-
 darwin/future/core/datasets/remove_dataset.py |   4 +-
 darwin/future/core/items/__init__.py          |   4 +-
 darwin/future/core/items/get.py               |  25 ++-
 darwin/future/core/items/move_items.py        |   7 +-
 darwin/future/core/team/get_team.py           |   5 +-
 darwin/future/core/types/__init__.py          |   2 +-
 darwin/future/core/types/common.py            |  10 +-
 darwin/future/core/types/query.py             |  33 ++--
 darwin/future/core/utils/pathutils.py         |   6 +-
 darwin/future/core/workflows/__init__.py      |   6 +-
 darwin/future/core/workflows/get_workflows.py |   4 +-
 .../future/core/workflows/list_workflows.py   |   9 +-
 darwin/future/data_objects/team.py            |   2 +-
 darwin/future/data_objects/workflow.py        |   4 +-
 darwin/future/exceptions.py                   |   2 +-
 darwin/future/helpers/assertion.py            |   6 +-
 darwin/future/meta/client.py                  |   4 +-
 darwin/future/meta/objects/base.py            |   8 +-
 darwin/future/meta/objects/dataset.py         | 118 +++---------
 darwin/future/meta/objects/stage.py           |  21 ++-
 darwin/future/meta/objects/team.py            |  93 +++++++++-
 darwin/future/meta/objects/team_member.py     |   3 -
 darwin/future/meta/objects/workflow.py        |   6 +-
 darwin/future/meta/queries/dataset.py         |  10 +-
 darwin/future/meta/queries/stage.py           |  12 +-
 darwin/future/meta/queries/team_member.py     |  12 +-
 darwin/future/meta/queries/workflow.py        |  45 +++--
 darwin/future/tests/core/datasets/fixtures.py |   5 +-
 .../core/datasets/test_create_dataset.py      |  10 +-
 .../tests/core/datasets/test_list_datasets.py |   8 +-
 darwin/future/tests/core/fixtures.py          |   5 +-
 darwin/future/tests/core/items/fixtures.py    |   5 +-
 .../future/tests/core/items/test_get_items.py |  10 +-
 .../tests/core/items/test_move_items.py       |  10 +-
 darwin/future/tests/core/test_client.py       |   9 +-
 darwin/future/tests/core/test_query.py        |  15 +-
 .../tests/core/types/test_querystring.py      |   2 +-
 .../tests/core/workflows/test_get_workflow.py |   9 +-
 .../core/workflows/test_get_workflows.py      |   4 +-
 .../core/workflows/test_list_workflows.py     |   5 +-
 darwin/future/tests/data_objects/fixtures.py  |   3 -
 .../test_general_darwin_objects.py            |  10 +-
 darwin/future/tests/data_objects/test_team.py |  36 ++--
 .../data_objects/workflow/test_wfdataset.py   |   4 +-
 .../data_objects/workflow/test_wfedge.py      |   1 -
 .../data_objects/workflow/test_wfstage.py     |   1 -
 .../workflow/test_wfstage_config.py           |   1 -
 .../data_objects/workflow/test_wfuser.py      |   1 -
 darwin/future/tests/meta/fixtures.py          |   2 +-
 darwin/future/tests/meta/objects/fixtures.py  |  18 +-
 .../tests/meta/objects/test_datasetmeta.py    | 170 ++++--------------
 .../tests/meta/objects/test_stagemeta.py      |  30 +++-
 .../tests/meta/objects/test_teammeta.py       | 170 +++++++++++++++++-
 .../future/tests/meta/queries/test_dataset.py |  36 ++--
 .../future/tests/meta/queries/test_stage.py   |  42 +++--
 .../tests/meta/queries/test_team_member.py    |  26 ++-
 .../tests/meta/queries/test_workflow.py       |  76 +++++---
 darwin/future/tests/meta/test_client.py       |  13 +-
 deploy/format_lint.sh                         |   2 +
 poetry.lock                                   | 126 +++++++++++--
 pyproject.toml                                |  11 +-
 64 files changed, 918 insertions(+), 484 deletions(-)

diff --git a/.vscode/settings.json b/.vscode/settings.json
index bfd0b9283..28f99b8d9 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -13,15 +13,11 @@
     "editor.insertSpaces": true,
     "editor.tabSize": 2
   },
-  "python.formatting.blackPath": "black",
-  "python.formatting.provider": "black",
-  "python.linting.mypyEnabled": true,
   "isort.args": [
     "--profile",
     "black"
   ],
   "python.analysis.autoImportCompletions": true,
   "python.testing.pytestEnabled": true,
-  "python.linting.enabled": true,
-  "python.analysis.typeCheckingMode": "basic"
+  "python.analysis.typeCheckingMode": "basic",
 }
\ No newline at end of file
diff --git a/darwin/future/core/client.py b/darwin/future/core/client.py
index 8787170b5..7e6d4387b 100644
--- a/darwin/future/core/client.py
+++ b/darwin/future/core/client.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

 from pathlib import Path
-from typing import Any, Callable, Dict, List, Optional, Union, overload
+from typing import Callable, Dict, Optional, overload
 from urllib.parse import urlparse

 import requests
@@ -41,7 +41,10 @@ def validate_base_url(cls, v: str) -> str:
         if not v.endswith("/"):
             v += "/"
         check = urlparse(v)
-        assert check.scheme in {"http", "https"}, "base_url must start with http or https"
+        assert check.scheme in {
+            "http",
+            "https",
+        }, "base_url must start with http or https"
         assert check.netloc, "base_url must contain a domain"
         return v

@@ -136,7 +139,9 @@ def __init__(self, config: DarwinConfig, retries: Optional[Retry] = None) -> Non
         self.config = config
         self.session = requests.Session()
         if not retries:
-            retries = Retry(total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504])
+            retries = Retry(
+                total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504]
+            )
         self._setup_session(retries)
         self._mappings = {
             "get": self.session.get,
@@ -153,20 +158,32 @@ def _setup_session(self, retries: Retry) -> None:

     @property
     def headers(self) -> Dict[str, str]:
-        http_headers: Dict[str, str] = {"Content-Type": "application/json", "Accept": "application/json"}
+        http_headers: Dict[str, str] = {
+            "Content-Type": "application/json",
+            "Accept": "application/json",
+        }
         if self.config.api_key:
             http_headers["Authorization"] = f"ApiKey {self.config.api_key}"
         return http_headers

     @overload
-    def _generic_call(self, method: Callable[[str], requests.Response], endpoint: str) -> dict:
+    def _generic_call(
+        self, method: Callable[[str], requests.Response], endpoint: str
+    ) -> dict:
         ...

     @overload
-    def _generic_call(self, method: Callable[[str, dict], requests.Response], endpoint: str, payload: dict) -> dict:
+    def _generic_call(
+        self,
+        method: Callable[[str, dict], requests.Response],
+        endpoint: str,
+        payload: dict,
+    ) -> dict:
         ...

-    def _generic_call(self, method: Callable, endpoint: str, payload: Optional[dict] = None) -> JSONType:
+    def _generic_call(
+        self, method: Callable, endpoint: str, payload: Optional[dict] = None
+    ) -> JSONType:
         endpoint = self._sanitize_endpoint(endpoint)
         url = self.config.api_endpoint + endpoint
         if payload is not None:
@@ -179,15 +196,21 @@ def _generic_call(self, method: Callable, endpoint: str, payload: Optional[dict]

         return response.json()

-    def _contain_qs_and_endpoint(self, endpoint: str, query_string: Optional[QueryString] = None) -> str:
+    def _contain_qs_and_endpoint(
+        self, endpoint: str, query_string: Optional[QueryString] = None
+    ) -> str:
         if not query_string:
             return endpoint

         assert "?" not in endpoint
         return endpoint + str(query_string)

-    def get(self, endpoint: str, query_string: Optional[QueryString] = None) -> JSONType:
-        return self._generic_call(self.session.get, self._contain_qs_and_endpoint(endpoint, query_string))
+    def get(
+        self, endpoint: str, query_string: Optional[QueryString] = None
+    ) -> JSONType:
+        return self._generic_call(
+            self.session.get, self._contain_qs_and_endpoint(endpoint, query_string)
+        )

     def put(self, endpoint: str, data: dict) -> JSONType:
         return self._generic_call(self.session.put, endpoint, data)
@@ -195,8 +218,12 @@ def put(self, endpoint: str, data: dict) -> JSONType:
     def post(self, endpoint: str, data: dict) -> JSONType:
         return self._generic_call(self.session.post, endpoint, data)

-    def delete(self, endpoint: str, query_string: Optional[QueryString] = None) -> JSONType:
-        return self._generic_call(self.session.delete, self._contain_qs_and_endpoint(endpoint, query_string))
+    def delete(
+        self, endpoint: str, query_string: Optional[QueryString] = None
+    ) -> JSONType:
+        return self._generic_call(
+            self.session.delete, self._contain_qs_and_endpoint(endpoint, query_string)
+        )

     def patch(self, endpoint: str, data: dict) -> JSONType:
         return self._generic_call(self.session.patch, endpoint, data)
diff --git a/darwin/future/core/datasets/__init__.py b/darwin/future/core/datasets/__init__.py
index a5ccb19d5..ff1f72c59 100644
--- a/darwin/future/core/datasets/__init__.py
+++ b/darwin/future/core/datasets/__init__.py
@@ -1,4 +1,4 @@
-from darwin.future.core.datasets.create_dataset import *
-from darwin.future.core.datasets.get_dataset import *
-from darwin.future.core.datasets.list_datasets import *
-from darwin.future.core.datasets.remove_dataset import *
+from darwin.future.core.datasets.create_dataset import create_dataset
+from darwin.future.core.datasets.get_dataset import get_dataset
+from darwin.future.core.datasets.list_datasets import list_datasets
+from darwin.future.core.datasets.remove_dataset import remove_dataset
diff --git a/darwin/future/core/datasets/remove_dataset.py b/darwin/future/core/datasets/remove_dataset.py
index 26abd7a08..86e21de1f 100644
--- a/darwin/future/core/datasets/remove_dataset.py
+++ b/darwin/future/core/datasets/remove_dataset.py
@@ -4,7 +4,9 @@
 from darwin.future.exceptions import DatasetNotFound


-def remove_dataset(api_client: ClientCore, id: int, team_slug: Optional[str] = None) -> int:
+def remove_dataset(
+    api_client: ClientCore, id: int, team_slug: Optional[str] = None
+) -> int:
     """
     Creates a new dataset for the given team
diff --git a/darwin/future/core/items/__init__.py b/darwin/future/core/items/__init__.py
index 619a680fe..a56cb784d 100644
--- a/darwin/future/core/items/__init__.py
+++ b/darwin/future/core/items/__init__.py
@@ -1,2 +1,2 @@
-from darwin.future.core.items.get import *
-from darwin.future.core.items.move_items import *
+from darwin.future.core.items.get import get_item_ids, get_item_ids_stage
+from darwin.future.core.items.move_items import move_items_to_stage
diff --git a/darwin/future/core/items/get.py b/darwin/future/core/items/get.py
index 8ac7a7559..37bb6fd4c 100644
--- a/darwin/future/core/items/get.py
+++ b/darwin/future/core/items/get.py
@@ -5,7 +5,9 @@
 from darwin.future.core.types.common import QueryString


-def get_item_ids(api_client: ClientCore, team_slug: str, dataset_id: Union[str, int]) -> List[UUID]:
+def get_item_ids(
+    api_client: ClientCore, team_slug: str, dataset_id: Union[str, int]
+) -> List[UUID]:
     """
     Returns a list of item ids for the dataset

@@ -26,15 +28,24 @@ def get_item_ids(api_client: ClientCore, team_slug: str, dataset_id: Union[str,

     response = api_client.get(
         f"/v2/teams/{team_slug}/items/ids",
-        QueryString({"not_statuses": "archived,error", "sort[id]": "desc", "dataset_ids": str(dataset_id)}),
+        QueryString(
+            {
+                "not_statuses": "archived,error",
+                "sort[id]": "desc",
+                "dataset_ids": str(dataset_id),
+            }
+        ),
     )
-    assert type(response) == dict
+    assert isinstance(response, dict)
     uuids = [UUID(uuid) for uuid in response["item_ids"]]
     return uuids


 def get_item_ids_stage(
-    api_client: ClientCore, team_slug: str, dataset_id: Union[int, str], stage_id: Union[UUID, str]
+    api_client: ClientCore,
+    team_slug: str,
+    dataset_id: Union[int, str],
+    stage_id: Union[UUID, str],
 ) -> List[UUID]:
     """
     Returns a list of item ids for the stage
@@ -57,8 +68,10 @@ def get_item_ids_stage(
     """
     response = api_client.get(
         f"/v2/teams/{team_slug}/items/ids",
-        QueryString({"workflow_stage_ids": str(stage_id), "dataset_ids": str(dataset_id)}),
+        QueryString(
+            {"workflow_stage_ids": str(stage_id), "dataset_ids": str(dataset_id)}
+        ),
     )
-    assert type(response) == dict
+    assert isinstance(response, dict)
     uuids = [UUID(uuid) for uuid in response["item_ids"]]
     return uuids
diff --git a/darwin/future/core/items/move_items.py b/darwin/future/core/items/move_items.py
index 32a39b86f..b28e9ab5b 100644
--- a/darwin/future/core/items/move_items.py
+++ b/darwin/future/core/items/move_items.py
@@ -6,7 +6,12 @@

 def move_items_to_stage(
-    api_client: ClientCore, team_slug: str, workflow_id: UUID, dataset_id: int, stage_id: UUID, item_ids: List[UUID]
+    api_client: ClientCore,
+    team_slug: str,
+    workflow_id: UUID,
+    dataset_id: int,
+    stage_id: UUID,
+    item_ids: List[UUID],
 ) -> JSONType:
     """
     Moves a list of items to a stage
diff --git a/darwin/future/core/team/get_team.py b/darwin/future/core/team/get_team.py
index 619c5e649..5570099ef 100644
--- a/darwin/future/core/team/get_team.py
+++ b/darwin/future/core/team/get_team.py
@@ -1,7 +1,6 @@
 from typing import List, Optional, Tuple

 from darwin.future.core.client import ClientCore
-from darwin.future.core.types.common import JSONType
 from darwin.future.data_objects.team import TeamCore, TeamMemberCore


@@ -13,7 +12,9 @@ def get_team(client: ClientCore, team_slug: Optional[str] = None) -> TeamCore:
     return TeamCore.parse_obj(response)


-def get_team_members(client: ClientCore) -> Tuple[List[TeamMemberCore], List[Exception]]:
+def get_team_members(
+    client: ClientCore,
+) -> Tuple[List[TeamMemberCore], List[Exception]]:
     response = client.get("/memberships")
     members = []
     errors = []
diff --git a/darwin/future/core/types/__init__.py b/darwin/future/core/types/__init__.py
index 55e5f844b..e52d13056 100644
--- a/darwin/future/core/types/__init__.py
+++ b/darwin/future/core/types/__init__.py
@@ -1 +1 @@
-from .common import *
+from .common import JSONType, QueryString, TeamSlug
diff --git a/darwin/future/core/types/common.py b/darwin/future/core/types/common.py
index 297eb3d4b..d7a665d79 100644
--- a/darwin/future/core/types/common.py
+++ b/darwin/future/core/types/common.py
@@ -1,7 +1,5 @@
 from typing import Any, Dict, List, Union

-import pydantic
-from pydantic import BaseModel

 from darwin.future.data_objects import validators as darwin_validators
 from darwin.future.data_objects.typing import UnknownType
@@ -21,8 +19,12 @@ def __get_validators__(cls):  # type: ignore

     @classmethod
     def validate(cls, v: str) -> "TeamSlug":
-        assert len(v) < cls.max_length, f"maximum length for team slug is {cls.max_length}"
-        assert len(v) > cls.min_length, f"minimum length for team slug is {cls.min_length}"
+        assert (
+            len(v) < cls.max_length
+        ), f"maximum length for team slug is {cls.max_length}"
+        assert (
+            len(v) > cls.min_length
+        ), f"minimum length for team slug is {cls.min_length}"
         if not isinstance(v, str):
             raise TypeError("string required")
         modified_value = darwin_validators.parse_name(v)
diff --git a/darwin/future/core/types/query.py b/darwin/future/core/types/query.py
index ac45d2903..fca744c4f 100644
--- a/darwin/future/core/types/query.py
+++ b/darwin/future/core/types/query.py
@@ -9,9 +9,7 @@
     Generic,
     List,
     Optional,
-    Tuple,
     TypeVar,
-    overload,
 )

 from darwin.future.core.client import ClientCore
@@ -54,7 +52,9 @@ class QueryFilter(DefaultDarwin):

     def filter_attr(self, attr: Any) -> bool:  # type: ignore
         caster: Callable[[str], Any] = type(attr)  # type: ignore
-        param = caster(self.param)  # attempt to cast the param to the type of the attribute
+        param = caster(
+            self.param
+        )  # attempt to cast the param to the type of the attribute
         if self.modifier is None:
             return attr == param
         elif self.modifier == Modifier.GREATER_EQUAL:
@@ -75,7 +75,9 @@ def filter_attr(self, attr: Any) -> bool:  # type: ignore
     @classmethod
     def _from_dict(cls, d: Dict[str, Any]) -> QueryFilter:  # type: ignore
         if "name" not in d or "param" not in d:
-            raise InvalidQueryFilter(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {d}")
+            raise InvalidQueryFilter(
+                f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {d}"
+            )
         modifier = Modifier(d["modifier"]) if "modifier" in d else None
         return QueryFilter(name=d["name"], param=str(d["param"]), modifier=modifier)
@@ -95,7 +97,9 @@ def _from_arg(cls, arg: object) -> QueryFilter:
         elif isinstance(arg, dict):
             return cls._from_dict(arg)
         else:
-            raise InvalidQueryFilter(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {arg}")
+            raise InvalidQueryFilter(
+                f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {arg}"
+            )

     @classmethod
     def _from_kwarg(cls, key: str, value: str) -> QueryFilter:
@@ -117,9 +121,12 @@ class Query(Generic[T], ABC):
     """

     def __init__(
-        self, client: ClientCore, filters: Optional[List[QueryFilter]] = None, meta_params: Optional[Param] = None
+        self,
+        client: ClientCore,
+        filters: Optional[List[QueryFilter]] = None,
+        meta_params: Optional[Param] = None,
     ):
-        self.meta_params: dict = meta_params or dict()
+        self.meta_params: dict = meta_params or {}
         self.client = client
         self.filters = filters or []
         self.results: Optional[List[T]] = None
@@ -130,12 +137,16 @@ def filter(self, filter: QueryFilter) -> Query[T]:

     def __add__(self, filter: QueryFilter) -> Query[T]:
         self._changed_since_last = True
-        return self.__class__(self.client, filters=[*self.filters, filter], meta_params=self.meta_params)
+        return self.__class__(
+            self.client, filters=[*self.filters, filter], meta_params=self.meta_params
+        )

     def __sub__(self, filter: QueryFilter) -> Query[T]:
         self._changed_since_last = True
         return self.__class__(
-            self.client, filters=[f for f in self.filters if f != filter], meta_params=self.meta_params
+            self.client,
+            filters=[f for f in self.filters if f != filter],
+            meta_params=self.meta_params,
         )

     def __iadd__(self, filter: QueryFilter) -> Query[T]:
@@ -212,4 +223,6 @@ def first(self) -> Optional[T]:
         return self.results[0]

     def _generic_execute_filter(self, objects: List[T], filter: QueryFilter) -> List[T]:
-        return [m for m in objects if filter.filter_attr(getattr(m._element, filter.name))]
+        return [
+            m for m in objects if filter.filter_attr(getattr(m._element, filter.name))
+        ]
diff --git a/darwin/future/core/utils/pathutils.py b/darwin/future/core/utils/pathutils.py
index ccaebdeba..68f5734ec 100644
--- a/darwin/future/core/utils/pathutils.py
+++ b/darwin/future/core/utils/pathutils.py
@@ -1,6 +1,6 @@
 import json
 from pathlib import Path
-from typing import Any, Optional
+from typing import Optional

 import yaml

@@ -29,7 +29,9 @@ def attempt_open(path: Path) -> dict:
             return open_json(path, encoding)
         except Exception:
             pass
-    raise UnrecognizableFileEncoding(f"Unable to load file {path} with any encodings: {ENCODINGS}")
+    raise UnrecognizableFileEncoding(
+        f"Unable to load file {path} with any encodings: {ENCODINGS}"
+    )


 def open_yaml(path: Path, encoding: Optional[str] = None) -> dict:
diff --git a/darwin/future/core/workflows/__init__.py b/darwin/future/core/workflows/__init__.py
index c2282b65a..cbecab1e8 100644
--- a/darwin/future/core/workflows/__init__.py
+++ b/darwin/future/core/workflows/__init__.py
@@ -1,3 +1,3 @@
-from darwin.future.core.workflows.get_workflow import *
-from darwin.future.core.workflows.get_workflows import *
-from darwin.future.core.workflows.list_workflows import *
+from darwin.future.core.workflows.get_workflow import get_workflow
+from darwin.future.core.workflows.get_workflows import get_workflows
+from darwin.future.core.workflows.list_workflows import list_workflows
diff --git a/darwin/future/core/workflows/get_workflows.py b/darwin/future/core/workflows/get_workflows.py
index 8664d1f74..97805f131 100644
--- a/darwin/future/core/workflows/get_workflows.py
+++ b/darwin/future/core/workflows/get_workflows.py
@@ -6,7 +6,9 @@
 from darwin.future.data_objects.workflow import WorkflowCore


-def get_workflows(client: ClientCore, team_slug: Optional[str] = None) -> List[WorkflowCore]:
+def get_workflows(
+    client: ClientCore, team_slug: Optional[str] = None
+) -> List[WorkflowCore]:
     team_slug = team_slug or client.config.default_team
     response = client.get(f"/v2/teams/{team_slug}/workflows?worker=false")

diff --git a/darwin/future/core/workflows/list_workflows.py b/darwin/future/core/workflows/list_workflows.py
index e28e766d5..9b9cf8ec6 100644
--- a/darwin/future/core/workflows/list_workflows.py
+++ b/darwin/future/core/workflows/list_workflows.py
@@ -1,12 +1,13 @@
 from typing import List, Optional, Tuple

-from pydantic import ValidationError

 from darwin.future.core.client import ClientCore
 from darwin.future.data_objects.workflow import WorkflowCore, WorkflowListValidator


-def list_workflows(client: ClientCore, team_slug: Optional[str] = None) -> Tuple[List[WorkflowCore], List[Exception]]:
+def list_workflows(
+    client: ClientCore, team_slug: Optional[str] = None
+) -> Tuple[List[WorkflowCore], List[Exception]]:
     """
     Returns a list of workflows for the given team
@@ -28,7 +29,9 @@ def list_workflows(client: ClientCore, team_slug: Optional[str] = None) -> Tuple
         team_slug = team_slug or client.config.default_team
         response = client.get(f"/v2/teams/{team_slug}/workflows?worker=false")
         list_of_workflows = WorkflowListValidator(list=response)  # type: ignore
-        workflows = [WorkflowCore.parse_obj(workflow) for workflow in list_of_workflows.list]
+        workflows = [
+            WorkflowCore.parse_obj(workflow) for workflow in list_of_workflows.list
+        ]
     except Exception as e:
         exceptions.append(e)

diff --git a/darwin/future/data_objects/team.py b/darwin/future/data_objects/team.py
index c766fd591..da697ec90 100644
--- a/darwin/future/data_objects/team.py
+++ b/darwin/future/data_objects/team.py
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import List, Optional, Tuple
+from typing import List, Optional

 from pydantic import validator
diff --git a/darwin/future/data_objects/workflow.py b/darwin/future/data_objects/workflow.py
index 0af79c14f..47e4d2d11 100644
--- a/darwin/future/data_objects/workflow.py
+++ b/darwin/future/data_objects/workflow.py
@@ -59,7 +59,9 @@ class WFEdgeCore(DefaultDarwin):
     @root_validator(pre=True)
     def _one_or_both_must_exist(cls, values: dict) -> dict:
         if not values["source_stage_id"] and not values["target_stage_id"]:
-            raise ValueError("One or both of source_stage_id and target_stage_id must be defined")
+            raise ValueError(
+                "One or both of source_stage_id and target_stage_id must be defined"
+            )

         return values
diff --git a/darwin/future/exceptions.py b/darwin/future/exceptions.py
index bfd6cd0b3..e68fd5e8f 100644
--- a/darwin/future/exceptions.py
+++ b/darwin/future/exceptions.py
@@ -1,4 +1,4 @@
-from typing import Any, List, Optional
+from typing import List, Optional

 from darwin.future.data_objects.typing import KeyValuePairDict, UnknownType
diff --git a/darwin/future/helpers/assertion.py b/darwin/future/helpers/assertion.py
index 5cecf467c..04c5a71c5 100644
--- a/darwin/future/helpers/assertion.py
+++ b/darwin/future/helpers/assertion.py
@@ -1,6 +1,10 @@
 from typing import Type


-def assert_is(conditional: bool, message: str, exception_factory: Type[BaseException] = AssertionError) -> None:
+def assert_is(
+    conditional: bool,
+    message: str,
+    exception_factory: Type[BaseException] = AssertionError,
+) -> None:
     if not conditional:
         raise exception_factory(message)
diff --git a/darwin/future/meta/client.py b/darwin/future/meta/client.py
index 8d230dfb2..ceb4a182f 100644
--- a/darwin/future/meta/client.py
+++ b/darwin/future/meta/client.py
@@ -1,14 +1,12 @@
 from __future__ import annotations

 from pathlib import Path
-from typing import List, Optional
+from typing import Optional

 from requests.adapters import Retry

 from darwin.future.core.client import ClientCore, DarwinConfig
 from darwin.future.meta.objects.team import Team
-from darwin.future.meta.objects.workflow import Workflow
-from darwin.future.meta.queries.workflow import WorkflowQuery


 class Client(ClientCore):
diff --git a/darwin/future/meta/objects/base.py b/darwin/future/meta/objects/base.py
index aa51789f1..317ed388e 100644
--- a/darwin/future/meta/objects/base.py
+++ b/darwin/future/meta/objects/base.py
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import Dict, Generic, List, Optional, TypeVar
+from typing import Dict, Generic, Optional, TypeVar

 from darwin.future.core.client import ClientCore
 from darwin.future.pydantic_base import DefaultDarwin
@@ -13,10 +13,12 @@ class MetaBase(Generic[R]):
     _element: R
     client: ClientCore

-    def __init__(self, client: ClientCore, element: R, meta_params: Optional[Param] = None) -> None:
+    def __init__(
+        self, client: ClientCore, element: R, meta_params: Optional[Param] = None
+    ) -> None:
         self.client = client
         self._element = element
-        self.meta_params = meta_params or dict()
+        self.meta_params = meta_params or {}

     def __str__(self) -> str:
         return str(self._element)
diff --git a/darwin/future/meta/objects/dataset.py b/darwin/future/meta/objects/dataset.py
index be870afaf..80c636473 100644
--- a/darwin/future/meta/objects/dataset.py
+++ b/darwin/future/meta/objects/dataset.py
@@ -1,16 +1,15 @@
 from __future__ import annotations

-from typing import List, Optional, Sequence, Tuple, Union
+from typing import List, Optional, Sequence, Union
 from uuid import UUID

 from darwin.cli_functions import upload_data
 from darwin.dataset.upload_manager import LocalFile
 from darwin.datatypes import PathLike
 from darwin.future.core.client import ClientCore
-from darwin.future.core.datasets import create_dataset, get_dataset, remove_dataset
+from darwin.future.core.datasets import create_dataset, remove_dataset
 from darwin.future.core.items import get_item_ids
 from darwin.future.data_objects.dataset import DatasetCore
-from darwin.future.exceptions import MissingDataset
 from darwin.future.helpers.assertion import assert_is
 from darwin.future.meta.objects.base import MetaBase

@@ -48,15 +47,15 @@ def item_ids(self) -> List[UUID]:
             List[UUID]: A list of item ids
         """
         assert self._element.id is not None
-        assert self.meta_params["team_slug"] is not None and type(self.meta_params["team_slug"]) == str
-        return get_item_ids(self.client, self.meta_params["team_slug"], str(self._element.id))
-
-    def get_dataset_by_id(self) -> DatasetCore:
-        # TODO: implement
-        raise NotImplementedError()
+        assert self.meta_params["team_slug"] is not None and isinstance(
+            self.meta_params["team_slug"], str
+        )
+        return get_item_ids(
+            self.client, self.meta_params["team_slug"], str(self._element.id)
+        )

     @classmethod
-    def create_dataset(cls, client: ClientCore, slug: str) -> Tuple[Optional[List[Exception]], Optional[DatasetCore]]:
+    def create_dataset(cls, client: ClientCore, slug: str) -> DatasetCore:
         """
         Creates a new dataset for the given team
@@ -71,101 +70,25 @@ def create_dataset(cls, client: ClientCore, slug: str) -> Tuple[Optional[List[Ex
             A tuple containing a list of exceptions and the dataset created
         """
-        exceptions = []
         dataset: Optional[DatasetCore] = None
-
-        try:
-            cls._validate_slug(slug)
-            dataset = create_dataset(client, slug)
-        except Exception as e:
-            exceptions.append(e)
-
-        return exceptions or None, dataset
+        cls._validate_slug(slug)
+        dataset = create_dataset(client, slug)
+        return dataset

     def update_dataset(self) -> DatasetCore:
         # TODO: implement in IO-1018
         raise NotImplementedError()

-    @classmethod
-    def delete_dataset(cls, client: ClientCore, dataset_id: Union[int, str]) -> Tuple[Optional[List[Exception]], int]:
+    def delete(self) -> int:
         """
-        Deletes a dataset by id or slug
-
-        Parameters
-        ----------
-        dataset_id: Union[int, str]
-            The id or slug of the dataset to delete
-
-        Returns
-        -------
-        Tuple[Optional[List[Exception]], int]
-            A tuple containing a list of exceptions and the number of datasets deleted
-        """
-        exceptions = []
-        dataset_deleted = -1
-
-        try:
-            if isinstance(dataset_id, str):
-                dataset_deleted = cls._delete_by_slug(client, dataset_id)
-            else:
-                dataset_deleted = cls._delete_by_id(client, dataset_id)
-
-        except Exception as e:
-            exceptions.append(e)
-
-        return exceptions or None, dataset_deleted
-
-    @staticmethod
-    def _delete_by_slug(client: ClientCore, slug: str) -> int:
-        """
-        (internal) Deletes a dataset by slug
-
-        Parameters
-        ----------
-        client: MetaClient
-            The client to use to make the request
-
-        slug: str
-            The slug of the dataset to delete
+        Deletes the current dataset

         Returns
         -------
         int
-            The dataset deleted
+            The id of the deleted dataset
         """
-        assert_is(isinstance(client, ClientCore), "client must be a Core Client")
-        assert_is(isinstance(slug, str), "slug must be a string")
-
-        dataset = get_dataset(client, slug)
-        if dataset and dataset.id:
-            dataset_deleted = remove_dataset(client, dataset.id)
-        else:
-            raise MissingDataset(f"Dataset with slug {slug} not found")
-
-        return dataset_deleted
-
-    @staticmethod
-    def _delete_by_id(client: ClientCore, dataset_id: int) -> int:
-        """
-        (internal) Deletes a dataset by id
-
-        Parameters
-        ----------
-        client: Client
-            The client to use to make the request
-
-        dataset_id: int
-            The id of the dataset to delete
-
-        Returns
-        -------
-        int
-            The dataset deleted
-        """
-        assert_is(isinstance(client, ClientCore), "client must be a Client")
-        assert_is(isinstance(dataset_id, int), "dataset_id must be an integer")
-
-        dataset_deleted = remove_dataset(client, dataset_id)
+        dataset_deleted = remove_dataset(self.client, self.id)
         return dataset_deleted

     @staticmethod
@@ -187,7 +110,10 @@ def _validate_slug(slug: str) -> None:
         assert_is(len(slug_copy) > 0, "slug must not be empty")

         VALID_SLUG_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
-        assert_is(all(c in VALID_SLUG_CHARS for c in slug_copy), "slug must only contain valid characters")
+        assert_is(
+            all(c in VALID_SLUG_CHARS for c in slug_copy),
+            "slug must only contain valid characters",
+        )

     def upload_files(
         self,
@@ -200,7 +126,5 @@ def upload_files(
         preserve_folders: bool = False,
         verbose: bool = False,
     ) -> Dataset:
-        upload_data(
-            self._element.name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose  # type: ignore
-        )
+        upload_data(self._element.name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose)  # type: ignore
         return self
diff --git a/darwin/future/meta/objects/stage.py b/darwin/future/meta/objects/stage.py
index 85ae3b0ba..0cec82431 100644
--- a/darwin/future/meta/objects/stage.py
+++ b/darwin/future/meta/objects/stage.py
@@ -17,20 +17,29 @@ class Stage(MetaBase[WFStageCore]):

     @property
     def item_ids(self) -> List[UUID]:
-        """_summary_
+        """Item ids attached to the stage

         Returns:
-            _type_: _description_
+            List[UUID]: List of item ids
         """
         assert self._element.id is not None
         return get_item_ids_stage(
-            self.client, str(self.meta_params["team_slug"]), str(self.meta_params["dataset_id"]), self.id
+            self.client,
+            str(self.meta_params["team_slug"]),
+            str(self.meta_params["dataset_id"]),
+            self.id,
         )

     def move_attached_files_to_stage(self, new_stage_id: UUID) -> Stage:
-        assert self.meta_params["team_slug"] is not None and type(self.meta_params["team_slug"]) == str
-        assert self.meta_params["workflow_id"] is not None and type(self.meta_params["workflow_id"]) == UUID
-        assert self.meta_params["dataset_id"] is not None and type(self.meta_params["dataset_id"]) == int
+        assert self.meta_params["team_slug"] is not None and isinstance(
+            self.meta_params["team_slug"], str
+        )
+        assert self.meta_params["workflow_id"] is not None and isinstance(
+            self.meta_params["workflow_id"], UUID
+        )
+        assert self.meta_params["dataset_id"] is not None and isinstance(
+            self.meta_params["dataset_id"], int
+        )
         slug, w_id, d_id = (
             self.meta_params["team_slug"],
             self.meta_params["workflow_id"],
diff --git a/darwin/future/meta/objects/team.py b/darwin/future/meta/objects/team.py
index 4783f4484..bbc169894 100644
--- a/darwin/future/meta/objects/team.py
+++ b/darwin/future/meta/objects/team.py
@@ -1,10 +1,13 @@
-from typing import List, Optional
+from typing import List, Optional, Tuple, Union

 from darwin.future.core.client import ClientCore
+from darwin.future.core.datasets import get_dataset, remove_dataset
 from darwin.future.core.team.get_team import get_team
 from darwin.future.data_objects.team import TeamCore
+from darwin.future.exceptions import MissingDataset
 from darwin.future.helpers.assertion import assert_is
 from darwin.future.meta.objects.base import MetaBase
+from darwin.future.meta.objects.dataset import Dataset
 from darwin.future.meta.queries.dataset import DatasetQuery
 from darwin.future.meta.queries.team_member import TeamMemberQuery
 from darwin.future.meta.queries.workflow import WorkflowQuery
@@ -55,3 +58,91 @@ def workflows(self) -> WorkflowQuery:

     def __str__(self) -> str:
         return f"TeamMeta(name='{self.name}', slug='{self.slug}', id='{self.id}' - {len(self._element.members if self._element.members else [])} members)"
+
+    @classmethod
+    def delete_dataset(
+        cls, client: ClientCore, dataset_id: Union[int, str]
+    ) -> Tuple[Optional[List[Exception]], int]:
+        """
+        Deletes a dataset by id or slug
+
+        Parameters
+        ----------
+        dataset_id: Union[int, str]
+            The id or slug of the dataset to delete
+
+        Returns
+        -------
+        Tuple[Optional[List[Exception]], int]
+            A tuple containing a list of exceptions and the number of datasets deleted
+        """
+        exceptions = []
+        dataset_deleted = -1
+
+        try:
+            if isinstance(dataset_id, str):
+                dataset_deleted = cls._delete_dataset_by_slug(client, dataset_id)
+            else:
+                dataset_deleted = cls._delete_dataset_by_id(client, dataset_id)
+
+        except Exception as e:
+            exceptions.append(e)
+
+        return exceptions or None, dataset_deleted
+
+    @staticmethod
+    def _delete_dataset_by_slug(client: ClientCore, slug: str) -> int:
+        """
+        (internal) Deletes a dataset by slug
+
+        Parameters
+        ----------
+        client: MetaClient
+            The client to use to make the request
+
+        slug: str
+            The slug of the dataset to delete
+
+        Returns
+        -------
+        int
+            The dataset deleted
+        """
+        assert_is(isinstance(client, ClientCore), "client must be a Core Client")
+        assert_is(isinstance(slug, str), "slug must be a string")
+
+        dataset = get_dataset(client, slug)
+        if dataset and dataset.id:
+            dataset_deleted = remove_dataset(client, dataset.id)
+        else:
+            raise MissingDataset(f"Dataset with slug {slug} not found")
+
+        return dataset_deleted
+
+    @staticmethod
+    def _delete_dataset_by_id(client: ClientCore, dataset_id: int) -> int:
+        """
+        (internal) Deletes a dataset by id
+
+        Parameters
+        ----------
+        client: Client
+            The client to use to make the request
+
+        dataset_id: int
+            The id of the dataset to delete
+
+        Returns
+        -------
+        int
+            The dataset deleted
+        """
+        assert_is(isinstance(client, ClientCore), "client must be a Client")
+        assert_is(isinstance(dataset_id, int), "dataset_id must be an integer")
+
+        dataset_deleted = remove_dataset(client, dataset_id)
+        return dataset_deleted
+
+    def create_dataset(self, slug: str) -> Dataset:
+        core = Dataset.create_dataset(self.client, slug)
+        return Dataset(self.client, core, meta_params={"team_slug": self.slug})
diff --git a/darwin/future/meta/objects/team_member.py b/darwin/future/meta/objects/team_member.py
index 222e24454..6f87d4326 100644
--- a/darwin/future/meta/objects/team_member.py
+++ b/darwin/future/meta/objects/team_member.py
@@ -1,6 +1,3 @@
-from typing import List, Optional
-
-from darwin.future.core.client import ClientCore
 from darwin.future.data_objects.team import TeamMemberCore
 from darwin.future.data_objects.team_member_role import TeamMemberRole
 from darwin.future.meta.objects.base import MetaBase
diff --git a/darwin/future/meta/objects/workflow.py b/darwin/future/meta/objects/workflow.py
index f28671243..af6458382 100644
--- a/darwin/future/meta/objects/workflow.py
+++ b/darwin/future/meta/objects/workflow.py
@@ -1,7 +1,5 @@
 from __future__ import annotations

-from enum import auto
-from pathlib import Path
 from typing import List, Optional, Sequence, Union
 from uuid import UUID

@@ -61,9 +59,7 @@ def upload_files(
         auto_push: bool = True,
     ) -> Workflow:
         assert self._element.dataset is not None
-        upload_data(
-            self.datasets[0].name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose  # type: ignore
-        )
+        upload_data(self.datasets[0].name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose)  # type: ignore
         if auto_push:
             self.push_from_dataset_stage()
         return self
diff --git a/darwin/future/meta/queries/dataset.py b/darwin/future/meta/queries/dataset.py
index daf3dc24e..c87b8d915 100644
--- a/darwin/future/meta/queries/dataset.py
+++ b/darwin/future/meta/queries/dataset.py
@@ -3,7 +3,7 @@
 from typing import List

 from darwin.future.core.datasets import list_datasets
-from darwin.future.core.types.query import Param, Query, QueryFilter
+from darwin.future.core.types.query import Query, QueryFilter
 from darwin.future.meta.objects.dataset import Dataset


@@ -23,7 +23,9 @@ def _collect(self) -> List[Dataset]:
         if exceptions:
             # TODO: print and or raise exceptions, tbd how we want to handle this
             pass
-        datasets_meta = [Dataset(self.client, dataset) for dataset in datasets]
+        datasets_meta = [
+            Dataset(self.client, dataset, self.meta_params) for dataset in datasets
+        ]
         if not self.filters:
             self.filters = []

@@ -32,7 +34,9 @@ def _collect(self) -> List[Dataset]:

         return datasets_meta

-    def _execute_filters(self, datasets: List[Dataset], filter: QueryFilter) -> List[Dataset]:
+    def _execute_filters(
+        self, datasets: List[Dataset], filter: QueryFilter
+    ) -> List[Dataset]:
         """Executes filtering on the local list of datasets, applying special logic for role filtering
         otherwise calls the parent method for general filtering on the values of the datasets
diff --git a/darwin/future/meta/queries/stage.py b/darwin/future/meta/queries/stage.py
index 3c8bd86db..7211bb5d5 100644
--- a/darwin/future/meta/queries/stage.py
+++ b/darwin/future/meta/queries/stage.py
@@ -3,7 +3,7 @@
 from typing import List
 from uuid import UUID

-from darwin.future.core.types.query import Param, Query, QueryFilter
+from darwin.future.core.types.query import Query, QueryFilter
 from darwin.future.core.workflows import get_workflow
 from darwin.future.meta.objects.stage import Stage

@@ -16,7 +16,9 @@ def _collect(self) -> List[Stage]:
         meta_params = self.meta_params
         workflow, exceptions = get_workflow(self.client, str(workflow_id))
         assert workflow is not None
-        stages = [Stage(self.client, s, meta_params=meta_params) for s in workflow.stages]
+        stages = [
+            Stage(self.client, s, meta_params=meta_params) for s in workflow.stages
+        ]
         if not self.filters:
             self.filters = []
         for filter in self.filters:
@@ -35,5 +37,9 @@ def _execute_filter(self, stages: List[Stage], filter: QueryFilter) -> List[Stag
             List[Stage]: Filtered subset of stages
         """
         if filter.name == "role":
-            return [s for s in stages if s._element is not None and filter.filter_attr(s._element.type.value)]
+            return [
+                s
+                for s in stages
+                if s._element is not None and filter.filter_attr(s._element.type.value)
+            ]
         return super()._generic_execute_filter(stages, filter)
diff --git a/darwin/future/meta/queries/team_member.py b/darwin/future/meta/queries/team_member.py
index 0fa192208..f6eaba922 100644
--- a/darwin/future/meta/queries/team_member.py
+++ b/darwin/future/meta/queries/team_member.py
@@ -3,7 +3,7 @@
 from typing import List

 from darwin.future.core.team.get_team import get_team_members
-from darwin.future.core.types.query import Param, Query, QueryFilter
+from darwin.future.core.types.query import Query, QueryFilter
 from darwin.future.meta.objects.team_member import TeamMember


@@ -27,7 +27,9 @@ def _collect(self) -> List[TeamMember]:

         return members_meta

-    def _execute_filter(self, members: List[TeamMember], filter: QueryFilter) -> List[TeamMember]:
+    def _execute_filter(
+        self, members: List[TeamMember], filter: QueryFilter
+    ) -> List[TeamMember]:
         """Executes filtering on the local list of members, applying special logic for role filtering
         otherwise calls the parent method for general filtering on the values of the members

@@ -41,6 +43,10 @@ def _execute_filter(self, members: List[TeamMember], filter: QueryFilter) -> Lis
             List[TeamMember]: Filtered subset of members
         """
         if filter.name == "role":
-            return [m for m in members if m._element is not None and filter.filter_attr(m._element.role.value)]
+            return [
+                m
+                for m in members
+                if m._element is not None and filter.filter_attr(m._element.role.value)
+            ]
         else:
             return super()._generic_execute_filter(members, filter)
diff --git a/darwin/future/meta/queries/workflow.py b/darwin/future/meta/queries/workflow.py
index 552a49dfa..75c3a6442 100644
--- a/darwin/future/meta/queries/workflow.py
+++ b/darwin/future/meta/queries/workflow.py
@@ -3,7 +3,7 @@
 from uuid import UUID

 from darwin.exceptions import DarwinException
-from darwin.future.core.types.query import Param, Query, QueryFilter
+from darwin.future.core.types.query import Query, QueryFilter
 from darwin.future.core.workflows import list_workflows
 from darwin.future.data_objects.workflow import WFStageCore
 from darwin.future.helpers.exception_handler import handle_exception
@@ -26,7 +26,10 @@ def _collect(self) -> List[Workflow]:
         if exceptions:
             handle_exception(exceptions)
             raise DarwinException from exceptions[0]
-        workflows = [Workflow(self.client, workflow, self.meta_params) for workflow in workflows_core]
+        workflows = [
+            Workflow(self.client, workflow, self.meta_params)
+            for workflow in workflows_core
+        ]
         if not self.filters:
             return workflows
@@ -35,7 +38,9 @@ def _collect(self) -> List[Workflow]:

         return workflows

-    def _execute_filters(self, workflows: List[Workflow], filter: QueryFilter) -> List[Workflow]:
+    def _execute_filters(
+        self, workflows: List[Workflow], filter: QueryFilter
+    ) -> List[Workflow]:
         if filter.name == "id":
             id_to_find = UUID(filter.param)
             return [w for w in workflows if w.id == id_to_find]
@@ -45,25 +50,35 @@ def _execute_filters(self, workflows: List[Workflow], filter: QueryFilter) -> Li
             return [
                 w
                 for w in workflows
-                if w._element is not None and self._date_compare(w._element.inserted_at, start_date)
+                if w._element is not None
+                and self._date_compare(w._element.inserted_at, start_date)
             ]

         if filter.name == "inserted_at_end":
             end_date = datetime.fromisoformat(filter.param)
             return [
-                w for w in workflows if w._element is not None and self._date_compare(end_date, w._element.inserted_at)
+                w
+                for w in workflows
+                if w._element is not None
+                and self._date_compare(end_date, w._element.inserted_at)
             ]

         if filter.name == "updated_at_start":
             start_date = datetime.fromisoformat(filter.param)
             return [
-                w for w in workflows if w._element is not None and self._date_compare(w._element.updated_at, start_date)
+                w
+                for w in workflows
+                if w._element is not None
+                and self._date_compare(w._element.updated_at, start_date)
             ]

         if filter.name == "updated_at_end":
             end_date = datetime.fromisoformat(filter.param)
             return [
-                w for w in workflows if w._element is not None and self._date_compare(end_date, w._element.updated_at)
+                w
+                for w in workflows
+                if w._element is not None
+                and self._date_compare(end_date, w._element.updated_at)
             ]

         if filter.name == "dataset_id":
@@ -78,14 +93,20 @@ def _execute_filters(self, workflows: List[Workflow], filter: QueryFilter) -> Li

         if filter.name == "dataset_name":
             datasets_to_find_name: List[str] = [str(s) for s in filter.param.split(",")]
-            return [w for w in workflows if w._element is not None and str(w._element.dataset) in datasets_to_find_name]
+            return [
+                w
+                for w in workflows
+                if w._element is not None
+                and str(w._element.dataset) in datasets_to_find_name
+            ]

         if filter.name == "has_stages":
-            stages_to_find = [s for s in filter.param.split(",")]
+            stages_to_find = list(filter.param.split(","))
             return [
                 w
                 for w in workflows
-                if w._element is not None and self._stages_contains(w._element.stages, stages_to_find)
+                if w._element is not None
+                and self._stages_contains(w._element.stages, stages_to_find)
             ]

         return self._generic_execute_filter(workflows, filter)
@@ -95,6 +116,8 @@ def _date_compare(cls, date1: datetime, date2: datetime) -> bool:
         return date1.astimezone(timezone.utc) >= date2.astimezone(timezone.utc)

     @classmethod
-    def _stages_contains(cls, stages: List[WFStageCore], stages_to_find: List[str]) -> bool:
+    def _stages_contains(
+        cls, stages: List[WFStageCore], stages_to_find: List[str]
+    ) -> bool:
         stage_ids = [str(s.id) for s in stages]
         return any(stage_to_find in stage_ids for stage_to_find in stages_to_find)
diff --git a/darwin/future/tests/core/datasets/fixtures.py b/darwin/future/tests/core/datasets/fixtures.py
index bbf044840..0f9489544 100644
--- a/darwin/future/tests/core/datasets/fixtures.py
+++ b/darwin/future/tests/core/datasets/fixtures.py
@@ -81,4 +81,7 @@ def happy_get_client() -> ClientCore:

 @fixture
 def sad_client_pydantic() -> ClientCore:
-    return MagicMock(ClientCore, side_effect=ValidationError(["error1", "error2", "error3"], model=BaseModel))
+    return MagicMock(
+        ClientCore,
+        side_effect=ValidationError(["error1", "error2", "error3"], model=BaseModel),
+    )
diff --git a/darwin/future/tests/core/datasets/test_create_dataset.py b/darwin/future/tests/core/datasets/test_create_dataset.py
index fd323fed1..26630ac44 100644
--- a/darwin/future/tests/core/datasets/test_create_dataset.py
+++ b/darwin/future/tests/core/datasets/test_create_dataset.py
@@ -1,5 +1,3 @@
-from typing import Union
-
 import responses
 from pytest import raises
 from requests import HTTPError
@@ -12,7 +10,9 @@
 from .fixtures import *  # noqa: F401, F403


-def test_it_creates_a_dataset(basic_dataset: DatasetCore, base_client: ClientCore) -> None:
+def test_it_creates_a_dataset(
+    basic_dataset: DatasetCore, base_client: ClientCore
+) -> None:
     with responses.RequestsMock() as rsps:
         rsps.add(
             rsps.POST,
@@ -26,7 +26,9 @@ def test_it_creates_a_dataset(basic_dataset: DatasetCore, base_client: ClientCor
             assert dataset.slug == "1337"


-def test_it_raises_an_error_on_http_error(basic_dataset: DatasetCore, base_client: ClientCore) -> None:
+def test_it_raises_an_error_on_http_error(
+    basic_dataset: DatasetCore, base_client: ClientCore
+) -> None:
     with raises(HTTPError):
         with responses.RequestsMock() as rsps:
             rsps.add(
diff --git a/darwin/future/tests/core/datasets/test_list_datasets.py b/darwin/future/tests/core/datasets/test_list_datasets.py
index 641b7331f..a4c7ac4a2 100644
--- a/darwin/future/tests/core/datasets/test_list_datasets.py
+++ b/darwin/future/tests/core/datasets/test_list_datasets.py
@@ -11,7 +11,9 @@
 from .fixtures import *


-def test_it_lists_datasets(base_client: ClientCore, basic_list_of_datasets: List[DatasetCore]) -> None:
+def test_it_lists_datasets(
+    base_client: ClientCore, basic_list_of_datasets: List[DatasetCore]
+) -> None:
     with responses.RequestsMock() as rsps:
         rsps.add(
             rsps.GET,
@@ -29,7 +31,9 @@ def test_it_lists_datasets(base_client: ClientCore, basic_list_of_datasets: List
         assert datasets[0].slug == "1337"


-def test_it_returns_an_error_if_the_client_returns_an_http_error(base_client: ClientCore) -> None:
+def test_it_returns_an_error_if_the_client_returns_an_http_error(
+    base_client: ClientCore,
+) -> None:
     with responses.RequestsMock() as rsps:
         rsps.add(
             rsps.GET,
diff --git a/darwin/future/tests/core/fixtures.py b/darwin/future/tests/core/fixtures.py
index f929d191c..0d4782134 100644
--- a/darwin/future/tests/core/fixtures.py
+++ b/darwin/future/tests/core/fixtures.py
@@ -8,7 +8,6 @@
 from darwin.future.data_objects.dataset import DatasetCore
 from darwin.future.data_objects.team import TeamCore, TeamMemberCore
 from darwin.future.data_objects.team_member_role import TeamMemberRole
-from darwin.future.data_objects.workflow import WFTypeCore


 @pytest.fixture
@@ -128,7 +127,9 @@ def transform_dataset(dataset_json_dict: dict, id: int) -> dict:
     dataset["id"] = id
     dataset["slug"] = f"{dataset['slug']}-{id}"
     dataset["name"] = f"{dataset['name']} {id}"
-    dataset["releases"] = [{"name": "release2"}] if id % 2 == 0 else [{"name": "release1"}]
+    dataset["releases"] = (
+        [{"name": "release2"}] if id % 2 == 0 else [{"name": "release1"}]
+    )
     return dataset
diff --git a/darwin/future/tests/core/items/fixtures.py b/darwin/future/tests/core/items/fixtures.py
index 9f15db778..856abc6bd 100644
--- a/darwin/future/tests/core/items/fixtures.py
+++ b/darwin/future/tests/core/items/fixtures.py
@@ -8,14 +8,17 @@
 def UUIDs() -> List[UUID]:
     return [uuid4() for i in range(10)]

+
 @pytest.fixture
 def UUIDs_str(UUIDs: List[UUID]) -> List[str]:
     return [str(uuid) for uuid in UUIDs]

+
 @pytest.fixture
 def stage_id() -> UUID:
     return uuid4()

+
 @pytest.fixture
 def workflow_id() -> UUID:
-    return uuid4()
\ No newline at end of file
+    return uuid4()
diff --git a/darwin/future/tests/core/items/test_get_items.py b/darwin/future/tests/core/items/test_get_items.py
index 0bee40afd..e2af43eba 100644
--- a/darwin/future/tests/core/items/test_get_items.py
+++ b/darwin/future/tests/core/items/test_get_items.py
@@ -9,12 +9,14 @@
 from darwin.future.tests.core.items.fixtures import *


-def test_get_item_ids(UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore) -> None:
+def test_get_item_ids(
+    UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore
+) -> None:
     with responses.RequestsMock() as rsps:
         rsps.add(
             rsps.GET,
             base_client.config.api_endpoint
-            + f"v2/teams/default-team/items/ids?not_statuses=archived,error&sort[id]=desc&dataset_ids=1337",
+            + "v2/teams/default-team/items/ids?not_statuses=archived,error&sort[id]=desc&dataset_ids=1337",
             json={"item_ids": UUIDs_str},
             status=200,
         )
         item_ids = get_item_ids(base_client, "default-team", "1337")
         assert item_ids == UUIDs


-def test_get_item_ids_stage(UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore) -> None:
+def test_get_item_ids_stage(
+    UUIDs: List[UUID], UUIDs_str: List[str], base_client: ClientCore
+) -> None:
     stage_id = str(uuid4())
     with responses.RequestsMock() as rsps:
         rsps.add(
diff --git a/darwin/future/tests/core/items/test_move_items.py b/darwin/future/tests/core/items/test_move_items.py
index e3ea7bea6..f9c357c76 100644
--- a/darwin/future/tests/core/items/test_move_items.py
+++ b/darwin/future/tests/core/items/test_move_items.py
@@ -1,5 +1,5 @@
 from typing import Dict, List
-from uuid import UUID, uuid4
+from uuid import UUID

 import pytest
 import responses
@@ -37,5 +37,9 @@ def test_move_items(
             json={"success": UUIDs_str},
             status=200,
         )
-        item_ids = move_items_to_stage(base_client, "default-team", workflow_id, 1337, stage_id, UUIDs)
-        assert rsps.assert_call_count(base_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1)
+        move_items_to_stage(
+            base_client, "default-team", workflow_id, 1337, stage_id, UUIDs
+        )
+        assert rsps.assert_call_count(
+            base_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1
+        )
diff --git a/darwin/future/tests/core/test_client.py b/darwin/future/tests/core/test_client.py
index 2ee03ad2c..6b5195698 100644
--- a/darwin/future/tests/core/test_client.py
+++ b/darwin/future/tests/core/test_client.py
@@ -1,4 +1,3 @@
-import unittest
 from pathlib import Path

 import pytest
@@ -6,7 +5,7 @@
 from pydantic import ValidationError
 from requests import HTTPError

-from darwin.future.core.client import ClientCore, DarwinConfig, TeamsConfig
+from darwin.future.core.client import ClientCore, DarwinConfig
 from darwin.future.exceptions import DarwinException, NotFound, Unauthorized
 from darwin.future.tests.core.fixtures import *
 from darwin.future.tests.fixtures import *
@@ -35,7 +34,7 @@ def test_config_base_url(base_config: DarwinConfig) -> None:
 @pytest.mark.parametrize("base_url", ["test_url.com", "ftp://test_url.com", ""])
 def test_invalid_config_url_validation(base_url: str, tmp_path: Path) -> None:
     with pytest.raises(ValidationError):
-        config = DarwinConfig(
+        DarwinConfig(
             api_key="test_key",
             datasets_dir=tmp_path,
             api_endpoint="http://test_url.com/api/",
@@ -92,7 +91,9 @@ def test_client(base_client: ClientCore) -> None:
     "status_code, exception",
     [(401, Unauthorized), (404, NotFound)],
 )
-def test_client_raises_darwin(status_code: int, exception: DarwinException, base_client: ClientCore) -> None:
+def test_client_raises_darwin(
+    status_code: int, exception: DarwinException, base_client: ClientCore
+) -> None:
     endpoint = base_client.config.api_endpoint + "test_endpoint"
     with responses.RequestsMock() as rsps:
         rsps.add(responses.GET, endpoint, json={"test": "test"}, status=status_code)
diff --git a/darwin/future/tests/core/test_query.py b/darwin/future/tests/core/test_query.py
index 5008c195f..77fd612ab 100644
--- a/darwin/future/tests/core/test_query.py
+++ b/darwin/future/tests/core/test_query.py
@@ -34,14 +34,18 @@ def test_team() -> TeamCore:


 def test_query_instantiated(
-    base_client: ClientCore, basic_filters: List[Query.QueryFilter], non_abc_query: Type[Query.Query]
+    base_client: ClientCore,
+    basic_filters: List[Query.QueryFilter],
+    non_abc_query: Type[Query.Query],
 ) -> None:
     q = non_abc_query(base_client, basic_filters)
     assert q.filters == basic_filters


 def test_query_filter_functionality(
-    base_client: ClientCore, basic_filters: List[Query.QueryFilter], non_abc_query: Type[Query.Query]
+    base_client: ClientCore,
+    basic_filters: List[Query.QueryFilter],
+    non_abc_query: Type[Query.Query],
 ) -> None:
     q = non_abc_query(base_client)
     for f in basic_filters:
@@ -99,7 +103,8 @@ def test_query_filter_filters(mod: Optional[str], param: Any, check: Any, expect
 def test_QF_from_asteriks() -> None:
     # Builds with dictionary args
     QF = Query.QueryFilter._from_args(
-        {"name": "test", "param": "test"}, {"name": "test2", "param": "test2", "modifier": "!="}
+        {"name": "test", "param": "test"},
+        {"name": "test2", "param": "test2", "modifier": "!="},
     )
     assert len(QF) == 2
     assert QF[0].name == "test"
@@ -133,7 +138,9 @@ def test_query_first(non_abc_query: Type[Query.Query], base_client: ClientCore)
     assert first == 1


-def test_query_collect_one(non_abc_query: Type[Query.Query], base_client: ClientCore) -> None:
+def test_query_collect_one(
+    non_abc_query: Type[Query.Query], base_client: ClientCore
+) -> None:
     query = non_abc_query(base_client)
     query.results = [1, 2, 3]
     with pytest.raises(MoreThanOneResultFound):
diff --git a/darwin/future/tests/core/types/test_querystring.py b/darwin/future/tests/core/types/test_querystring.py
index 11a3a0bce..bf38d054a 100644
--- a/darwin/future/tests/core/types/test_querystring.py
+++ b/darwin/future/tests/core/types/test_querystring.py
@@ -10,7 +10,7 @@ def test_querystring_happy_path() -> None:
     query_string_2 = QueryString({"foo": "bar", "baz": "qux"})
     assert str(query_string_2) == "?foo=bar&baz=qux"

-    query_string_3 = QueryString(dict())
+    query_string_3 = QueryString({})
     assert str(query_string_3) == "?"

     assert query_string.value == {"foo": "bar"}
diff --git a/darwin/future/tests/core/workflows/test_get_workflow.py b/darwin/future/tests/core/workflows/test_get_workflow.py
index c0f443635..f63119092 100644
--- a/darwin/future/tests/core/workflows/test_get_workflow.py
+++ b/darwin/future/tests/core/workflows/test_get_workflow.py
@@ -1,4 +1,3 @@
-import pytest
 import responses
 from pydantic import ValidationError
 from requests import HTTPError
@@ -11,7 +10,9 @@

 @responses.activate
-def test_get_workflow(base_client: ClientCore, base_single_workflow_object: JSONType) -> None:
+def test_get_workflow(
+    base_client: ClientCore, base_single_workflow_object: JSONType
+) -> None:
     # Mocking the response using responses library
     response_data = base_single_workflow_object
     workflow_id = "1"
@@ -31,7 +32,9 @@ def test_get_workflow(base_client: ClientCore, base_single_workflow_object: JSON

 @responses.activate
-def test_get_workflow_with_team_slug(base_client: ClientCore, base_single_workflow_object: JSONType) -> None:
+def test_get_workflow_with_team_slug(
+    base_client: ClientCore, base_single_workflow_object: JSONType
+) -> None:
     # Mocking the response using responses library
     team_slug = "team-slug"
     workflow_id = "1"
diff --git a/darwin/future/tests/core/workflows/test_get_workflows.py b/darwin/future/tests/core/workflows/test_get_workflows.py
index 0c6fb751e..9400c3658 100644
--- a/darwin/future/tests/core/workflows/test_get_workflows.py
+++ b/darwin/future/tests/core/workflows/test_get_workflows.py
@@ -33,7 +33,9 @@ def test_get_workflows(base_client: ClientCore, base_workflows_object: str) -> N

 @responses.activate
-def test_get_workflows_with_team_slug(base_client: ClientCore, base_workflows_object: JSONType) -> None:
+def test_get_workflows_with_team_slug(
+    base_client: ClientCore, base_workflows_object: JSONType
+) -> None:
     # Mocking the response using responses library
     team_slug = "team-slug"
     response_data = base_workflows_object
diff --git a/darwin/future/tests/core/workflows/test_list_workflows.py b/darwin/future/tests/core/workflows/test_list_workflows.py
index c959cea1d..23daf0ace 100644
--- a/darwin/future/tests/core/workflows/test_list_workflows.py
+++ b/darwin/future/tests/core/workflows/test_list_workflows.py
@@ -1,6 +1,5 @@
 from typing import List

-import pytest
 import responses
 from pydantic import ValidationError
 from requests import HTTPError
@@ -35,7 +34,9 @@ def test_list_workflows(base_client: ClientCore, base_workflows_object: str) -> 

 @responses.activate
-def test_list_workflows_with_team_slug(base_client: ClientCore, base_workflows_object: JSONType) -> None:
+def test_list_workflows_with_team_slug(
+    base_client: ClientCore, base_workflows_object: JSONType
+) -> None:
     # Mocking the response using responses library
     team_slug = "team-slug"
     response_data = base_workflows_object
diff --git a/darwin/future/tests/data_objects/fixtures.py b/darwin/future/tests/data_objects/fixtures.py
index 8f0849e3a..4e0a56a7d 100644
--- a/darwin/future/tests/data_objects/fixtures.py
+++ b/darwin/future/tests/data_objects/fixtures.py
@@ -1,10 +1,7 @@
-import json
 from pathlib import Path
-from typing import List

 import pytest

-from darwin.future.data_objects.workflow import WFStageCore, WorkflowCore

 test_data_path: Path = Path(__file__).parent / "workflow" / "data"
 valid_stage_json = test_data_path / "stage.json"
diff --git a/darwin/future/tests/data_objects/test_general_darwin_objects.py b/darwin/future/tests/data_objects/test_general_darwin_objects.py
index 0a9382d39..3247fb927 100644
--- a/darwin/future/tests/data_objects/test_general_darwin_objects.py
+++ b/darwin/future/tests/data_objects/test_general_darwin_objects.py
@@ -1,5 +1,3 @@
-import unittest
-
 import pytest
 from pydantic import BaseModel, ValidationError

@@ -19,11 +17,11 @@ def test_integrated_parsing_works_with_raw(basic_combined: dict) -> None:

 def test_broken_obj_raises(broken_combined: dict) -> None:
-    with pytest.raises(ValidationError) as e_info:
-        broken = TeamCore.parse_obj(broken_combined)
+    with pytest.raises(ValidationError):
+        TeamCore.parse_obj(broken_combined)


 @pytest.mark.parametrize("test_object", [TeamCore, DatasetCore, ReleaseCore])
 def test_empty_obj_raises(test_object: BaseModel) -> None:
-    with pytest.raises(ValidationError) as e_info:
-        broken = test_object.parse_obj({})
+    with pytest.raises(ValidationError):
+        test_object.parse_obj({})
diff --git a/darwin/future/tests/data_objects/test_team.py b/darwin/future/tests/data_objects/test_team.py
index 1e17bed9e..44008880d 100644
--- a/darwin/future/tests/data_objects/test_team.py
+++ b/darwin/future/tests/data_objects/test_team.py
@@ -1,5 +1,3 @@
-import unittest
-
 import pytest
 import responses
 from pydantic import ValidationError
@@ -11,7 +9,9 @@
 from darwin.future.tests.fixtures import *


-def test_get_team_returns_valid_team(base_client: ClientCore, base_team_json: dict, base_team: TeamCore) -> None:
+def test_get_team_returns_valid_team(
+    base_client: ClientCore, base_team_json: dict, base_team: TeamCore
+) -> None:
     slug = "test-slug"
     endpoint = base_client.config.api_endpoint + f"teams/{slug}"
     with responses.RequestsMock() as rsps:
@@ -21,21 +21,30 @@ def test_get_team_returns_valid_team(base_client: ClientCore, base_team_json: di
         assert team == base_team


-def test_get_team_fails_on_incorrect_input(base_client: ClientCore, base_team: TeamCore) -> None:
+def test_get_team_fails_on_incorrect_input(
+    base_client: ClientCore, base_team: TeamCore
+) -> None:
     slug = "test-slug"
     endpoint = base_client.config.api_endpoint + f"teams/{slug}"
     with responses.RequestsMock() as rsps:
         rsps.add(responses.GET, endpoint, json={})
         with pytest.raises(ValidationError):
-            team = get_team(base_client, slug)
+            get_team(base_client, slug)


-def test_get_team_members_returns_valid_list(base_client: ClientCore, base_team_member_json: dict) -> None:
-    synthetic_list = [TeamMemberCore.parse_obj(base_team_member_json), TeamMemberCore.parse_obj(base_team_member_json)]
+def test_get_team_members_returns_valid_list(
+    base_client: ClientCore, base_team_member_json: dict
+) -> None:
+    synthetic_list = [
+        TeamMemberCore.parse_obj(base_team_member_json),
+        TeamMemberCore.parse_obj(base_team_member_json),
+    ]
     endpoint = base_client.config.api_endpoint + "memberships"
     with responses.RequestsMock() as rsps:
-        rsps.add(responses.GET, endpoint, json=[base_team_member_json, base_team_member_json])
+        rsps.add(
+            responses.GET, endpoint, json=[base_team_member_json, base_team_member_json]
+        )

         members, errors = get_team_members(base_client)
         assert len(members) == 2
@@ -43,7 +52,9 @@ def test_get_team_members_returns_valid_list(base_client: ClientCore, base_team_
     assert members == synthetic_list


-def test_get_team_members_fails_on_incorrect_input(base_client: ClientCore, base_team_member_json: dict) -> None:
+def test_get_team_members_fails_on_incorrect_input(
+    base_client: ClientCore, base_team_member_json: dict
+) -> None:
     endpoint = base_client.config.api_endpoint + "memberships"
     with responses.RequestsMock() as rsps:
         rsps.add(responses.GET, endpoint, json=[base_team_member_json, {}])
@@ -55,11 +66,14 @@ def test_get_team_members_fails_on_incorrect_input(base_client: ClientCore, base
     assert isinstance(members[0], TeamMemberCore)


-def test_team_from_client(base_client: ClientCore, base_team_json: dict, base_team: TeamCore) -> None:
+def test_team_from_client(
+    base_client: ClientCore, base_team_json: dict, base_team: TeamCore
+) -> None:
     with responses.RequestsMock() as rsps:
         rsps.add(
             responses.GET,
-            base_client.config.api_endpoint + f"teams/{base_client.config.default_team}",
+            base_client.config.api_endpoint
+            + f"teams/{base_client.config.default_team}",
             json=base_team_json,
         )
diff --git a/darwin/future/tests/data_objects/workflow/test_wfdataset.py b/darwin/future/tests/data_objects/workflow/test_wfdataset.py
index 0126853dd..32e783b5b 100644
--- a/darwin/future/tests/data_objects/workflow/test_wfdataset.py
+++ b/darwin/future/tests/data_objects/workflow/test_wfdataset.py
@@ -54,4 +54,6 @@ def test_sad_paths() -> None:
             working_dataset[key] = InvalidValueForTest()
             WFDatasetCore.parse_obj(working_dataset)

-        assert str(excinfo.value).startswith(f"1 validation error for WFDatasetCore\n{key}")
+        assert str(excinfo.value).startswith(
+            f"1 validation error for WFDatasetCore\n{key}"
+        )
diff --git a/darwin/future/tests/data_objects/workflow/test_wfedge.py b/darwin/future/tests/data_objects/workflow/test_wfedge.py
index c3d0c7b8c..c20b8c5a9 100644
--- a/darwin/future/tests/data_objects/workflow/test_wfedge.py
+++ b/darwin/future/tests/data_objects/workflow/test_wfedge.py
@@ -1,6 +1,5 @@
 from pathlib import Path

-import pytest

 from darwin.future.data_objects.workflow import WFEdgeCore
diff --git a/darwin/future/tests/data_objects/workflow/test_wfstage.py b/darwin/future/tests/data_objects/workflow/test_wfstage.py
index a03613aaf..3416478c1 100644
--- a/darwin/future/tests/data_objects/workflow/test_wfstage.py
+++ b/darwin/future/tests/data_objects/workflow/test_wfstage.py
@@ -1,5 +1,4 @@
 from json import loads
-from pathlib import Path
 from uuid import UUID

 import pytest
diff --git a/darwin/future/tests/data_objects/workflow/test_wfstage_config.py b/darwin/future/tests/data_objects/workflow/test_wfstage_config.py
index 7a5ad2ef3..53ea142b8 100644
--- a/darwin/future/tests/data_objects/workflow/test_wfstage_config.py
+++ b/darwin/future/tests/data_objects/workflow/test_wfstage_config.py
@@ -1,6 +1,5 @@
 from pathlib import Path

-import pytest

 from darwin.future.data_objects.workflow import WFStageConfigCore
diff --git a/darwin/future/tests/data_objects/workflow/test_wfuser.py b/darwin/future/tests/data_objects/workflow/test_wfuser.py
index 8619a73e7..d69026ad5 100644
--- a/darwin/future/tests/data_objects/workflow/test_wfuser.py
+++ b/darwin/future/tests/data_objects/workflow/test_wfuser.py
@@ -1,6 +1,5 @@
 from pathlib import Path

-import pytest

 from darwin.future.data_objects.workflow import WFUserCore
diff --git a/darwin/future/tests/meta/fixtures.py b/darwin/future/tests/meta/fixtures.py
index 0a78cc9e0..2132a511b 100644
--- a/darwin/future/tests/meta/fixtures.py
+++ b/darwin/future/tests/meta/fixtures.py
@@ -1,4 +1,4 @@
-from pytest import fixture, raises
+from pytest import fixture

 from darwin.future.core.client import DarwinConfig
 from darwin.future.meta.client import Client
diff --git a/darwin/future/tests/meta/objects/fixtures.py b/darwin/future/tests/meta/objects/fixtures.py
index fd84327a0..a4a75c14f 100644
--- a/darwin/future/tests/meta/objects/fixtures.py
+++ b/darwin/future/tests/meta/objects/fixtures.py
@@ -1,12 +1,13 @@
 from
typing import List from uuid import UUID -from pytest import fixture, raises +from pytest import fixture from darwin.future.core.client import ClientCore +from darwin.future.data_objects.dataset import DatasetCore from darwin.future.data_objects.team import TeamCore from darwin.future.data_objects.workflow import WFStageCore, WorkflowCore -from darwin.future.meta.objects import stage +from darwin.future.meta.objects.dataset import Dataset from darwin.future.meta.objects.stage import Stage from darwin.future.meta.objects.team import Team from darwin.future.meta.objects.workflow import Workflow @@ -24,15 +25,24 @@ def base_meta_team(base_client: ClientCore, base_team: TeamCore) -> Team: @fixture -def base_meta_workflow(base_client: ClientCore, base_workflow: WorkflowCore) -> Workflow: +def base_meta_workflow( + base_client: ClientCore, base_workflow: WorkflowCore +) -> Workflow: return Workflow(base_client, base_workflow) @fixture -def base_meta_stage(base_client: ClientCore, base_stage: WFStageCore, base_UUID: UUID) -> Stage: +def base_meta_stage( + base_client: ClientCore, base_stage: WFStageCore, base_UUID: UUID +) -> Stage: return Stage(base_client, base_stage) @fixture def base_meta_stage_list(base_meta_stage: Stage, base_UUID: UUID) -> List[Stage]: return [base_meta_stage] + + +@fixture +def base_meta_dataset(base_client: ClientCore, base_dataset: DatasetCore) -> Dataset: + return Dataset(base_client, base_dataset, meta_params={"team_slug": "test_team"}) diff --git a/darwin/future/tests/meta/objects/test_datasetmeta.py b/darwin/future/tests/meta/objects/test_datasetmeta.py index 7d3088b38..47c3c71e4 100644 --- a/darwin/future/tests/meta/objects/test_datasetmeta.py +++ b/darwin/future/tests/meta/objects/test_datasetmeta.py @@ -1,27 +1,14 @@ import string -from typing import Generator -from unittest.mock import Mock, patch -from pytest import fixture, mark, raises +from pytest import mark, raises +from requests import HTTPError from responses import RequestsMock from darwin.future.core.client import DarwinConfig from darwin.future.meta.client import Client from darwin.future.meta.objects.dataset import Dataset from darwin.future.tests.core.fixtures import * - - -@fixture -def _delete_by_slug_mock() -> Generator: - with patch.object(Dataset, "_delete_by_slug") as mock: - yield mock - - -@fixture -def _delete_by_id_mock() -> Generator: - with patch.object(Dataset, "_delete_by_id") as mock: - yield mock - +from darwin.future.tests.meta.objects.fixtures import * # `datasets` tests # TODO datasets tests @@ -31,23 +18,20 @@ def _delete_by_id_mock() -> Generator: # `create_dataset` tests -def test_create_dataset_returns_exceptions_thrown(base_config: DarwinConfig) -> None: +def test_create_dataset_raises_HTTPError(base_config: DarwinConfig) -> None: valid_client = Client(base_config) valid_slug = "test_dataset" base_url = base_config.base_url + "api/datasets" - with RequestsMock() as rsps: + with RequestsMock() as rsps, raises(HTTPError): rsps.add(rsps.POST, base_url, status=500) - - exceptions, dataset_created = Dataset.create_dataset(valid_client, valid_slug) - - assert exceptions is not None - assert "500 Server Error" in str(exceptions[0]) - assert dataset_created is None + Dataset.create_dataset(valid_client, valid_slug) -def test_create_dataset_returns_dataset_created_if_dataset_created(base_config: DarwinConfig) -> None: +def test_create_dataset_returns_dataset_created_if_dataset_created( + base_config: DarwinConfig, +) -> None: valid_client = Client(base_config) valid_slug = 
"test_dataset" @@ -61,9 +45,8 @@ def test_create_dataset_returns_dataset_created_if_dataset_created(base_config: status=201, ) - exceptions, dataset_created = Dataset.create_dataset(valid_client, valid_slug) + dataset_created = Dataset.create_dataset(valid_client, valid_slug) - assert exceptions is None assert dataset_created is not None assert dataset_created.id == 1 assert dataset_created.name == "test dataset" @@ -74,128 +57,41 @@ def test_create_dataset_returns_dataset_created_if_dataset_created(base_config: # TODO update_dataset tests -# `delete_dataset` tests -def test_delete_dataset_returns_exceptions_thrown( - base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock -) -> None: - _delete_by_slug_mock.side_effect = Exception("test exception") - - valid_client = Client(base_config) - - exceptions, dataset_deleted = Dataset.delete_dataset(valid_client, "test_dataset") - - assert exceptions is not None - assert str(exceptions[0]) == "test exception" - assert dataset_deleted == -1 - - assert _delete_by_slug_mock.call_count == 1 - assert _delete_by_id_mock.call_count == 0 - - -def test_delete_dataset_calls_delete_by_slug_as_appropriate( - base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock -) -> None: - valid_client = Client(base_config) - - exceptions, _ = Dataset.delete_dataset(valid_client, "test_dataset") - - assert exceptions is None - assert _delete_by_slug_mock.call_count == 1 - assert _delete_by_id_mock.call_count == 0 - - -def test_delete_dataset_calls_delete_by_id_as_appropriate( - base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock +@mark.parametrize( + "invalid_slug", + [ + "", + " ", + "test dataset", + *[f"dataset_{c}" for c in string.punctuation if c not in ["-", "_", "."]], + ], +) +def test_validate_slugh_raises_exception_if_passed_invalid_inputs( + invalid_slug: str, ) -> None: - valid_client = Client(base_config) - - exceptions, _ = Dataset.delete_dataset(valid_client, 1) - - assert exceptions is None - assert _delete_by_slug_mock.call_count == 0 - assert _delete_by_id_mock.call_count == 1 - - -# Test `_delete_by_slug` -def test_delete_by_slug_raises_exception_if_not_passed_str_and_client(base_config: DarwinConfig) -> None: - valid_client = Client(base_config) - valid_slug = "test_dataset" - invalid_client = "client" - invalid_slug = 1 - - with raises(AssertionError): - Dataset._delete_by_slug(valid_client, invalid_slug) # type: ignore - with raises(AssertionError): - Dataset._delete_by_slug(invalid_client, valid_slug) # type: ignore - - -def test_delete_by_slug__returns_dataset_deleted_if_dataset_found(base_config: DarwinConfig) -> None: - valid_client = Client(base_config) - valid_slug = "test_dataset" - - base_url = base_config.base_url + "api/datasets" - - with RequestsMock() as rsps: - rsps.add( - rsps.GET, - base_url + "?id=test_dataset", - json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, - status=200, - ) - rsps.add( - rsps.PUT, - base_url + "/1/archive", - json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, - status=200, - ) - dataset_deleted = Dataset._delete_by_slug(valid_client, valid_slug) - - assert dataset_deleted == 1 - - -# Test `_delete_by_id` -def test_delete_by_id_raises_exception_if_not_passed_int_and_client(base_config: DarwinConfig) -> None: - valid_client = Client(base_config) - valid_id = 1 - invalid_client = "client" - invalid_id = "1" + Dataset._validate_slug(invalid_slug) - with raises(AssertionError): - 
Dataset._delete_by_id(valid_client, invalid_id) # type: ignore - with raises(AssertionError): - Dataset._delete_by_id(invalid_client, valid_id) # type: ignore +def test_validate_slug_returns_none_if_passed_valid_slug() -> None: + valid_slug = "test-dataset" + assert Dataset._validate_slug(valid_slug) is None -def test_delete_by_id_returns_dataset_deleted_if_dataset_found(base_config: DarwinConfig) -> None: - valid_client = Client(base_config) - valid_id = 1 +def test_delete(base_meta_dataset: Dataset, base_config: DarwinConfig) -> None: base_url = base_config.base_url + "api/datasets" - with RequestsMock() as rsps: rsps.add( rsps.PUT, - base_url + "/1/archive", - json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, + base_url + f"/{base_meta_dataset.id}/archive", + json={ + "id": base_meta_dataset.id, + "name": "Test Dataset", + "slug": "test_dataset", + }, status=200, ) - dataset_deleted = Dataset._delete_by_id(valid_client, valid_id) + dataset_deleted = base_meta_dataset.delete() assert dataset_deleted == 1 - - -@mark.parametrize( - "invalid_slug", - ["", " ", "test dataset", *[f"dataset_{c}" for c in string.punctuation if c not in ["-", "_", "."]]], -) -def test_validate_slugh_raises_exception_if_passed_invalid_inputs(invalid_slug: str) -> None: - with raises(AssertionError): - Dataset._validate_slug(invalid_slug) - - -def test_validate_slug_returns_none_if_passed_valid_slug() -> None: - valid_slug = "test-dataset" - - assert Dataset._validate_slug(valid_slug) is None diff --git a/darwin/future/tests/meta/objects/test_stagemeta.py b/darwin/future/tests/meta/objects/test_stagemeta.py index 99f4dfef5..9a1d24c7d 100644 --- a/darwin/future/tests/meta/objects/test_stagemeta.py +++ b/darwin/future/tests/meta/objects/test_stagemeta.py @@ -2,11 +2,8 @@ from uuid import UUID import responses -from pytest import fixture, mark, raises -from responses import RequestsMock -from sklearn import base +from pytest import fixture -from darwin.future.core.client import DarwinConfig from darwin.future.data_objects.workflow import WFStageCore, WFTypeCore from darwin.future.meta.client import Client from darwin.future.meta.objects.stage import Stage @@ -22,17 +19,29 @@ def uuid_str() -> str: @fixture def base_WFStage(uuid_str: str) -> WFStageCore: - return WFStageCore(id=UUID(uuid_str), name="test-stage", type=WFTypeCore.ANNOTATE, assignable_users=[], edges=[]) + return WFStageCore( + id=UUID(uuid_str), + name="test-stage", + type=WFTypeCore.ANNOTATE, + assignable_users=[], + edges=[], + ) @fixture -def stage_meta(base_meta_client: Client, base_WFStage: WFStageCore, workflow_id: UUID) -> Stage: +def stage_meta( + base_meta_client: Client, base_WFStage: WFStageCore, workflow_id: UUID +) -> Stage: return Stage( - base_meta_client, base_WFStage, {"team_slug": "default-team", "dataset_id": 1337, "workflow_id": workflow_id} + base_meta_client, + base_WFStage, + {"team_slug": "default-team", "dataset_id": 1337, "workflow_id": workflow_id}, ) -def test_item_ids(base_meta_client: Client, stage_meta: Stage, UUIDs_str: List[str], UUIDs: List[UUID]) -> None: +def test_item_ids( + base_meta_client: Client, stage_meta: Stage, UUIDs_str: List[str], UUIDs: List[UUID] +) -> None: with responses.RequestsMock() as rsps: rsps.add( rsps.GET, @@ -63,7 +72,10 @@ def test_move_attached_files_to_stage( status=200, ) stage_meta.move_attached_files_to_stage(stage_meta.id) - assert rsps.assert_call_count(base_meta_client.config.api_endpoint + "v2/teams/default-team/items/stage", 1) + assert rsps.assert_call_count( + 
base_meta_client.config.api_endpoint + "v2/teams/default-team/items/stage", + 1, + ) assert rsps.assert_call_count( base_meta_client.config.api_endpoint + f"v2/teams/default-team/items/ids?workflow_stage_ids={str(stage_meta.id)}&dataset_ids=1337", diff --git a/darwin/future/tests/meta/objects/test_teammeta.py b/darwin/future/tests/meta/objects/test_teammeta.py index 06370b4d7..8b010820c 100644 --- a/darwin/future/tests/meta/objects/test_teammeta.py +++ b/darwin/future/tests/meta/objects/test_teammeta.py @@ -1,21 +1,181 @@ +from typing import Generator from unittest.mock import Mock, patch -import responses from pytest import fixture, raises +from responses import RequestsMock -from darwin.future.core.client import ClientCore +from darwin.future.core.client import ClientCore, DarwinConfig +from darwin.future.data_objects.dataset import DatasetCore from darwin.future.data_objects.team import TeamMemberCore +from darwin.future.meta.client import Client +from darwin.future.meta.objects.dataset import Dataset from darwin.future.meta.objects.team import Team from darwin.future.tests.core.fixtures import * from darwin.future.tests.meta.objects.fixtures import * +@fixture +def _delete_by_slug_mock() -> Generator: + with patch.object(Team, "_delete_dataset_by_slug") as mock: + yield mock + + +@fixture +def _delete_by_id_mock() -> Generator: + with patch.object(Team, "_delete_dataset_by_id") as mock: + yield mock + + def test_team_meta_collects_members( - base_meta_team: Team, base_client: ClientCore, base_team_member: TeamMemberCore, base_team_member_json: dict + base_meta_team: Team, + base_client: ClientCore, + base_team_member: TeamMemberCore, + base_team_member_json: dict, ) -> None: - with responses.RequestsMock() as rsps: + with RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" - rsps.add(responses.GET, endpoint, json=[base_team_member_json]) + rsps.add(rsps.GET, endpoint, json=[base_team_member_json]) members = base_meta_team.members._collect() assert len(members) == 1 assert members[0]._element == base_team_member + + +# `delete_dataset` tests +def test_delete_dataset_returns_exceptions_thrown( + base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock +) -> None: + _delete_by_slug_mock.side_effect = Exception("test exception") + + valid_client = Client(base_config) + + exceptions, dataset_deleted = Team.delete_dataset(valid_client, "test_dataset") + + assert exceptions is not None + assert str(exceptions[0]) == "test exception" + assert dataset_deleted == -1 + + assert _delete_by_slug_mock.call_count == 1 + assert _delete_by_id_mock.call_count == 0 + + +def test_delete_dataset_calls_delete_by_slug_as_appropriate( + base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock +) -> None: + valid_client = Client(base_config) + + exceptions, _ = Team.delete_dataset(valid_client, "test_dataset") + + assert exceptions is None + assert _delete_by_slug_mock.call_count == 1 + assert _delete_by_id_mock.call_count == 0 + + +def test_delete_dataset_calls_delete_by_id_as_appropriate( + base_config: DarwinConfig, _delete_by_id_mock: Mock, _delete_by_slug_mock: Mock +) -> None: + valid_client = Client(base_config) + + exceptions, _ = Team.delete_dataset(valid_client, 1) + + assert exceptions is None + assert _delete_by_slug_mock.call_count == 0 + assert _delete_by_id_mock.call_count == 1 + + +def test_delete_by_slug__returns_dataset_deleted_if_dataset_found( + base_config: DarwinConfig, +) -> None: + valid_client = 
Client(base_config) + valid_slug = "test_dataset" + + base_url = base_config.base_url + "api/datasets" + + with RequestsMock() as rsps: + rsps.add( + rsps.GET, + base_url + "?id=test_dataset", + json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, + status=200, + ) + rsps.add( + rsps.PUT, + base_url + "/1/archive", + json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, + status=200, + ) + dataset_deleted = Team._delete_dataset_by_slug(valid_client, valid_slug) + + assert dataset_deleted == 1 + + +def test_delete_by_id_returns_dataset_deleted_if_dataset_found( + base_config: DarwinConfig, +) -> None: + valid_client = Client(base_config) + valid_id = 1 + + base_url = base_config.base_url + "api/datasets" + + with RequestsMock() as rsps: + rsps.add( + rsps.PUT, + base_url + "/1/archive", + json={"id": 1, "name": "Test Dataset", "slug": "test_dataset"}, + status=200, + ) + dataset_deleted = Team._delete_dataset_by_id(valid_client, valid_id) + + assert dataset_deleted == 1 + + +# Test `_delete_by_id` +def test_delete_by_id_raises_exception_if_not_passed_int_and_client( + base_config: DarwinConfig, +) -> None: + valid_client = Client(base_config) + valid_id = 1 + invalid_client = "client" + invalid_id = "1" + + with raises(AssertionError): + Team._delete_dataset_by_id(valid_client, invalid_id) # type: ignore + + with raises(AssertionError): + Team._delete_dataset_by_id(invalid_client, valid_id) # type: ignore + + +# Test `_delete_by_slug` +def test_delete_by_slug_raises_exception_if_not_passed_str_and_client( + base_config: DarwinConfig, +) -> None: + valid_client = Client(base_config) + valid_slug = "test_dataset" + invalid_client = "client" + invalid_slug = 1 + + with raises(AssertionError): + Team._delete_dataset_by_slug(valid_client, invalid_slug) # type: ignore + + with raises(AssertionError): + Team._delete_dataset_by_slug(invalid_client, valid_slug) # type: ignore + + +def test_create_dataset(base_meta_team: Team, base_config: DarwinConfig) -> None: + base_url = base_config.base_url + "api/datasets" + valid_slug = "test_dataset" + valid_name = "test dataset" + with RequestsMock() as rsps: + rsps.add( + rsps.POST, + base_url, + json={"id": 1, "name": valid_name, "slug": valid_slug}, + status=201, + ) + + dataset_created = base_meta_team.create_dataset(valid_slug) + assert dataset_created is not None + assert isinstance(dataset_created, Dataset) + assert isinstance(dataset_created._element, DatasetCore) + assert dataset_created.id == 1 + assert dataset_created.name == valid_name + assert dataset_created.slug == valid_slug diff --git a/darwin/future/tests/meta/queries/test_dataset.py b/darwin/future/tests/meta/queries/test_dataset.py index 3d3e163d9..d06e123c6 100644 --- a/darwin/future/tests/meta/queries/test_dataset.py +++ b/darwin/future/tests/meta/queries/test_dataset.py @@ -1,21 +1,21 @@ import responses -from pytest import fixture, mark from darwin.future.core.client import ClientCore -from darwin.future.data_objects.dataset import DatasetCore from darwin.future.meta.objects.dataset import Dataset from darwin.future.meta.queries.dataset import DatasetQuery from darwin.future.tests.core.fixtures import * -def test_dataset_collects_basic(base_client: ClientCore, base_datasets_json: dict) -> None: +def test_dataset_collects_basic( + base_client: ClientCore, base_datasets_json: dict +) -> None: query = DatasetQuery(base_client) with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, 
json=base_datasets_json) datasets = query._collect() assert len(datasets) == 2 - assert all([isinstance(dataset, Dataset) for dataset in datasets]) + assert all(isinstance(dataset, Dataset) for dataset in datasets) def test_datasetquery_only_passes_back_correctly_formed_objects( @@ -31,9 +31,13 @@ def test_datasetquery_only_passes_back_correctly_formed_objects( assert isinstance(datasets[0], Dataset) -def test_dataset_filters_name(base_client: ClientCore, base_datasets_json: dict) -> None: +def test_dataset_filters_name( + base_client: ClientCore, base_datasets_json: dict +) -> None: with responses.RequestsMock() as rsps: - query = DatasetQuery(base_client).where({"name": "name", "param": "test dataset 1"}) + query = DatasetQuery(base_client).where( + {"name": "name", "param": "test dataset 1"} + ) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json) datasets = query._collect() @@ -53,9 +57,13 @@ def test_dataset_filters_id(base_client: ClientCore, base_datasets_json: dict) - assert datasets[0]._element.slug == "test-dataset-1" -def test_dataset_filters_slug(base_client: ClientCore, base_datasets_json: dict) -> None: +def test_dataset_filters_slug( + base_client: ClientCore, base_datasets_json: dict +) -> None: with responses.RequestsMock() as rsps: - query = DatasetQuery(base_client).where({"name": "slug", "param": "test-dataset-1"}) + query = DatasetQuery(base_client).where( + {"name": "slug", "param": "test-dataset-1"} + ) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json) datasets = query._collect() @@ -64,9 +72,13 @@ def test_dataset_filters_slug(base_client: ClientCore, base_datasets_json: dict) assert datasets[0]._element.slug == "test-dataset-1" -def test_dataset_filters_releases(base_client: ClientCore, base_datasets_json_with_releases: dict) -> None: +def test_dataset_filters_releases( + base_client: ClientCore, base_datasets_json_with_releases: dict +) -> None: with responses.RequestsMock() as rsps: - query = DatasetQuery(base_client).where({"name": "releases", "param": "release1"}) + query = DatasetQuery(base_client).where( + {"name": "releases", "param": "release1"} + ) endpoint = base_client.config.api_endpoint + "datasets" rsps.add(responses.GET, endpoint, json=base_datasets_json_with_releases) @@ -76,7 +88,9 @@ def test_dataset_filters_releases(base_client: ClientCore, base_datasets_json_wi assert datasets_odd_ids[0]._element.slug == "test-dataset-1" assert datasets_odd_ids[1]._element.slug == "test-dataset-3" - query2 = DatasetQuery(base_client).where({"name": "releases", "param": "release2"}) + query2 = DatasetQuery(base_client).where( + {"name": "releases", "param": "release2"} + ) datasets_even_ids = query2._collect() assert len(datasets_even_ids) == 2 diff --git a/darwin/future/tests/meta/queries/test_stage.py b/darwin/future/tests/meta/queries/test_stage.py index a9cc61267..1d7ad0721 100644 --- a/darwin/future/tests/meta/queries/test_stage.py +++ b/darwin/future/tests/meta/queries/test_stage.py @@ -1,5 +1,3 @@ -from typing import List - import pytest import responses @@ -13,18 +11,22 @@ @pytest.fixture def filled_query(base_client: ClientCore, base_workflow_meta: Workflow) -> StageQuery: - return StageQuery(base_client, meta_params={"workflow_id": str(base_workflow_meta.id)}) + return StageQuery( + base_client, meta_params={"workflow_id": str(base_workflow_meta.id)} + ) @pytest.fixture -def base_workflow_meta(base_client: ClientCore, 
base_single_workflow_object: dict) -> Workflow: +def base_workflow_meta( + base_client: ClientCore, base_single_workflow_object: dict +) -> Workflow: return Workflow(base_client, WorkflowCore.parse_obj(base_single_workflow_object)) @pytest.fixture def multi_stage_workflow_object(base_single_workflow_object: dict) -> dict: stage = base_single_workflow_object["stages"][0] - types = [t for t in WFTypeCore.__members__.values()] * 3 + types = list(WFTypeCore.__members__.values()) * 3 stages = [] for i, t in enumerate(types): temp = stage.copy() @@ -41,11 +43,16 @@ def test_WFTypes_accept_unknonwn() -> None: def test_stage_collects_basic( - filled_query: StageQuery, base_single_workflow_object: dict, base_workflow_meta: Workflow + filled_query: StageQuery, + base_single_workflow_object: dict, + base_workflow_meta: Workflow, ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: - endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" + endpoint = ( + filled_query.client.config.api_endpoint + + f"v2/teams/default-team/workflows/{UUID}" + ) rsps.add(responses.GET, endpoint, json=base_single_workflow_object) stages = filled_query._collect() assert len(stages) == len(base_workflow_meta.stages) @@ -53,11 +60,16 @@ def test_stage_collects_basic( def test_stage_filters_basic( - filled_query: StageQuery, multi_stage_workflow_object: dict, base_workflow_meta: Workflow + filled_query: StageQuery, + multi_stage_workflow_object: dict, + base_workflow_meta: Workflow, ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: - endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" + endpoint = ( + filled_query.client.config.api_endpoint + + f"v2/teams/default-team/workflows/{UUID}" + ) rsps.add(responses.GET, endpoint, json=multi_stage_workflow_object) stages = filled_query.where({"name": "name", "param": "stage1"})._collect() assert len(stages) == 1 @@ -65,13 +77,19 @@ def test_stage_filters_basic( assert stages[0]._element.name == "stage1" -@pytest.mark.parametrize("wf_type", [t for t in WFTypeCore.__members__.values()]) +@pytest.mark.parametrize("wf_type", list(WFTypeCore.__members__.values())) def test_stage_filters_WFType( - wf_type: WFTypeCore, filled_query: StageQuery, multi_stage_workflow_object: dict, base_workflow_meta: Workflow + wf_type: WFTypeCore, + filled_query: StageQuery, + multi_stage_workflow_object: dict, + base_workflow_meta: Workflow, ) -> None: UUID = base_workflow_meta.id with responses.RequestsMock() as rsps: - endpoint = filled_query.client.config.api_endpoint + f"v2/teams/default-team/workflows/{UUID}" + endpoint = ( + filled_query.client.config.api_endpoint + + f"v2/teams/default-team/workflows/{UUID}" + ) rsps.add(responses.GET, endpoint, json=multi_stage_workflow_object) stages = filled_query.where({"name": "type", "param": wf_type.value})._collect() assert len(stages) == 3 diff --git a/darwin/future/tests/meta/queries/test_team_member.py b/darwin/future/tests/meta/queries/test_team_member.py index 19b19a2e2..6fe314e58 100644 --- a/darwin/future/tests/meta/queries/test_team_member.py +++ b/darwin/future/tests/meta/queries/test_team_member.py @@ -10,7 +10,9 @@ from darwin.future.tests.core.fixtures import * -def test_team_member_collects_basic(base_client: ClientCore, base_team_members_json: List[dict]) -> None: +def test_team_member_collects_basic( + base_client: ClientCore, base_team_members_json: List[dict] +) -> None: query = TeamMemberQuery(base_client) 
with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" @@ -20,7 +22,9 @@ def test_team_member_collects_basic(base_client: ClientCore, base_team_members_j assert isinstance(members[0], TeamMember) -def test_team_member_only_passes_back_correct(base_client: ClientCore, base_team_member_json: dict) -> None: +def test_team_member_only_passes_back_correct( + base_client: ClientCore, base_team_member_json: dict +) -> None: query = TeamMemberQuery(base_client) with responses.RequestsMock() as rsps: endpoint = base_client.config.api_endpoint + "memberships" @@ -30,13 +34,15 @@ def test_team_member_only_passes_back_correct(base_client: ClientCore, base_team assert isinstance(members[0], TeamMember) -@pytest.mark.parametrize("role", [role for role in TeamMemberRole]) +@pytest.mark.parametrize("role", list(TeamMemberRole)) def test_team_member_filters_role( role: TeamMemberRole, base_client: ClientCore, base_team_members_json: List[dict] ) -> None: with responses.RequestsMock() as rsps: # Test equal - query = TeamMemberQuery(base_client).where({"name": "role", "param": role.value}) + query = TeamMemberQuery(base_client).where( + {"name": "role", "param": role.value} + ) endpoint = base_client.config.api_endpoint + "memberships" rsps.add(responses.GET, endpoint, json=base_team_members_json) members = query._collect() @@ -45,7 +51,9 @@ def test_team_member_filters_role( # Test not equal rsps.reset() - query = TeamMemberQuery(base_client).where({"name": "role", "param": role.value, "modifier": "!="}) + query = TeamMemberQuery(base_client).where( + {"name": "role", "param": role.value, "modifier": "!="} + ) rsps.add(responses.GET, endpoint, json=base_team_members_json) members = query._collect() assert len(members) == len(TeamMemberRole) - 1 @@ -53,7 +61,9 @@ def test_team_member_filters_role( assert member._element.role != role -def test_team_member_filters_general(base_client: ClientCore, base_team_members_json: List[dict]) -> None: +def test_team_member_filters_general( + base_client: ClientCore, base_team_members_json: List[dict] +) -> None: for idx in range(len(base_team_members_json)): base_team_members_json[idx]["id"] = idx + 1 @@ -73,7 +83,9 @@ def test_team_member_filters_general(base_client: ClientCore, base_team_members_ members = ( TeamMemberQuery(base_client) .where({"name": "id", "param": 1, "modifier": ">"}) - .where({"name": "id", "param": len(base_team_members_json), "modifier": "<"}) + .where( + {"name": "id", "param": len(base_team_members_json), "modifier": "<"} + ) ._collect() ) diff --git a/darwin/future/tests/meta/queries/test_workflow.py b/darwin/future/tests/meta/queries/test_workflow.py index 71f77c802..b9761f786 100644 --- a/darwin/future/tests/meta/queries/test_workflow.py +++ b/darwin/future/tests/meta/queries/test_workflow.py @@ -4,8 +4,6 @@ import responses from darwin.future.core.client import ClientCore -from darwin.future.core.types.query import Modifier -from darwin.future.data_objects.workflow import WorkflowCore from darwin.future.meta.objects.workflow import Workflow from darwin.future.meta.queries.workflow import WorkflowQuery from darwin.future.tests.core.fixtures import * @@ -20,20 +18,28 @@ def workflows_query_endpoint(team: str) -> str: @responses.activate -def test_workflowquery_collects_basic(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_collects_basic( + base_client: 
ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []) workflows = query._collect() assert len(workflows) == 3 - assert all([isinstance(workflow, Workflow) for workflow in workflows]) + assert all(isinstance(workflow, Workflow) for workflow in workflows) @responses.activate -def test_workflowquery_filters_uuid(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_uuid( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -49,8 +55,12 @@ def test_workflowquery_filters_uuid(base_client: ClientCore, base_filterable_wor @responses.activate -def test_workflowquery_filters_inserted_at(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_inserted_at( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) start = "2021-06-01T15:00:00.000+00:00" @@ -80,8 +90,12 @@ def test_workflowquery_filters_inserted_at(base_client: ClientCore, base_filtera @responses.activate -def test_workflowquery_filters_updated_at(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_updated_at( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) start = "2021-06-04T15:00:00.000+00:00" @@ -111,8 +125,12 @@ def test_workflowquery_filters_updated_at(base_client: ClientCore, base_filterab @responses.activate -def test_workflowquery_filters_dataset_id(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_dataset_id( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -131,7 +149,9 @@ def test_workflowquery_filters_dataset_id(base_client: ClientCore, base_filterab def test_workflowquery_filters_dataset_id_multiple_ids( base_client: ClientCore, base_filterable_workflows: dict ) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, 
json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -148,8 +168,12 @@ def test_workflowquery_filters_dataset_id_multiple_ids( @responses.activate -def test_workflowquery_filters_dataset_name(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_dataset_name( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -168,7 +192,9 @@ def test_workflowquery_filters_dataset_name(base_client: ClientCore, base_filter def test_workflowquery_filters_dataset_name_mutliple_names( base_client: ClientCore, base_filterable_workflows: dict ) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -185,8 +211,12 @@ def test_workflowquery_filters_dataset_name_mutliple_names( @responses.activate -def test_workflowquery_filters_stages(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_stages( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( @@ -202,8 +232,12 @@ def test_workflowquery_filters_stages(base_client: ClientCore, base_filterable_w @responses.activate -def test_workflowquery_filters_stages_multiple(base_client: ClientCore, base_filterable_workflows: dict) -> None: - endpoint = base_client.config.api_endpoint + workflows_query_endpoint(base_client.config.default_team) +def test_workflowquery_filters_stages_multiple( + base_client: ClientCore, base_filterable_workflows: dict +) -> None: + endpoint = base_client.config.api_endpoint + workflows_query_endpoint( + base_client.config.default_team + ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) query = WorkflowQuery(base_client, []).where( diff --git a/darwin/future/tests/meta/test_client.py b/darwin/future/tests/meta/test_client.py index fb3b74b16..aa86bd480 100644 --- a/darwin/future/tests/meta/test_client.py +++ b/darwin/future/tests/meta/test_client.py @@ -1,6 +1,3 @@ -import unittest - -import pytest import responses from darwin.future.core.client import DarwinConfig @@ -14,12 +11,18 @@ def test_creates_from_api_key() -> None: with responses.RequestsMock() as rsps: base_api_endpoint = DarwinConfig._default_api_endpoint() - rsps.add(responses.GET, base_api_endpoint + "users/token_info", json={"selected_team": {"slug": "test-team"}}) + rsps.add( + responses.GET, + base_api_endpoint + "users/token_info", + json={"selected_team": {"slug": "test-team"}}, + ) client = Client.from_api_key(api_key="test") assert client.config.default_team == "test-team" -def test_team_property(base_meta_client: Client, base_team: TeamCore, base_team_json: 
dict) -> None: +def test_team_property( + base_meta_client: Client, base_team: TeamCore, base_team_json: dict +) -> None: client = base_meta_client endpoint = client.config.api_endpoint + f"teams/{client.config.default_team}" with responses.RequestsMock() as rsps: diff --git a/deploy/format_lint.sh b/deploy/format_lint.sh index 23270d5e6..a40b74e48 100755 --- a/deploy/format_lint.sh +++ b/deploy/format_lint.sh @@ -35,6 +35,8 @@ if [ "$ACTION" == "format" ]; then pipinstall ruff elif [ "$ACTION" == "typecheck" ]; then pipinstall mypy + pipinstall types-requests + pipinstall types-pyYAML else echo "Action must be format, typecheck, or lint" exit 1 diff --git a/poetry.lock b/poetry.lock index f0ec35838..a980110e9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "albumentations" version = "1.3.1" description = "Fast image augmentation library and easy to use wrapper around other libraries" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -28,6 +29,7 @@ tests = ["pytest"] name = "argcomplete" version = "2.1.2" description = "Bash tab completion for argparse" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -46,6 +48,7 @@ test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -67,6 +70,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "black" version = "22.12.0" description = "The uncompromising code formatter." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -103,6 +107,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -114,6 +119,7 @@ files = [ name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -213,6 +219,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -228,6 +235,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -239,6 +247,7 @@ files = [ name = "connected-components-3d" version = "3.12.3" description = "Connected components on 2D and 3D images. Supports multiple labels." 
+category = "main" optional = true python-versions = ">=3.7,<4.0" files = [ @@ -286,6 +295,7 @@ numpy = "*" name = "debugpy" version = "1.7.0" description = "An implementation of the Debug Adapter Protocol for Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -317,6 +327,7 @@ files = [ name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" +category = "main" optional = false python-versions = "*" files = [ @@ -331,6 +342,7 @@ packaging = "*" name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -345,6 +357,7 @@ test = ["pytest (>=6)"] name = "humanize" version = "4.6.0" description = "Python humanize utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -362,6 +375,7 @@ tests = ["freezegun", "pytest", "pytest-cov"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -373,6 +387,7 @@ files = [ name = "imageio" version = "2.31.5" description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -404,6 +419,7 @@ tifffile = ["tifffile"] name = "importlib-metadata" version = "5.2.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -424,6 +440,7 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -442,6 +459,7 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -453,6 +471,7 @@ files = [ name = "isort" version = "5.11.5" description = "A Python utility / library to sort Python imports." +category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -470,6 +489,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "joblib" version = "1.3.2" description = "Lightweight pipelining with Python functions" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -481,6 +501,7 @@ files = [ name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -504,6 +525,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "lazy-loader" version = "0.3" description = "lazy_loader" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -519,6 +541,7 @@ test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -544,6 +567,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -555,6 +579,7 @@ files = [ name = "mpire" version = "2.8.0" description = "A Python package for easy multiprocessing, but faster than multiprocessing" +category = "main" optional = false python-versions = "*" files = [ @@ -577,6 +602,7 @@ testing = ["dataclasses", "multiprocess", "multiprocess (>=0.70.15)", "numpy", " name = "mypy" version = "1.6.0" description = "Optional static typing for Python" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -623,6 +649,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "main" optional = true python-versions = ">=3.5" files = [ @@ -634,6 +661,7 @@ files = [ name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -652,6 +680,7 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nibabel" version = "5.1.0" description = "Access a multitude of neuroimaging data formats" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -682,6 +711,7 @@ zstd = ["pyzstd (>=0.14.3)"] name = "numpy" version = "1.21.6" description = "NumPy is the fundamental package for array computing with Python." +category = "main" optional = false python-versions = ">=3.7,<3.11" files = [ @@ -722,6 +752,7 @@ files = [ name = "nvidia-cublas-cu11" version = "11.10.3.66" description = "CUBLAS native runtime libraries" +category = "main" optional = true python-versions = ">=3" files = [ @@ -737,6 +768,7 @@ wheel = "*" name = "nvidia-cuda-nvrtc-cu11" version = "11.7.99" description = "NVRTC native runtime libraries" +category = "main" optional = true python-versions = ">=3" files = [ @@ -753,6 +785,7 @@ wheel = "*" name = "nvidia-cuda-runtime-cu11" version = "11.7.99" description = "CUDA Runtime native Libraries" +category = "main" optional = true python-versions = ">=3" files = [ @@ -768,6 +801,7 @@ wheel = "*" name = "nvidia-cudnn-cu11" version = "8.5.0.96" description = "cuDNN runtime libraries" +category = "main" optional = true python-versions = ">=3" files = [ @@ -783,6 +817,7 @@ wheel = "*" name = "opencv-python-headless" version = "4.8.1.78" description = "Wrapper package for OpenCV python bindings." 
+category = "main" optional = true python-versions = ">=3.6" files = [ @@ -797,18 +832,19 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.21.0", markers = "python_version <= \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\" and python_version >= \"3.7\""}, + {version = ">=1.21.0", markers = "python_version <= \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\""}, + {version = ">=1.21.2", markers = "python_version >= \"3.10\""}, {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\""}, - {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\""}, - {version = ">=1.19.3", markers = "platform_system == \"Linux\" and python_version < \"3.10\" and platform_machine == \"aarch64\" and python_version >= \"3.7\" or python_version < \"3.10\" and platform_system != \"Darwin\" and python_version >= \"3.9\" or python_version < \"3.10\" and python_version >= \"3.9\" and platform_machine != \"arm64\" or python_version > \"3.9\" and python_version < \"3.10\""}, - {version = ">=1.17.3", markers = "(platform_system != \"Darwin\" and platform_system != \"Linux\") and python_version >= \"3.8\" and python_version < \"3.9\" or platform_system != \"Darwin\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_machine != \"aarch64\" or platform_machine != \"arm64\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_system != \"Linux\" or (platform_machine != \"arm64\" and platform_machine != \"aarch64\") and python_version >= \"3.8\" and python_version < \"3.9\""}, - {version = ">=1.17.0", markers = "(platform_system != \"Darwin\" and platform_system != \"Linux\") and python_version >= \"3.7\" and python_version < \"3.8\" or platform_system != \"Darwin\" and python_version >= \"3.7\" and python_version < \"3.8\" and platform_machine != \"aarch64\" or platform_machine != \"arm64\" and python_version >= \"3.7\" and python_version < \"3.8\" and platform_system != \"Linux\" or (platform_machine != \"arm64\" and platform_machine != \"aarch64\") and python_version >= \"3.7\" and python_version < \"3.8\""}, + {version = ">=1.19.3", markers = "python_version >= \"3.6\" and platform_system == \"Linux\" and platform_machine == \"aarch64\" or python_version >= \"3.9\""}, + {version = ">=1.17.0", markers = "python_version >= \"3.7\""}, + {version = ">=1.17.3", markers = "python_version >= \"3.8\""}, ] [[package]] name = "orjson" version = "3.9.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -878,6 +914,7 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -889,6 +926,7 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -900,6 +938,7 @@ files = [ name = "pillow" version = "9.5.0" description = "Python Imaging Library (Fork)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -979,6 +1018,7 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -990,6 +1030,7 @@ files = [ name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1008,6 +1049,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1026,6 +1068,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1078,6 +1121,7 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1092,6 +1136,7 @@ plugins = ["importlib-metadata"] name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1128,6 +1173,7 @@ files = [ name = "pytest" version = "7.4.2" description = "pytest: simple powerful testing with Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1151,6 +1197,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-rerunfailures" version = "12.0" description = "pytest plugin to re-run tests to eliminate flaky failures" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1167,6 +1214,7 @@ pytest = ">=6.2" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1181,6 +1229,7 @@ cli = ["click (>=5.0)"] name = "pywavelets" version = "1.4.1" description = "PyWavelets, wavelet transform module" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1218,6 +1267,7 @@ numpy = ">=1.17.3" name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" files = [ @@ -1241,6 +1291,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1300,6 +1351,7 @@ files = [ name = "qudida" version = "0.0.4" description = "QUick and DIrty Domain Adaptation" +category = "main" optional = true python-versions = ">=3.5.0" files = [ @@ -1317,6 +1369,7 @@ typing-extensions = "*" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1338,6 +1391,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "responses" version = "0.22.0" description = "A utility library for mocking out the `requests` Python library." 
+category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1359,6 +1413,7 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rich" version = "13.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -1374,10 +1429,38 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "ruff" +version = "0.0.292" +description = "An extremely fast Python linter, written in Rust." +category = "main" +optional = true +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, + {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, + {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, + {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, + {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, + {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, +] + [[package]] name = "scikit-image" version = "0.21.0" description = "Image processing in Python" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1428,6 +1511,7 @@ test = ["asv", "matplotlib (>=3.5)", "pooch (>=1.6.0)", "pytest 
(>=7.0)", "pytes name = "scikit-learn" version = "1.3.1" description = "A set of python modules for machine learning and data mining" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1470,6 +1554,7 @@ tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc ( name = "scipy" version = "1.10.1" description = "Fundamental algorithms for scientific computing in Python" +category = "main" optional = true python-versions = "<3.12,>=3.8" files = [ @@ -1508,6 +1593,7 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1524,6 +1610,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "threadpoolctl" version = "3.2.0" description = "threadpoolctl" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1535,6 +1622,7 @@ files = [ name = "tifffile" version = "2023.7.10" description = "Read and write TIFF files" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1552,6 +1640,7 @@ all = ["defusedxml", "fsspec", "imagecodecs (>=2023.1.23)", "lxml", "matplotlib" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1563,6 +1652,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1574,6 +1664,7 @@ files = [ name = "torch" version = "1.13.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -1614,6 +1705,7 @@ opt-einsum = ["opt-einsum (>=3.3)"] name = "torchvision" version = "0.14.1" description = "image and video datasets and models for torch deep learning" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1640,7 +1732,7 @@ files = [ [package.dependencies] numpy = "*" -pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" +pillow = ">=5.3.0,<8.3.0 || >=8.4.0" requests = "*" torch = "1.13.1" typing-extensions = "*" @@ -1652,6 +1744,7 @@ scipy = ["scipy"] name = "tqdm" version = "4.66.1" description = "Fast, Extensible Progress Meter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1672,6 +1765,7 @@ telegram = ["requests"] name = "typed-ast" version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "main" optional = true python-versions = ">=3.6" files = [ @@ -1722,6 +1816,7 @@ files = [ name = "types-pyyaml" version = "6.0.12.12" description = "Typing stubs for PyYAML" +category = "main" optional = false python-versions = "*" files = [ @@ -1731,13 +1826,14 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.8" +version = "2.31.0.9" description = "Typing stubs for requests" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "types-requests-2.31.0.8.tar.gz", hash = "sha256:e1b325c687b3494a2f528ab06e411d7092cc546cc9245c000bacc2fca5ae96d4"}, - {file = "types_requests-2.31.0.8-py3-none-any.whl", hash = "sha256:39894cbca3fb3d032ed8bdd02275b4273471aa5668564617cc1734b0a65ffdf8"}, + {file = "types-requests-2.31.0.9.tar.gz", hash = 
"sha256:3bb11188795cc3aa39f9635032044ee771009370fb31c3a06ae952b267b6fcd7"}, + {file = "types_requests-2.31.0.9-py3-none-any.whl", hash = "sha256:140e323da742a0cd0ff0a5a83669da9ffcebfaeb855d367186b2ec3985ba2742"}, ] [package.dependencies] @@ -1747,6 +1843,7 @@ urllib3 = ">=2" name = "types-toml" version = "0.10.8.7" description = "Typing stubs for toml" +category = "main" optional = true python-versions = "*" files = [ @@ -1758,6 +1855,7 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1769,6 +1867,7 @@ files = [ name = "upolygon" version = "0.1.10" description = "Collection of fast polygon operations for DL" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1790,6 +1889,7 @@ numpy = "*" name = "urllib3" version = "2.0.6" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1807,6 +1907,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "wheel" version = "0.41.2" description = "A built-package format for Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1821,6 +1922,7 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1833,7 +1935,7 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -dev = ["black", "debugpy", "isort", "mypy", "pytest", "pytest-rerunfailures", "responses"] +dev = ["black", "debugpy", "isort", "mypy", "pytest", "pytest-rerunfailures", "responses", "ruff"] medical = ["connected-components-3d", "nibabel"] ml = ["albumentations", "scikit-learn", "torch", "torchvision"] ocv = ["opencv-python-headless"] @@ -1842,4 +1944,4 @@ test = ["pytest", "responses"] [metadata] lock-version = "2.0" python-versions = ">=3.7.0,<3.11" -content-hash = "1158b8ae2bbf51d84bc42543f93bc9bb6013df9dba67dec37a4963e477a143e9" +content-hash = "4d7aeadf6e4cc865f1a031b4d68ca1da234f1a7829215af4e21256bfb0ba4b1b" diff --git a/pyproject.toml b/pyproject.toml index e2ac46574..076126891 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,7 @@ no_implicit_optional = true warn_return_any = false warn_unreachable = true pretty = true +implicit_reexport = true [tool.pydantic-mypy] init_forbid_extra = true @@ -53,18 +54,16 @@ init_typed = true warn_required_dynamic_aliases = true warn_untyped_fields = true -[tool.black] -line-length = 160 - [tool.ruff] select = ["E", "F", "C"] ignore = ["E203", "E402"] line-length = 160 [tool.ruff.per-file-ignores] -"__init__.py" = ["E402"] +"__init__.py" = ["E402", "F401"] "path/to/file.py" = ["E402"] "**/{tests,docs,tools}/*" = ["E402"] +"darwin/future/tests/*" = ["F403"] [tool.flake8] max-line-length = 160 @@ -97,7 +96,7 @@ mypy = { version = "^1.5", optional = true, python = ">=3.8" } responses = { version = "^0.22.0", optional = true } pytest = { version = "^7.2.1", optional = true } debugpy = { version = "^1.6.5", optional = true } -types-requests = { version = "^2.28.11.8" } +types-requests = 
"^2.28.11.8" mpire = { version = "^2.7.0" } tqdm = "^4.64.1" types-pyyaml = "^6.0.12.9" @@ -105,6 +104,7 @@ python-dotenv = { version = "^1.0.0", python = ">3.8" } opencv-python-headless = { version = "^4.8.0.76", optional = true } pyyaml = "^6.0.1" pytest-rerunfailures = { version = "^12.0", optional = true } +ruff = { version = "^0.0.292", optional = true } [tool.poetry.extras] dev = [ @@ -117,6 +117,7 @@ dev = [ "pytest", "flake8-pyproject", "pytest-rerunfailures", + "ruff", ] test = ["responses", "pytest", "flake8-pyproject"] ml = ["torch", "torchvision", "scikit-learn", "albumentations"]