From f5992b4de12ed8baad9a8cbcc1845285655c82d3 Mon Sep 17 00:00:00 2001 From: Nathan Perkins Date: Fri, 20 Oct 2023 12:57:52 +0100 Subject: [PATCH] [IO-1278][IO-1277] Core Items/Folders apis (#691) * basics * get folders * linting * tests * linting * camel case fields * line length * line length * linting changes * linting changes * import sorting * assert not required * merge master * linting --- darwin/future/core/items/get.py | 88 +++++++++++++++++++ darwin/future/core/team/__init__.py | 7 +- darwin/future/core/types/common.py | 3 +- darwin/future/core/types/query.py | 8 +- darwin/future/core/utils/pathutils.py | 4 - .../future/core/workflows/list_workflows.py | 1 - darwin/future/data_objects/item.py | 78 ++++++++++++++++ darwin/future/data_objects/release.py | 5 +- darwin/future/data_objects/team.py | 4 +- darwin/future/exceptions.py | 10 ++- darwin/future/meta/objects/dataset.py | 19 +++- darwin/future/meta/objects/team.py | 20 +++-- darwin/future/meta/objects/workflow.py | 12 ++- darwin/future/meta/queries/dataset.py | 6 +- darwin/future/meta/queries/team_member.py | 10 ++- .../tests/core/datasets/test_list_datasets.py | 1 + darwin/future/tests/core/items/fixtures.py | 47 ++++++++++ .../future/tests/core/items/test_get_items.py | 62 ++++++++++++- darwin/future/tests/core/test_query.py | 4 +- .../tests/core/types/test_querystring.py | 6 +- darwin/future/tests/data_objects/fixtures.py | 1 - .../data_objects/workflow/test_wfedge.py | 1 - .../workflow/test_wfstage_config.py | 1 - .../data_objects/workflow/test_wfuser.py | 1 - .../tests/meta/objects/test_stagemeta.py | 8 +- .../tests/meta/queries/test_workflow.py | 4 +- pyproject.toml | 3 + 27 files changed, 359 insertions(+), 55 deletions(-) create mode 100644 darwin/future/data_objects/item.py diff --git a/darwin/future/core/items/get.py b/darwin/future/core/items/get.py index 37bb6fd4c..f2b67f2f2 100644 --- a/darwin/future/core/items/get.py +++ b/darwin/future/core/items/get.py @@ -1,8 +1,11 @@ from typing import List, Union from uuid import UUID +from pydantic import parse_obj_as + from darwin.future.core.client import ClientCore from darwin.future.core.types.common import QueryString +from darwin.future.data_objects.item import Folder, Item def get_item_ids( @@ -75,3 +78,88 @@ def get_item_ids_stage( assert isinstance(response, dict) uuids = [UUID(uuid) for uuid in response["item_ids"]] return uuids + + +def get_item( + api_client: ClientCore, + team_slug: str, + item_id: Union[UUID, str], + params: QueryString = QueryString({}), +) -> Item: + """ + Returns an item + + Parameters + ---------- + client: Client + The client to use for the request + team_slug: str + The slug of the team to get item ids for + item_id: str + The id or slug of the item to get + + Returns + ------- + dict + The item + """ + response = api_client.get(f"/v2/teams/{team_slug}/items/{item_id}", params) + assert isinstance(response, dict) + return parse_obj_as(Item, response) + + +def list_items( + api_client: ClientCore, + team_slug: str, + params: QueryString, +) -> List[Item]: + """ + Returns a list of items for the dataset + + Parameters + ---------- + client: Client + The client to use for the request + team_slug: str + The slug of the team to get items for + dataset_id: str + The id or slug of the dataset to get items for + + Returns + ------- + List[Item] + A list of items + """ + assert "dataset_ids" in params.value, "dataset_ids must be provided" + response = api_client.get(f"/v2/teams/{team_slug}/items", params) + assert isinstance(response, dict) + 
return parse_obj_as(List[Item], response["items"]) + + +def list_folders( + api_client: ClientCore, + team_slug: str, + params: QueryString, +) -> List[Folder]: + """ + Returns a list of folders for the team and dataset + + Parameters + ---------- + client: Client + The client to use for the request + team_slug: str + The slug of the team to get folder ids for + params: QueryString + parameters to filter the folders + + Returns + ------- + List[Folder] + The folders + """ + assert "dataset_ids" in params.value, "dataset_ids must be provided" + response = api_client.get(f"/v2/teams/{team_slug}/items/folders", params) + assert isinstance(response, dict) + assert "folders" in response + return parse_obj_as(List[Folder], response["folders"]) diff --git a/darwin/future/core/team/__init__.py b/darwin/future/core/team/__init__.py index 8bc574aaa..9293adf51 100644 --- a/darwin/future/core/team/__init__.py +++ b/darwin/future/core/team/__init__.py @@ -1,5 +1,6 @@ # Can't import * in this module because of a circular import problem specific to teams -# The TeamCore module can instantiate from a client, but the client needs to use the team backend module -# to request the object for team. To circumvent this there's a get_raw method in this module that returns -# the raw team object, which is then passed to the TeamCore module, but if we import * here it introduces the +# The TeamCore module can instantiate from a client, but the client needs to use the +# team backend module to request the object for team. To circumvent this there's a +# get_raw method in this module that returns the raw team object, which is then passed +# to the TeamCore module, but if we import * here it introduces the # circular import problem. diff --git a/darwin/future/core/types/common.py b/darwin/future/core/types/common.py index d7a665d79..6ca2450a1 100644 --- a/darwin/future/core/types/common.py +++ b/darwin/future/core/types/common.py @@ -1,6 +1,5 @@ from typing import Any, Dict, List, Union - from darwin.future.data_objects import validators as darwin_validators from darwin.future.data_objects.typing import UnknownType @@ -39,7 +38,7 @@ class QueryString: value: Dict[str, str] - def dict_check(cls, value: UnknownType) -> Dict[str, str]: + def dict_check(self, value: UnknownType) -> Dict[str, str]: assert isinstance(value, dict) assert all(isinstance(k, str) and isinstance(v, str) for k, v in value.items()) return value diff --git a/darwin/future/core/types/query.py b/darwin/future/core/types/query.py index 8c2cdee9b..75b278505 100644 --- a/darwin/future/core/types/query.py +++ b/darwin/future/core/types/query.py @@ -68,8 +68,8 @@ def filter_attr(self, attr: Any) -> bool: # type: ignore def _from_dict(cls, d: Dict[str, Any]) -> QueryFilter: # type: ignore if "name" not in d or "param" not in d: raise InvalidQueryFilter( - f"args must be a QueryFilter or a dict with 'name' and 'param' keys, " - f"got {d}" + "args must be a QueryFilter or a dict with 'name' and 'param' keys," + f" got {d}" ) modifier = Modifier(d["modifier"]) if "modifier" in d else None return QueryFilter(name=d["name"], param=str(d["param"]), modifier=modifier) @@ -91,8 +91,8 @@ def _from_arg(cls, arg: object) -> QueryFilter: return cls._from_dict(arg) else: raise InvalidQueryFilter( - f"args must be a QueryFilter or a dict with 'name' and 'param' keys, " - f"got {arg}" + "args must be a QueryFilter or a dict with 'name' and 'param' keys," + f" got {arg}" ) @classmethod diff --git a/darwin/future/core/utils/pathutils.py 
b/darwin/future/core/utils/pathutils.py index 68f5734ec..0ebcb6032 100644 --- a/darwin/future/core/utils/pathutils.py +++ b/darwin/future/core/utils/pathutils.py @@ -10,10 +10,6 @@ def attempt_open(path: Path) -> dict: - # TODO: Refactor this to be some sort of map method. Mypy doesn't like generic callables - # and will need to be typed - # reader: yaml.safe_load if path.suffix.lower() == ".yaml" else json.loads - # map_reader = {".yaml": yaml.safe_load, ".json": json.loads} try: if "yaml" in path.suffix.lower(): return open_yaml(path) diff --git a/darwin/future/core/workflows/list_workflows.py b/darwin/future/core/workflows/list_workflows.py index 9b9cf8ec6..f7c48dfd4 100644 --- a/darwin/future/core/workflows/list_workflows.py +++ b/darwin/future/core/workflows/list_workflows.py @@ -1,6 +1,5 @@ from typing import List, Optional, Tuple - from darwin.future.core.client import ClientCore from darwin.future.data_objects.workflow import WorkflowCore, WorkflowListValidator diff --git a/darwin/future/data_objects/item.py b/darwin/future/data_objects/item.py new file mode 100644 index 000000000..a52f542b7 --- /dev/null +++ b/darwin/future/data_objects/item.py @@ -0,0 +1,78 @@ +# @see: GraphotateWeb.Schemas.DatasetsV2.ItemRegistration.ExistingItem +from typing import Dict, List, Literal, Optional, Union +from uuid import UUID + +from pydantic import Field, validator + +from darwin.datatypes import NumberLike +from darwin.future.data_objects.pydantic_base import DefaultDarwin +from darwin.future.data_objects.typing import UnknownType + +ItemFrameRate = Union[NumberLike, Literal["native"]] + + +def validate_no_slashes(v: UnknownType) -> str: + assert isinstance(v, str), "Must be a string" + assert len(v) > 0, "cannot be empty" + assert r"^[^/].*$".find(v) == -1, "cannot start with a slash" + + return v + + +class ItemSlot(DefaultDarwin): + # GraphotateWeb.Schemas.DatasetsV2.ItemRegistration.ExistingSlot + + # Required fields + slot_name: str + file_name: str + + # Optional fields + storage_key: Optional[str] + as_frames: Optional[bool] + extract_views: Optional[bool] + fps: Optional[ItemFrameRate] = Field(None, alias="fps") + metadata: Optional[Dict[str, UnknownType]] = Field({}, alias="metadata") + tags: Optional[Union[List[str], Dict[str, str]]] = Field(None, alias="tags") + type: Literal["image", "video", "pdf", "dicom"] = Field(..., alias="type") + + @validator("slot_name") + def validate_slot_name(cls, v: UnknownType) -> str: + assert isinstance(v, str), "slot_name must be a string" + assert len(v) > 0, "slot_name cannot be empty" + return v + + @validator("storage_key") + def validate_storage_key(cls, v: UnknownType) -> str: + return validate_no_slashes(v) + + @validator("fps") + def validate_fps(cls, v: UnknownType) -> ItemFrameRate: + assert isinstance(v, (int, float, str)), "fps must be a number or 'native'" + if isinstance(v, (int, float)): + assert v >= 0.0, "fps must be a positive number" + if isinstance(v, str): + assert v == "native", "fps must be 'native' or a number greater than 0" + return v + + +class Item(DefaultDarwin): + name: str + path: str + archived: bool + dataset_id: int + id: UUID + layout: Dict[str, UnknownType] + slots: List[ItemSlot] + processing_status: str + priority: int + + @validator("name") + def validate_name(cls, v: UnknownType) -> str: + return validate_no_slashes(v) + + +class Folder(DefaultDarwin): + dataset_id: int + filtered_item_count: int + path: str + unfiltered_item_count: int diff --git a/darwin/future/data_objects/release.py 
b/darwin/future/data_objects/release.py index e1d7f1ac3..c7e5063e7 100644 --- a/darwin/future/data_objects/release.py +++ b/darwin/future/data_objects/release.py @@ -7,7 +7,10 @@ class ReleaseCore(DefaultDarwin): - """A class to manage all the information around a release on the darwin platform, including validation + """ + A class to manage all the information around a release on the darwin platform + including validation + Attributes ---------- name : str diff --git a/darwin/future/data_objects/team.py b/darwin/future/data_objects/team.py index da697ec90..277123a61 100644 --- a/darwin/future/data_objects/team.py +++ b/darwin/future/data_objects/team.py @@ -32,7 +32,9 @@ class TeamMemberCore(DefaultDarwin): class TeamCore(DefaultDarwin): - """A class to manage all the information around a Team on the darwin platform, including validation + """ + A class to manage all the information around a Team on the darwin platform + including validation Attributes ---------- diff --git a/darwin/future/exceptions.py b/darwin/future/exceptions.py index e68fd5e8f..72f5e760d 100644 --- a/darwin/future/exceptions.py +++ b/darwin/future/exceptions.py @@ -7,12 +7,14 @@ class DarwinException(Exception): """ Generic Darwin exception. - Used to differentiate from errors that originate in our code, and those that originate in - third-party libraries. + Used to differentiate from errors that originate in our code, and those that + originate in third-party libraries. - Extends `Exception` and adds a `parent_exception` field to store the original exception. + Extends `Exception` and adds a `parent_exception` field to store the original + exception. - Also has a `combined_exceptions` field to store a list of exceptions that were combined into + Also has a `combined_exceptions` field to store a list of exceptions that were + combined into """ parent_exception: Optional[Exception] = None diff --git a/darwin/future/meta/objects/dataset.py b/darwin/future/meta/objects/dataset.py index 80c636473..8696d41b6 100644 --- a/darwin/future/meta/objects/dataset.py +++ b/darwin/future/meta/objects/dataset.py @@ -15,10 +15,13 @@ class Dataset(MetaBase[DatasetCore]): - """Dataset Meta object. Facilitates the creation of Query objects, lazy loading of sub fields + """ + Dataset Meta object. Facilitates the creation of Query objects, lazy loading of + sub fields Args: - MetaBase (Dataset): Generic MetaBase object expanded by Dataset core object return type + MetaBase (Dataset): Generic MetaBase object expanded by Dataset core object + return type Returns: _type_: DatasetMeta @@ -126,5 +129,15 @@ def upload_files( preserve_folders: bool = False, verbose: bool = False, ) -> Dataset: - upload_data(self._element.name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose) # type: ignore + upload_data( + self._element.name, + files, # type: ignore + files_to_exclude, + fps, + path, + frames, + extract_views, + preserve_folders, + verbose, + ) return self diff --git a/darwin/future/meta/objects/team.py b/darwin/future/meta/objects/team.py index 310d44b9e..e0c9f144d 100644 --- a/darwin/future/meta/objects/team.py +++ b/darwin/future/meta/objects/team.py @@ -14,19 +14,21 @@ class Team(MetaBase[TeamCore]): - """Team Meta object. Facilitates the creation of Query objects, lazy loading of - sub fields like members unlike other MetaBase objects, does not extend the - __next__ function because it is not iterable. 
This is because Team is linked to - api key and only one team can be returned, but stores a list of teams for - consistency. This does mean however that to access the underlying team object, - you must access the first element of the list team = client.team[0] + """ + Team Meta object. Facilitates the creation of Query objects, lazy loading of sub + fields like members unlike other MetaBase objects, does not extend the __next__ + function because it is not iterable. This is because Team is linked to api key and + only one team can be returned, but stores a list of teams for consistency. This + does mean however that to access the underlying team object, you must access the + first element of the list + team = client.team[0] Args: - MetaBase (Team): Generic MetaBase object expanded by Team core object - return type + MetaBase (Team): Generic MetaBase object expanded by Team core object return + type Returns: - _type_: TeamMeta + Team: Team object """ def __init__(self, client: ClientCore, team: Optional[TeamCore] = None) -> None: diff --git a/darwin/future/meta/objects/workflow.py b/darwin/future/meta/objects/workflow.py index af6458382..3564f6412 100644 --- a/darwin/future/meta/objects/workflow.py +++ b/darwin/future/meta/objects/workflow.py @@ -59,7 +59,17 @@ def upload_files( auto_push: bool = True, ) -> Workflow: assert self._element.dataset is not None - upload_data(self.datasets[0].name, files, files_to_exclude, fps, path, frames, extract_views, preserve_folders, verbose) # type: ignore + upload_data( + self.datasets[0].name, + files, # type: ignore + files_to_exclude, + fps, + path, + frames, + extract_views, + preserve_folders, + verbose, + ) if auto_push: self.push_from_dataset_stage() return self diff --git a/darwin/future/meta/queries/dataset.py b/darwin/future/meta/queries/dataset.py index c87b8d915..5fbbc4aa7 100644 --- a/darwin/future/meta/queries/dataset.py +++ b/darwin/future/meta/queries/dataset.py @@ -37,8 +37,10 @@ def _collect(self) -> List[Dataset]: def _execute_filters( self, datasets: List[Dataset], filter: QueryFilter ) -> List[Dataset]: - """Executes filtering on the local list of datasets, applying special logic for role filtering - otherwise calls the parent method for general filtering on the values of the datasets + """ + Executes filtering on the local list of datasets, applying special logic for + role filtering otherwise calls the parent method for general filtering on the + values of the datasets Parameters ---------- diff --git a/darwin/future/meta/queries/team_member.py b/darwin/future/meta/queries/team_member.py index f6eaba922..a76e1f7a9 100644 --- a/darwin/future/meta/queries/team_member.py +++ b/darwin/future/meta/queries/team_member.py @@ -8,7 +8,9 @@ class TeamMemberQuery(Query[TeamMember]): - """TeamMemberQuery object with methods to manage filters, retrieve data, and execute filters + """ + TeamMemberQuery object with methods to manage filters, retrieve data, + and execute filters Methods: collect: Executes the query and returns the filtered data _execute_filter: Executes a filter on a list of objects @@ -30,8 +32,10 @@ def _collect(self) -> List[TeamMember]: def _execute_filter( self, members: List[TeamMember], filter: QueryFilter ) -> List[TeamMember]: - """Executes filtering on the local list of members, applying special logic for role filtering - otherwise calls the parent method for general filtering on the values of the members + """ + Executes filtering on the local list of members, applying special logic for + role filtering otherwise calls 
the parent method for general filtering on the + values of the members Parameters ---------- diff --git a/darwin/future/tests/core/datasets/test_list_datasets.py b/darwin/future/tests/core/datasets/test_list_datasets.py index a4c7ac4a2..f6ce5bdbe 100644 --- a/darwin/future/tests/core/datasets/test_list_datasets.py +++ b/darwin/future/tests/core/datasets/test_list_datasets.py @@ -46,5 +46,6 @@ def test_it_returns_an_error_if_the_client_returns_an_http_error( assert len(errors) == 1 assert isinstance(error := errors[0], HTTPError) + assert error.response is not None assert error.response.status_code == 400 assert not response diff --git a/darwin/future/tests/core/items/fixtures.py b/darwin/future/tests/core/items/fixtures.py index 856abc6bd..806997042 100644 --- a/darwin/future/tests/core/items/fixtures.py +++ b/darwin/future/tests/core/items/fixtures.py @@ -3,6 +3,53 @@ import pytest +from darwin.future.data_objects.item import Folder, Item + + +@pytest.fixture +def base_items() -> List[Item]: + return [ + Item( + name=f"test_{i}", + path="test_path", + dataset_id=1, + id=uuid4(), + archived=False, + layout={}, + slots=[], + processing_status="complete", + priority=0, + ) + for i in range(10) + ] + + +@pytest.fixture +def base_folders() -> List[Folder]: + return [ + Folder( + dataset_id=0, + filtered_item_count=1, + path=f"test_path_{i}", + unfiltered_item_count=1, + ) + for i in range(10) + ] + + +@pytest.fixture +def base_items_json(base_items: List[Item]) -> List[dict]: + items = [item.dict() for item in base_items] + # json library doesn't support UUIDs so need to be str'd + for item in items: + item["id"] = str(item["id"]) + return items + + +@pytest.fixture +def base_folders_json(base_folders: List[Folder]) -> List[dict]: + return [folder.dict() for folder in base_folders] + @pytest.fixture def UUIDs() -> List[UUID]: diff --git a/darwin/future/tests/core/items/test_get_items.py b/darwin/future/tests/core/items/test_get_items.py index e2af43eba..96c0fcf97 100644 --- a/darwin/future/tests/core/items/test_get_items.py +++ b/darwin/future/tests/core/items/test_get_items.py @@ -5,6 +5,9 @@ from darwin.future.core.client import ClientCore from darwin.future.core.items import get_item_ids, get_item_ids_stage +from darwin.future.core.items.get import get_item, list_folders, list_items +from darwin.future.core.types.common import QueryString +from darwin.future.data_objects.item import Folder, Item from darwin.future.tests.core.fixtures import * from darwin.future.tests.core.items.fixtures import * @@ -15,8 +18,8 @@ def test_get_item_ids( with responses.RequestsMock() as rsps: rsps.add( rsps.GET, - base_client.config.api_endpoint - + "v2/teams/default-team/items/ids?not_statuses=archived,error&sort[id]=desc&dataset_ids=1337", + base_client.config.api_endpoint + "v2/teams/default-team/items/ids" + "?not_statuses=archived,error&sort[id]=desc&dataset_ids=1337", json={"item_ids": UUIDs_str}, status=200, ) @@ -31,10 +34,61 @@ def test_get_item_ids_stage( with responses.RequestsMock() as rsps: rsps.add( rsps.GET, - base_client.config.api_endpoint - + f"v2/teams/default-team/items/ids?workflow_stage_ids={stage_id}&dataset_ids=1337", + base_client.config.api_endpoint + "v2/teams/default-team/items/ids" + f"?workflow_stage_ids={stage_id}&dataset_ids=1337", json={"item_ids": UUIDs_str}, status=200, ) item_ids = get_item_ids_stage(base_client, "default-team", "1337", stage_id) assert item_ids == UUIDs + + +def test_get_item( + base_items_json: List[dict], base_items: List[Item], base_client: ClientCore 
+) -> None: + uuid = str(base_items[0].id) + with responses.RequestsMock() as rsps: + rsps.add( + rsps.GET, + base_client.config.api_endpoint + f"v2/teams/default-team/items/{uuid}", + json=base_items_json[0], + status=200, + ) + item = get_item(base_client, "default-team", uuid) + assert item == base_items[0] + + +def test_list_items( + base_items_json: List[dict], base_items: List[Item], base_client: ClientCore +) -> None: + with responses.RequestsMock() as rsps: + rsps.add( + rsps.GET, + base_client.config.api_endpoint + + "v2/teams/default-team/items?dataset_ids=1337", + json={"items": base_items_json}, + status=200, + ) + items = list_items( + base_client, "default-team", QueryString({"dataset_ids": "1337"}) + ) + for item, comparator in zip(items, base_items): + assert item == comparator + + +def test_list_folders( + base_folders_json: List[dict], base_folders: List[Folder], base_client: ClientCore +) -> None: + with responses.RequestsMock() as rsps: + rsps.add( + rsps.GET, + base_client.config.api_endpoint + + "v2/teams/default-team/items/folders?dataset_ids=1337", + json={"folders": base_folders_json}, + status=200, + ) + folders = list_folders( + base_client, "default-team", QueryString({"dataset_ids": "1337"}) + ) + for folder, comparator in zip(folders, base_folders): + assert folder == comparator diff --git a/darwin/future/tests/core/test_query.py b/darwin/future/tests/core/test_query.py index 77fd612ab..8c5692390 100644 --- a/darwin/future/tests/core/test_query.py +++ b/darwin/future/tests/core/test_query.py @@ -90,7 +90,9 @@ def test_query_filter_functionality( ("!=", 1, 2, True), ], ) -def test_query_filter_filters(mod: Optional[str], param: Any, check: Any, expected: bool) -> None: # type: ignore +def test_query_filter_filters( # type: ignore + mod: Optional[str], param: Any, check: Any, expected: bool +) -> None: # test str if mod: modifier = Query.Modifier(mod) diff --git a/darwin/future/tests/core/types/test_querystring.py b/darwin/future/tests/core/types/test_querystring.py index bf38d054a..e92b0af94 100644 --- a/darwin/future/tests/core/types/test_querystring.py +++ b/darwin/future/tests/core/types/test_querystring.py @@ -1,8 +1,10 @@ -from darwin.future.core.types.common import QueryString from pytest import raises +from darwin.future.core.types.common import QueryString + -# happy and sad path tests for QueryString - should validate a dict of strings, and return a query string on str() +# happy and sad path tests for QueryString +# should validate a dict of strings, and return a query string on str() def test_querystring_happy_path() -> None: query_string = QueryString({"foo": "bar"}) assert str(query_string) == "?foo=bar" diff --git a/darwin/future/tests/data_objects/fixtures.py b/darwin/future/tests/data_objects/fixtures.py index 4e0a56a7d..5e83057f9 100644 --- a/darwin/future/tests/data_objects/fixtures.py +++ b/darwin/future/tests/data_objects/fixtures.py @@ -2,7 +2,6 @@ import pytest - test_data_path: Path = Path(__file__).parent / "workflow" / "data" valid_stage_json = test_data_path / "stage.json" valid_workflow_json = test_data_path / "workflow.json" diff --git a/darwin/future/tests/data_objects/workflow/test_wfedge.py b/darwin/future/tests/data_objects/workflow/test_wfedge.py index c20b8c5a9..bfa5f0dd4 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfedge.py +++ b/darwin/future/tests/data_objects/workflow/test_wfedge.py @@ -1,6 +1,5 @@ from pathlib import Path - from darwin.future.data_objects.workflow import WFEdgeCore test_data_path: Path = 
Path(__file__).parent / "data" diff --git a/darwin/future/tests/data_objects/workflow/test_wfstage_config.py b/darwin/future/tests/data_objects/workflow/test_wfstage_config.py index 53ea142b8..f60006d77 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfstage_config.py +++ b/darwin/future/tests/data_objects/workflow/test_wfstage_config.py @@ -1,6 +1,5 @@ from pathlib import Path - from darwin.future.data_objects.workflow import WFStageConfigCore test_data_path: Path = Path(__file__).parent / "data" diff --git a/darwin/future/tests/data_objects/workflow/test_wfuser.py b/darwin/future/tests/data_objects/workflow/test_wfuser.py index d69026ad5..1f52c21d9 100644 --- a/darwin/future/tests/data_objects/workflow/test_wfuser.py +++ b/darwin/future/tests/data_objects/workflow/test_wfuser.py @@ -1,6 +1,5 @@ from pathlib import Path - from darwin.future.data_objects.workflow import WFUserCore test_data_path: Path = Path(__file__).parent / "data" diff --git a/darwin/future/tests/meta/objects/test_stagemeta.py b/darwin/future/tests/meta/objects/test_stagemeta.py index 71b5035d4..f3958125f 100644 --- a/darwin/future/tests/meta/objects/test_stagemeta.py +++ b/darwin/future/tests/meta/objects/test_stagemeta.py @@ -86,19 +86,19 @@ def test_move_attached_files_to_stage( ) -def test_get_stage_id(stage_meta): +def test_get_stage_id(stage_meta: Stage) -> None: assert stage_meta.id == UUID("00000000-0000-0000-0000-000000000000") -def test_get_stage_name(stage_meta): +def test_get_stage_name(stage_meta: Stage) -> None: assert stage_meta.name == "test-stage" -def test_get_stage_type(stage_meta): +def test_get_stage_type(stage_meta: Stage) -> None: assert stage_meta.type == "annotate" -def test_get_stage_edges(stage_meta): +def test_get_stage_edges(stage_meta: Stage) -> None: edges = [ WFEdgeCore( name="edge_1", diff --git a/darwin/future/tests/meta/queries/test_workflow.py b/darwin/future/tests/meta/queries/test_workflow.py index b9761f786..c5630c761 100644 --- a/darwin/future/tests/meta/queries/test_workflow.py +++ b/darwin/future/tests/meta/queries/test_workflow.py @@ -239,11 +239,11 @@ def test_workflowquery_filters_stages_multiple( base_client.config.default_team ) responses.add(responses.GET, endpoint, json=base_filterable_workflows) - + param = "5445adcb-193d-4f76-adb0-0c6d5f5e4c04,53d2c997-6bb0-4766-803c-3c8d1fb21072" query = WorkflowQuery(base_client, []).where( { "name": "has_stages", - "param": "5445adcb-193d-4f76-adb0-0c6d5f5e4c04,53d2c997-6bb0-4766-803c-3c8d1fb21072", + "param": param, } ) workflows = query._collect() diff --git a/pyproject.toml b/pyproject.toml index 75362bb10..e9e2f36e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,6 +68,9 @@ line-length = 88 max-line-length = 88 ignore = ["E203", "W503", "E402"] +[tool.black] +line-length = 88 + [tool.poetry.dependencies] python = ">=3.7.0,<3.11" argcomplete = "^2.0.0"