merged from master
ChristofferEdlund committed Oct 17, 2023
2 parents 67dd274 + e958307 commit d128a18
Showing 68 changed files with 951 additions and 512 deletions.
6 changes: 1 addition & 5 deletions .vscode/settings.json
@@ -13,15 +13,11 @@
"editor.insertSpaces": true,
"editor.tabSize": 2
},
"python.formatting.blackPath": "black",
"python.formatting.provider": "black",
"python.linting.mypyEnabled": true,
"isort.args": [
"--profile",
"black"
],
"python.analysis.autoImportCompletions": true,
"python.testing.pytestEnabled": true,
"python.linting.enabled": true,
"python.analysis.typeCheckingMode": "basic"
"python.analysis.typeCheckingMode": "basic",
}
1 change: 0 additions & 1 deletion darwin/dataset/local_dataset.py
@@ -75,7 +75,6 @@ def __init__(
self._validate_inputs(partition, split_type, annotation_type)
# Get the list of classes


annotation_types = [self.annotation_type]
# We fetch bounding_boxes annotations from selected polygons as well
if self.annotation_type == "bounding_boxes":
2 changes: 0 additions & 2 deletions darwin/dataset/split_manager.py
@@ -230,8 +230,6 @@ def _stratified_split(
return

for stratified_type in stratified_types:


if stratified_type == "bounding_box":
class_annotation_types = [stratified_type, "polygon"]
else:
51 changes: 39 additions & 12 deletions darwin/future/core/client.py
@@ -1,7 +1,7 @@
from __future__ import annotations

from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Union, overload
from typing import Callable, Dict, Optional, overload
from urllib.parse import urlparse

import requests
@@ -41,7 +41,10 @@ def validate_base_url(cls, v: str) -> str:
if not v.endswith("/"):
v += "/"
check = urlparse(v)
assert check.scheme in {"http", "https"}, "base_url must start with http or https"
assert check.scheme in {
"http",
"https",
}, "base_url must start with http or https"
assert check.netloc, "base_url must contain a domain"
return v

@@ -136,7 +139,9 @@ def __init__(self, config: DarwinConfig, retries: Optional[Retry] = None) -> Non
self.config = config
self.session = requests.Session()
if not retries:
retries = Retry(total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504])
retries = Retry(
total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504]
)
self._setup_session(retries)
self._mappings = {
"get": self.session.get,
@@ -153,20 +158,32 @@ def _setup_session(self, retries: Retry) -> None:

@property
def headers(self) -> Dict[str, str]:
http_headers: Dict[str, str] = {"Content-Type": "application/json", "Accept": "application/json"}
http_headers: Dict[str, str] = {
"Content-Type": "application/json",
"Accept": "application/json",
}
if self.config.api_key:
http_headers["Authorization"] = f"ApiKey {self.config.api_key}"
return http_headers

@overload
def _generic_call(self, method: Callable[[str], requests.Response], endpoint: str) -> dict:
def _generic_call(
self, method: Callable[[str], requests.Response], endpoint: str
) -> dict:
...

@overload
def _generic_call(self, method: Callable[[str, dict], requests.Response], endpoint: str, payload: dict) -> dict:
def _generic_call(
self,
method: Callable[[str, dict], requests.Response],
endpoint: str,
payload: dict,
) -> dict:
...

def _generic_call(self, method: Callable, endpoint: str, payload: Optional[dict] = None) -> JSONType:
def _generic_call(
self, method: Callable, endpoint: str, payload: Optional[dict] = None
) -> JSONType:
endpoint = self._sanitize_endpoint(endpoint)
url = self.config.api_endpoint + endpoint
if payload is not None:
@@ -179,24 +196,34 @@ def _generic_call(self, method: Callable, endpoint: str, payload: Optional[dict]

return response.json()

def _contain_qs_and_endpoint(self, endpoint: str, query_string: Optional[QueryString] = None) -> str:
def _contain_qs_and_endpoint(
self, endpoint: str, query_string: Optional[QueryString] = None
) -> str:
if not query_string:
return endpoint

assert "?" not in endpoint
return endpoint + str(query_string)

def get(self, endpoint: str, query_string: Optional[QueryString] = None) -> JSONType:
return self._generic_call(self.session.get, self._contain_qs_and_endpoint(endpoint, query_string))
def get(
self, endpoint: str, query_string: Optional[QueryString] = None
) -> JSONType:
return self._generic_call(
self.session.get, self._contain_qs_and_endpoint(endpoint, query_string)
)

def put(self, endpoint: str, data: dict) -> JSONType:
return self._generic_call(self.session.put, endpoint, data)

def post(self, endpoint: str, data: dict) -> JSONType:
return self._generic_call(self.session.post, endpoint, data)

def delete(self, endpoint: str, query_string: Optional[QueryString] = None) -> JSONType:
return self._generic_call(self.session.delete, self._contain_qs_and_endpoint(endpoint, query_string))
def delete(
self, endpoint: str, query_string: Optional[QueryString] = None
) -> JSONType:
return self._generic_call(
self.session.delete, self._contain_qs_and_endpoint(endpoint, query_string)
)

def patch(self, endpoint: str, data: dict) -> JSONType:
return self._generic_call(self.session.patch, endpoint, data)
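Note on the retry change above: the diff shows the Retry policy being built in __init__ but not how _setup_session applies it to the requests.Session. A minimal sketch of the conventional approach, assuming the standard HTTPAdapter mechanism (the real body of _setup_session is not part of this hunk):

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

def setup_session(session: requests.Session, retries: Retry) -> None:
    # Mount one adapter per scheme so every request made through the session
    # inherits the same retry/backoff policy.
    adapter = HTTPAdapter(max_retries=retries)
    session.mount("http://", adapter)
    session.mount("https://", adapter)

session = requests.Session()
setup_session(
    session,
    Retry(total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504]),
)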
8 changes: 4 additions & 4 deletions darwin/future/core/datasets/__init__.py
@@ -1,4 +1,4 @@
from darwin.future.core.datasets.create_dataset import *
from darwin.future.core.datasets.get_dataset import *
from darwin.future.core.datasets.list_datasets import *
from darwin.future.core.datasets.remove_dataset import *
from darwin.future.core.datasets.create_dataset import create_dataset
from darwin.future.core.datasets.get_dataset import get_dataset
from darwin.future.core.datasets.list_datasets import list_datasets
from darwin.future.core.datasets.remove_dataset import remove_dataset
4 changes: 3 additions & 1 deletion darwin/future/core/datasets/remove_dataset.py
@@ -4,7 +4,9 @@
from darwin.future.exceptions import DatasetNotFound


def remove_dataset(api_client: ClientCore, id: int, team_slug: Optional[str] = None) -> int:
def remove_dataset(
api_client: ClientCore, id: int, team_slug: Optional[str] = None
) -> int:
"""
Removes the dataset with the given id for the given team
4 changes: 2 additions & 2 deletions darwin/future/core/items/__init__.py
@@ -1,2 +1,2 @@
from darwin.future.core.items.get import *
from darwin.future.core.items.move_items import *
from darwin.future.core.items.get import get_item_ids, get_item_ids_stage
from darwin.future.core.items.move_items import move_items_to_stage
25 changes: 19 additions & 6 deletions darwin/future/core/items/get.py
@@ -5,7 +5,9 @@
from darwin.future.core.types.common import QueryString


def get_item_ids(api_client: ClientCore, team_slug: str, dataset_id: Union[str, int]) -> List[UUID]:
def get_item_ids(
api_client: ClientCore, team_slug: str, dataset_id: Union[str, int]
) -> List[UUID]:
"""
Returns a list of item ids for the dataset
@@ -26,15 +28,24 @@ def get_item_ids(api_client: ClientCore, team_slug: str, dataset_id: Union[str,

response = api_client.get(
f"/v2/teams/{team_slug}/items/ids",
QueryString({"not_statuses": "archived,error", "sort[id]": "desc", "dataset_ids": str(dataset_id)}),
QueryString(
{
"not_statuses": "archived,error",
"sort[id]": "desc",
"dataset_ids": str(dataset_id),
}
),
)
assert type(response) == dict
assert isinstance(response, dict)
uuids = [UUID(uuid) for uuid in response["item_ids"]]
return uuids


def get_item_ids_stage(
api_client: ClientCore, team_slug: str, dataset_id: Union[int, str], stage_id: Union[UUID, str]
api_client: ClientCore,
team_slug: str,
dataset_id: Union[int, str],
stage_id: Union[UUID, str],
) -> List[UUID]:
"""
Returns a list of item ids for the stage
@@ -57,8 +68,10 @@ def get_item_ids_stage(
"""
response = api_client.get(
f"/v2/teams/{team_slug}/items/ids",
QueryString({"workflow_stage_ids": str(stage_id), "dataset_ids": str(dataset_id)}),
QueryString(
{"workflow_stage_ids": str(stage_id), "dataset_ids": str(dataset_id)}
),
)
assert type(response) == dict
assert isinstance(response, dict)
uuids = [UUID(uuid) for uuid in response["item_ids"]]
return uuids
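The switch from assert type(response) == dict to assert isinstance(response, dict) is more than formatting: isinstance also accepts dict subclasses and is what linters (flake8 E721) recommend for type checks. A tiny self-contained illustration using a hypothetical dict subclass:

class AttrDict(dict):
    """Hypothetical dict subclass, e.g. a convenience wrapper around a JSON payload."""

response = AttrDict(item_ids=[])

print(type(response) == dict)      # False: exact-type comparison rejects subclasses
print(isinstance(response, dict))  # True: isinstance accepts dict and its subclasses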
7 changes: 6 additions & 1 deletion darwin/future/core/items/move_items.py
@@ -6,7 +6,12 @@


def move_items_to_stage(
api_client: ClientCore, team_slug: str, workflow_id: UUID, dataset_id: int, stage_id: UUID, item_ids: List[UUID]
api_client: ClientCore,
team_slug: str,
workflow_id: UUID,
dataset_id: int,
stage_id: UUID,
item_ids: List[UUID],
) -> JSONType:
"""
Moves a list of items to a stage
5 changes: 3 additions & 2 deletions darwin/future/core/team/get_team.py
@@ -1,7 +1,6 @@
from typing import List, Optional, Tuple

from darwin.future.core.client import ClientCore
from darwin.future.core.types.common import JSONType
from darwin.future.data_objects.team import TeamCore, TeamMemberCore


@@ -13,7 +12,9 @@ def get_team(client: ClientCore, team_slug: Optional[str] = None) -> TeamCore:
return TeamCore.parse_obj(response)


def get_team_members(client: ClientCore) -> Tuple[List[TeamMemberCore], List[Exception]]:
def get_team_members(
client: ClientCore,
) -> Tuple[List[TeamMemberCore], List[Exception]]:
response = client.get("/memberships")
members = []
errors = []
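get_team_members collects per-record failures instead of raising on the first bad membership, returning a (members, errors) pair. A hedged usage sketch of how a caller might consume that tuple; the client value is assumed to be an already-configured ClientCore, which this hunk does not show:

from darwin.future.core.team.get_team import get_team_members

# `client` is assumed here: an already-configured ClientCore instance.
members, errors = get_team_members(client)

for error in errors:
    # Each entry is the exception raised while parsing one membership record.
    print(f"skipped a membership record: {error}")

print(f"loaded {len(members)} team members")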
2 changes: 1 addition & 1 deletion darwin/future/core/types/__init__.py
@@ -1 +1 @@
from .common import *
from .common import JSONType, QueryString, TeamSlug
10 changes: 6 additions & 4 deletions darwin/future/core/types/common.py
@@ -1,7 +1,5 @@
from typing import Any, Dict, List, Union

import pydantic
from pydantic import BaseModel

from darwin.future.data_objects import validators as darwin_validators
from darwin.future.data_objects.typing import UnknownType
@@ -21,8 +19,12 @@ def __get_validators__(cls): # type: ignore

@classmethod
def validate(cls, v: str) -> "TeamSlug":
assert len(v) < cls.max_length, f"maximum length for team slug is {cls.max_length}"
assert len(v) > cls.min_length, f"minimum length for team slug is {cls.min_length}"
assert (
len(v) < cls.max_length
), f"maximum length for team slug is {cls.max_length}"
assert (
len(v) > cls.min_length
), f"minimum length for team slug is {cls.min_length}"
if not isinstance(v, str):
raise TypeError("string required")
modified_value = darwin_validators.parse_name(v)
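TeamSlug follows the pydantic v1 custom-type protocol: __get_validators__ yields callables that pydantic runs whenever the type appears as a model field. A minimal, self-contained sketch of that protocol; the Slug and Team names below are illustrative, not taken from the repository:

from pydantic import BaseModel  # pydantic v1 style, matching the parse_obj calls elsewhere in the diff

class Slug(str):
    """Illustrative custom string type validated by pydantic v1."""

    @classmethod
    def __get_validators__(cls):
        # pydantic calls each yielded validator, in order, on the raw field value.
        yield cls.validate

    @classmethod
    def validate(cls, v: str) -> "Slug":
        if not isinstance(v, str):
            raise TypeError("string required")
        if not 1 < len(v) < 256:
            raise ValueError("slug length out of range")
        return cls(v.lower().replace(" ", "-"))

class Team(BaseModel):
    slug: Slug

print(Team.parse_obj({"slug": "My Team"}).slug)  # -> my-team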
33 changes: 23 additions & 10 deletions darwin/future/core/types/query.py
@@ -9,9 +9,7 @@
Generic,
List,
Optional,
Tuple,
TypeVar,
overload,
)

from darwin.future.core.client import ClientCore
@@ -54,7 +52,9 @@ class QueryFilter(DefaultDarwin):

def filter_attr(self, attr: Any) -> bool: # type: ignore
caster: Callable[[str], Any] = type(attr) # type: ignore
param = caster(self.param) # attempt to cast the param to the type of the attribute
param = caster(
self.param
) # attempt to cast the param to the type of the attribute
if self.modifier is None:
return attr == param
elif self.modifier == Modifier.GREATER_EQUAL:
@@ -75,7 +75,9 @@ def filter_attr(self, attr: Any) -> bool: # type: ignore
@classmethod
def _from_dict(cls, d: Dict[str, Any]) -> QueryFilter: # type: ignore
if "name" not in d or "param" not in d:
raise InvalidQueryFilter(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {d}")
raise InvalidQueryFilter(
f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {d}"
)
modifier = Modifier(d["modifier"]) if "modifier" in d else None
return QueryFilter(name=d["name"], param=str(d["param"]), modifier=modifier)

@@ -95,7 +97,9 @@ def _from_arg(cls, arg: object) -> QueryFilter:
elif isinstance(arg, dict):
return cls._from_dict(arg)
else:
raise InvalidQueryFilter(f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {arg}")
raise InvalidQueryFilter(
f"args must be a QueryFilter or a dict with 'name' and 'param' keys, got {arg}"
)

@classmethod
def _from_kwarg(cls, key: str, value: str) -> QueryFilter:
@@ -117,9 +121,12 @@ class Query(Generic[T], ABC):
"""

def __init__(
self, client: ClientCore, filters: Optional[List[QueryFilter]] = None, meta_params: Optional[Param] = None
self,
client: ClientCore,
filters: Optional[List[QueryFilter]] = None,
meta_params: Optional[Param] = None,
):
self.meta_params: dict = meta_params or dict()
self.meta_params: dict = meta_params or {}
self.client = client
self.filters = filters or []
self.results: Optional[List[T]] = None
@@ -130,12 +137,16 @@ def filter(self, filter: QueryFilter) -> Query[T]:

def __add__(self, filter: QueryFilter) -> Query[T]:
self._changed_since_last = True
return self.__class__(self.client, filters=[*self.filters, filter], meta_params=self.meta_params)
return self.__class__(
self.client, filters=[*self.filters, filter], meta_params=self.meta_params
)

def __sub__(self, filter: QueryFilter) -> Query[T]:
self._changed_since_last = True
return self.__class__(
self.client, filters=[f for f in self.filters if f != filter], meta_params=self.meta_params
self.client,
filters=[f for f in self.filters if f != filter],
meta_params=self.meta_params,
)

def __iadd__(self, filter: QueryFilter) -> Query[T]:
@@ -212,4 +223,6 @@ def first(self) -> Optional[T]:
return self.results[0]

def _generic_execute_filter(self, objects: List[T], filter: QueryFilter) -> List[T]:
return [m for m in objects if filter.filter_attr(getattr(m._element, filter.name))]
return [
m for m in objects if filter.filter_attr(getattr(m._element, filter.name))
]
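QueryFilter.filter_attr casts its string param to the type of the attribute it is compared against, so one filter model covers ints, strings, and so on. A small hedged example, assuming QueryFilter and Modifier are importable from darwin.future.core.types.query (the file shown above) and that Modifier.GREATER_EQUAL maps to attr >= param:

from darwin.future.core.types.query import Modifier, QueryFilter

# Equality filter: "100" is cast to int because the attribute is an int.
eq = QueryFilter(name="id", param="100")
print(eq.filter_attr(100))   # True
print(eq.filter_attr("99"))  # False: the attribute is a str here, so "99" == "100" is compared

# A modifier changes the comparison; GREATER_EQUAL presumably compares attr >= param.
ge = QueryFilter(name="id", param="100", modifier=Modifier.GREATER_EQUAL)
print(ge.filter_attr(250))   # True under that assumption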
6 changes: 4 additions & 2 deletions darwin/future/core/utils/pathutils.py
@@ -1,6 +1,6 @@
import json
from pathlib import Path
from typing import Any, Optional
from typing import Optional

import yaml

@@ -29,7 +29,9 @@ def attempt_open(path: Path) -> dict:
return open_json(path, encoding)
except Exception:
pass
raise UnrecognizableFileEncoding(f"Unable to load file {path} with any encodings: {ENCODINGS}")
raise UnrecognizableFileEncoding(
f"Unable to load file {path} with any encodings: {ENCODINGS}"
)


def open_yaml(path: Path, encoding: Optional[str] = None) -> dict:
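Only the tail of attempt_open is visible in this hunk. A hedged reconstruction of the pattern it appears to implement: try each candidate encoding in turn and raise UnrecognizableFileEncoding only after all of them fail. The ENCODINGS values and the stand-in exception below are illustrative:

from pathlib import Path

ENCODINGS = ["utf-8", "utf-8-sig", "latin-1"]  # illustrative; the real constant lives in pathutils.py

class UnrecognizableFileEncoding(Exception):
    """Stand-in for darwin.future.exceptions.UnrecognizableFileEncoding."""

def read_text_any_encoding(path: Path) -> str:
    for encoding in ENCODINGS:
        try:
            return path.read_text(encoding=encoding)
        except Exception:
            continue  # fall through to the next candidate encoding
    raise UnrecognizableFileEncoding(
        f"Unable to load file {path} with any encodings: {ENCODINGS}"
    )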
6 changes: 3 additions & 3 deletions darwin/future/core/workflows/__init__.py
@@ -1,3 +1,3 @@
from darwin.future.core.workflows.get_workflow import *
from darwin.future.core.workflows.get_workflows import *
from darwin.future.core.workflows.list_workflows import *
from darwin.future.core.workflows.get_workflow import get_workflow
from darwin.future.core.workflows.get_workflows import get_workflows
from darwin.future.core.workflows.list_workflows import list_workflows