From 7961af20780af96e489039224f12ec6bd4a4749c Mon Sep 17 00:00:00 2001 From: Erlend vollset Date: Fri, 8 Sep 2023 19:48:28 +0200 Subject: [PATCH] Support deleting constraints and indexes in data modeling (#1361) --- CHANGELOG.md | 7 ++ .../client/_api/data_modeling/containers.py | 65 +++++++++- cognite/client/_version.py | 2 +- .../data_classes/data_modeling/containers.py | 116 +++++++++++------- .../client/data_classes/data_modeling/ids.py | 2 + pyproject.toml | 2 +- .../test_data_modeling/test_containers.py | 11 ++ .../test_data_models/test_containers.py | 54 ++++++-- 8 files changed, 203 insertions(+), 56 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e5eaa589..8a832ab94 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,13 @@ Changes are grouped as follows - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. +## [6.23.0] - 2023-09-08 ### Added +- Support for deleting constraints and indexes on containers. + ### Changed +- The abstract class `Index` can no longer be instantiated. Use `BTreeIndex` or `InvertedIndex` instead. 
+ ## [6.22.0] - 2023-09-08 ### Added - `client.data_modeling.instances.subscribe` which lets you subscribe to a given diff --git a/cognite/client/_api/data_modeling/containers.py b/cognite/client/_api/data_modeling/containers.py index ff84ee8b0..d6aca8c12 100644 --- a/cognite/client/_api/data_modeling/containers.py +++ b/cognite/client/_api/data_modeling/containers.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Iterator, Sequence, cast, overload +from typing import Iterator, Literal, Sequence, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DATA_MODELING_DEFAULT_LIMIT_READ @@ -11,8 +11,10 @@ ContainerList, ) from cognite.client.data_classes.data_modeling.ids import ( + ConstraintIdentifier, ContainerId, ContainerIdentifier, + IndexIdentifier, _load_identifier, ) @@ -146,6 +148,67 @@ def delete(self, id: ContainerIdentifier | Sequence[ContainerIdentifier]) -> lis ) return [ContainerId(space=item["space"], external_id=item["externalId"]) for item in deleted_containers] + def delete_constraints(self, id: Sequence[ConstraintIdentifier]) -> list[ConstraintIdentifier]: + """`Delete one or more constraints `_ + + Args: + id (Sequence[ConstraintIdentifier]): The constraint identifier(s). + Returns: + list[ConstraintIdentifier]: The constraint(s) which have been deleted. + Examples: + + Delete constraints by id:: + + >>> from cognite.client import CogniteClient + >>> c = CogniteClient() + >>> c.data_modeling.containers.delete_constraints( + ... [(ContainerId("mySpace", "myContainer"), "myConstraint")] + ... ) + """ + return self._delete_constraints_or_indexes(id, "constraints") + + def delete_indexes(self, id: Sequence[IndexIdentifier]) -> list[IndexIdentifier]: + """`Delete one or more indexes `_ + + Args: + id (Sequence[IndexIdentifier]): The index identifier(s). + Returns: + list[IndexIdentifier]: The index(es) which have been deleted. 
+ Examples: + + Delete indexes by id:: + + >>> from cognite.client import CogniteClient + >>> c = CogniteClient() + >>> c.data_modeling.containers.delete_indexes( + ... [(ContainerId("mySpace", "myContainer"), "myIndex")] + ... ) + """ + return self._delete_constraints_or_indexes(id, "indexes") + + def _delete_constraints_or_indexes( + self, + id: Sequence[ConstraintIdentifier] | Sequence[IndexIdentifier], + constraint_or_index: Literal["constraints", "indexes"], + ) -> list[tuple[ContainerId, str]]: + res = self._post( + url_path=f"{self._RESOURCE_PATH}/{constraint_or_index}/delete", + json={ + "items": [ + { + "space": constraint_id[0].space, + "containerExternalId": constraint_id[0].external_id, + "identifier": constraint_id[1], + } + for constraint_id in id + ] + }, + ) + return [ + (ContainerId(space=item["space"], external_id=item["containerExternalId"]), item["identifier"]) + for item in res.json()["items"] + ] + def list( self, space: str | None = None, diff --git a/cognite/client/_version.py b/cognite/client/_version.py index 2e0141a2f..121bfd5a7 100644 --- a/cognite/client/_version.py +++ b/cognite/client/_version.py @@ -1,4 +1,4 @@ from __future__ import annotations -__version__ = "6.22.0" +__version__ = "6.23.0" __api_subversion__ = "V20220125" diff --git a/cognite/client/data_classes/data_modeling/containers.py b/cognite/client/data_classes/data_modeling/containers.py index 8e29ac3e5..f68f7c76f 100644 --- a/cognite/client/data_classes/data_modeling/containers.py +++ b/cognite/client/data_classes/data_modeling/containers.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from abc import ABC +from abc import ABC, abstractmethod from dataclasses import asdict, dataclass from typing import Any, Literal, cast @@ -16,8 +16,7 @@ PropertyType, ) from cognite.client.data_classes.data_modeling.ids import ContainerId -from cognite.client.utils._auxiliary import rename_and_exclude_keys -from cognite.client.utils._text import 
convert_all_keys_to_camel_case_recursive, convert_all_keys_to_snake_case +from cognite.client.utils._text import convert_all_keys_to_camel_case_recursive class ContainerCore(DataModelingResource): @@ -207,7 +206,7 @@ def __init__(self, space: str | None = None, include_global: bool = False) -> No self.include_global = include_global -@dataclass +@dataclass(frozen=True) class ContainerProperty: type: PropertyType nullable: bool = True @@ -221,10 +220,17 @@ def load(cls, data: dict[str, Any]) -> ContainerProperty: if "type" not in data: raise ValueError("Type not specified") if data["type"].get("type") == "direct": - data["type"] = DirectRelation.load(data["type"]) + type_: PropertyType = DirectRelation.load(data["type"]) else: - data["type"] = PropertyType.load(data["type"]) - return cls(**convert_all_keys_to_snake_case(data)) + type_ = PropertyType.load(data["type"]) + return cls( + type=type_, + nullable=data["nullable"], + auto_increment=data["autoIncrement"], + name=data.get("name"), + default_value=data.get("defaultValue"), + description=data.get("description"), + ) def dump(self, camel_case: bool = False) -> dict[str, str | dict]: output = asdict(self) @@ -233,40 +239,32 @@ def dump(self, camel_case: bool = False) -> dict[str, str | dict]: return convert_all_keys_to_camel_case_recursive(output) if camel_case else output -@dataclass +@dataclass(frozen=True) class Constraint(ABC): @classmethod - def _load(cls, data: dict) -> Constraint: - return cls(**convert_all_keys_to_snake_case(data)) - - @classmethod - def load(cls, data: dict) -> RequiresConstraintDefinition | UniquenessConstraintDefinition: + def load(cls, data: dict) -> RequiresConstraint | UniquenessConstraintDefinition: if data["constraintType"] == "requires": - return RequiresConstraintDefinition.load(data) + return RequiresConstraint.load(data) elif data["constraintType"] == "uniqueness": return UniquenessConstraintDefinition.load(data) raise ValueError(f"Invalid constraint type 
{data['constraintType']}") + @abstractmethod def dump(self, camel_case: bool = False) -> dict[str, str | dict]: - output = asdict(self) - return convert_all_keys_to_camel_case_recursive(output) if camel_case else output + raise NotImplementedError -@dataclass -class RequiresConstraintDefinition(Constraint): +@dataclass(frozen=True) +class RequiresConstraint(Constraint): require: ContainerId @classmethod - def load(cls, data: dict) -> RequiresConstraintDefinition: - output = cast( - RequiresConstraintDefinition, super()._load(rename_and_exclude_keys(data, exclude={"constraintType"})) - ) - if "require" in data: - output.require = ContainerId.load(data["require"]) - return output + def load(cls, data: dict) -> RequiresConstraint: + return cls(require=ContainerId.load(data["require"])) def dump(self, camel_case: bool = False) -> dict[str, str | dict]: - output = super().dump(camel_case) + as_dict = asdict(self) + output = convert_all_keys_to_camel_case_recursive(as_dict) if camel_case else as_dict if "require" in output and isinstance(output["require"], dict): output["require"] = self.require.dump(camel_case) key = "constraintType" if camel_case else "constraint_type" @@ -274,37 +272,67 @@ def dump(self, camel_case: bool = False) -> dict[str, str | dict]: return output -@dataclass -class UniquenessConstraintDefinition(Constraint): +@dataclass(frozen=True) +class UniquenessConstraint(Constraint): properties: list[str] @classmethod - def load(cls, data: dict) -> UniquenessConstraintDefinition: - return cast( - UniquenessConstraintDefinition, super()._load(rename_and_exclude_keys(data, exclude={"constraintType"})) - ) + def load(cls, data: dict) -> UniquenessConstraint: + return cls(properties=data["properties"]) def dump(self, camel_case: bool = False) -> dict[str, str | dict]: - output = super().dump() + as_dict = asdict(self) + output = convert_all_keys_to_camel_case_recursive(as_dict) if camel_case else as_dict key = "constraintType" if camel_case else 
"constraint_type" output[key] = "uniqueness" return output -@dataclass -class Index: +# Type aliases for backwards compatibility after renaming +# TODO: Remove in some future major version +RequiresConstraintDefinition = RequiresConstraint +UniquenessConstraintDefinition = UniquenessConstraint + + +@dataclass(frozen=True) +class Index(ABC): + @classmethod + def load(cls, data: dict) -> Index: + if data["indexType"] == "btree": + return BTreeIndex.load(data) + if data["indexType"] == "inverted": + return InvertedIndex.load(data) + raise ValueError(f"Invalid index type {data['indexType']}") + + @abstractmethod + def dump(self, camel_case: bool = False) -> dict[str, str | dict]: + raise NotImplementedError + + +@dataclass(frozen=True) +class BTreeIndex(Index): properties: list[str] - index_type: Literal["btree"] | str = "btree" cursorable: bool = False @classmethod - def load(cls, data: dict[str, Any]) -> Index: - data = convert_all_keys_to_snake_case(data) - # We want to avoid repeating the default values here (e.g. 
cursorable = False): - for key in set(data) - set(cls.__dataclass_fields__): - del data[key] - return cls(**data) + def load(cls, data: dict[str, Any]) -> BTreeIndex: + return cls(properties=data["properties"], cursorable=data["cursorable"]) def dump(self, camel_case: bool = False) -> dict[str, str | dict]: - output = asdict(self) - return convert_all_keys_to_camel_case_recursive(output) if camel_case else output + as_dict = asdict(self) + as_dict["indexType" if camel_case else "index_type"] = "btree" + return convert_all_keys_to_camel_case_recursive(as_dict) if camel_case else as_dict + + +@dataclass(frozen=True) +class InvertedIndex(Index): + properties: list[str] + + @classmethod + def load(cls, data: dict[str, Any]) -> InvertedIndex: + return cls(properties=data["properties"]) + + def dump(self, camel_case: bool = False) -> dict[str, str | dict]: + as_dict = asdict(self) + as_dict["indexType" if camel_case else "index_type"] = "inverted" + return convert_all_keys_to_camel_case_recursive(as_dict) if camel_case else as_dict diff --git a/cognite/client/data_classes/data_modeling/ids.py b/cognite/client/data_classes/data_modeling/ids.py index 36ce54f67..cf7bf027e 100644 --- a/cognite/client/data_classes/data_modeling/ids.py +++ b/cognite/client/data_classes/data_modeling/ids.py @@ -159,6 +159,8 @@ def version(self) -> str | None: ContainerIdentifier = Union[ContainerId, Tuple[str, str]] +ConstraintIdentifier = Tuple[ContainerId, str] +IndexIdentifier = Tuple[ContainerId, str] ViewIdentifier = Union[ViewId, Tuple[str, str], Tuple[str, str, str]] DataModelIdentifier = Union[DataModelId, Tuple[str, str], Tuple[str, str, str]] NodeIdentifier = Union[NodeId, Tuple[str, str, str]] diff --git a/pyproject.toml b/pyproject.toml index de830db1e..1fd0ecd22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "cognite-sdk" -version = "6.22.0" +version = "6.23.0" description = "Cognite Python SDK" readme = "README.md" diff --git 
a/tests/tests_integration/test_api/test_data_modeling/test_containers.py b/tests/tests_integration/test_api/test_data_modeling/test_containers.py index 046b64a26..4a76823fa 100644 --- a/tests/tests_integration/test_api/test_data_modeling/test_containers.py +++ b/tests/tests_integration/test_api/test_data_modeling/test_containers.py @@ -19,6 +19,7 @@ Text, View, ) +from cognite.client.data_classes.data_modeling.containers import BTreeIndex, UniquenessConstraint from cognite.client.exceptions import CogniteAPIError @@ -65,6 +66,8 @@ def test_apply_retrieve_and_delete(self, cognite_client: CogniteClient, integrat description="Integration test, should not persist", name="Create and delete container", used_for="node", + constraints={"uniqueName": UniquenessConstraint(properties=["name"])}, + indexes={"nameIdx": BTreeIndex(properties=["name"])}, ) created: Container | None = None deleted_ids: list[ContainerId] = [] @@ -80,6 +83,14 @@ def test_apply_retrieve_and_delete(self, cognite_client: CogniteClient, integrat assert retrieved.as_apply().dump() == new_container.dump() # Act + deleted_indexes = cognite_client.data_modeling.containers.delete_indexes( + [(new_container.as_id(), "nameIdx")] + ) + assert deleted_indexes == [(new_container.as_id(), "nameIdx")] + deleted_constraints = cognite_client.data_modeling.containers.delete_constraints( + [(new_container.as_id(), "uniqueName")] + ) + assert deleted_constraints == [(new_container.as_id(), "uniqueName")] deleted_ids = cognite_client.data_modeling.containers.delete(new_container.as_id()) retrieved_deleted = cognite_client.data_modeling.containers.retrieve(new_container.as_id()) diff --git a/tests/tests_unit/test_data_classes/test_data_models/test_containers.py b/tests/tests_unit/test_data_classes/test_data_models/test_containers.py index 2ecc3fe1b..d9d280692 100644 --- a/tests/tests_unit/test_data_classes/test_data_models/test_containers.py +++ b/tests/tests_unit/test_data_classes/test_data_models/test_containers.py @@ 
-3,7 +3,7 @@ from cognite.client.data_classes.data_modeling.containers import Constraint, Index -class TestConstraintIdentifier: +class TestConstraint: @pytest.mark.parametrize( "data", [ @@ -19,22 +19,58 @@ def test_load_dump(self, data: dict) -> None: assert data == actual -class TestIndexIdentifier: - @pytest.mark.parametrize("data", [{"properties": ["name", "fullName"], "indexType": "btree", "cursorable": True}]) +class TestIndex: + @pytest.mark.parametrize( + "data", + [ + {"properties": ["name", "fullName"], "indexType": "btree", "cursorable": True}, + {"properties": ["name", "fullName"], "indexType": "inverted"}, + ], + ) def test_load_dump(self, data: dict) -> None: actual = Index.load(data).dump(camel_case=True) assert data == actual - @pytest.mark.parametrize("data", [{"properties": ["name"]}]) - def test_load_dump__default_values_are_used(self, data: dict) -> None: - actual = Index.load(data).dump(camel_case=False) - data.update(index_type="btree", cursorable=False) + @pytest.mark.parametrize( + "data", + [ + {"this-key-is-new-sooo-new": 42, "properties": ["name"], "indexType": "btree", "cursorable": True}, + {"this-key-is-new-sooo-new": 42, "properties": ["name"], "indexType": "inverted"}, + ], + ) + def test_load_dump__no_fail_on_unseen_key(self, data: dict) -> None: + actual = Index.load(data).dump(camel_case=True) + data.pop("this-key-is-new-sooo-new") + assert data == actual + + +class TestConstraints: + @pytest.mark.parametrize( + "data", + [ + { + "require": {"type": "container", "space": "mySpace", "externalId": "myExternalId"}, + "constraintType": "requires", + }, + {"properties": ["name", "fullName"], "constraintType": "uniqueness"}, + ], + ) + def test_load_dump(self, data: dict) -> None: + actual = Constraint.load(data).dump(camel_case=True) assert data == actual @pytest.mark.parametrize( - "data", [{"this-key-is-new-sooo-new": 42, "properties": ["name"], "indexType": "best-tree", "cursorable": True}] + "data", + [ + 
{"this-key-is-new-sooo-new": 42, "properties": ["name"], "constraintType": "uniqueness"}, + { + "this-key-is-new-sooo-new": 42, + "require": {"space": "hehe", "externalId": "hoho", "type": "container"}, + "constraintType": "requires", + }, + ], ) def test_load_dump__no_fail_on_unseen_key(self, data: dict) -> None: - actual = Index.load(data).dump(camel_case=True) + actual = Constraint.load(data).dump(camel_case=True) data.pop("this-key-is-new-sooo-new") assert data == actual