Optional fields containers (#1439)
doctrino authored Oct 24, 2023
1 parent c8435fd commit dce2dda
Showing 8 changed files with 83 additions and 20 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -17,6 +17,11 @@ Changes are grouped as follows
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.

## [6.34.2] - 2023-10-23
### Fixed
- Loading a `ContainerApply` from source failed with a `KeyError` if the optional fields `nullable`, `autoIncrement`, or `cursorable`
  were not set in the `ContainerProperty` or `BTreeIndex` data. This is now fixed.

## [6.34.1] - 2023-10-23
### Added
- Support for setting `data_set_id` and `metadata` in `ThreeDModelsAPI.create`.
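For context, a minimal sketch of the scenario this entry describes (the import path for `ContainerApply` is assumed from the changed `containers.py` module; the payload mirrors the integration test added in this commit):

```python
from cognite.client.data_classes.data_modeling.containers import ContainerApply

data = {
    "externalId": "my_container",
    "space": "my_space",
    "properties": {"name": {"type": {"type": "text"}}},
    "indexes": {"nameIdx": {"properties": ["name"], "indexType": "btree"}},
}

# Before 6.34.2 this raised KeyError('nullable'), because the optional keys
# were read with data["nullable"], data["autoIncrement"] and data["cursorable"].
container = ContainerApply.load(data)
# The omitted optional fields are left as None instead of being defaulted.
```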
2 changes: 1 addition & 1 deletion cognite/client/_version.py
@@ -1,4 +1,4 @@
from __future__ import annotations

__version__ = "6.34.1"
__version__ = "6.34.2"
__api_subversion__ = "V20220125"
36 changes: 22 additions & 14 deletions cognite/client/data_classes/data_modeling/containers.py
@@ -5,6 +5,8 @@
from dataclasses import asdict, dataclass
from typing import Any, Literal

from typing_extensions import Self

from cognite.client.data_classes._base import (
CogniteFilter,
CogniteResourceList,
@@ -56,7 +58,7 @@ def __init__(
self.indexes = indexes

@classmethod
def load(cls, resource: dict | str) -> ContainerCore:
def load(cls, resource: dict | str) -> Self:
data = json.loads(resource) if isinstance(resource, str) else resource
if "constraints" in data:
data["constraints"] = {k: Constraint.load(v) for k, v in data["constraints"].items()} or None
@@ -225,17 +227,21 @@ def load(cls, data: dict[str, Any]) -> ContainerProperty:
type_ = PropertyType.load(data["type"])
return cls(
type=type_,
nullable=data["nullable"],
auto_increment=data["autoIncrement"],
# If nothing is specified, we will pass through null values
nullable=data.get("nullable"), # type: ignore[arg-type]
auto_increment=data.get("autoIncrement"), # type: ignore[arg-type]
name=data.get("name"),
default_value=data.get("defaultValue"),
description=data.get("description"),
)

def dump(self, camel_case: bool = False) -> dict[str, str | dict]:
output = asdict(self)
if "type" in output and isinstance(output["type"], dict):
output: dict[str, str | dict] = {}
if self.type:
output["type"] = self.type.dump(camel_case)
for key in ["nullable", "auto_increment", "name", "default_value", "description"]:
if (value := getattr(self, key)) is not None:
output[key] = value
return convert_all_keys_to_camel_case_recursive(output) if camel_case else output


@@ -316,12 +322,14 @@ class BTreeIndex(Index):

@classmethod
def load(cls, data: dict[str, Any]) -> BTreeIndex:
return cls(properties=data["properties"], cursorable=data["cursorable"])
return cls(properties=data["properties"], cursorable=data.get("cursorable")) # type: ignore[arg-type]

def dump(self, camel_case: bool = False) -> dict[str, str | dict]:
as_dict = asdict(self)
as_dict["indexType" if camel_case else "index_type"] = "btree"
return convert_all_keys_to_camel_case_recursive(as_dict) if camel_case else as_dict
def dump(self, camel_case: bool = False) -> dict[str, Any]:
dumped: dict[str, Any] = {"properties": self.properties}
if self.cursorable is not None:
dumped["cursorable"] = self.cursorable
dumped["indexType" if camel_case else "index_type"] = "btree"
return convert_all_keys_to_camel_case_recursive(dumped) if camel_case else dumped


@dataclass(frozen=True)
@@ -332,7 +340,7 @@ class InvertedIndex(Index):
def load(cls, data: dict[str, Any]) -> InvertedIndex:
return cls(properties=data["properties"])

def dump(self, camel_case: bool = False) -> dict[str, str | dict]:
as_dict = asdict(self)
as_dict["indexType" if camel_case else "index_type"] = "inverted"
return convert_all_keys_to_camel_case_recursive(as_dict) if camel_case else as_dict
def dump(self, camel_case: bool = False) -> dict[str, Any]:
dumped: dict[str, Any] = {"properties": self.properties}
dumped["indexType" if camel_case else "index_type"] = "inverted"
return convert_all_keys_to_camel_case_recursive(dumped) if camel_case else dumped
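A quick round-trip sketch of the new `load`/`dump` behaviour, mirroring the unit tests added further down (imports as in those tests): optional fields that are not provided stay `None` and are omitted from the dump.

```python
from cognite.client.data_classes.data_modeling.containers import ContainerProperty, Index

prop = ContainerProperty.load({"type": {"type": "text", "list": False, "collation": "ucs_basic"}})
assert prop.nullable is None and prop.auto_increment is None  # left unset, not defaulted
assert prop.dump(camel_case=True) == {"type": {"type": "text", "list": False, "collation": "ucs_basic"}}

index = Index.load({"properties": ["name"], "indexType": "btree"})  # dispatches to BTreeIndex
assert index.cursorable is None
assert index.dump(camel_case=True) == {"properties": ["name"], "indexType": "btree"}
```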
12 changes: 10 additions & 2 deletions cognite/client/data_classes/data_modeling/data_types.py
@@ -46,6 +46,9 @@ class PropertyType(ABC):
def dump(self, camel_case: bool = False) -> dict[str, Any]:
output = asdict(self)
output["type"] = self._type
for key in list(output):
if output[key] is None:
output.pop(key)
output = rename_and_exclude_keys(output, aliases=_PROPERTY_ALIAS_INV)
return convert_all_keys_recursive(output, camel_case)

@@ -57,6 +60,8 @@ def load(cls, data: dict) -> PropertyType:
data = convert_all_keys_to_snake_case(rename_and_exclude_keys(data, aliases=_PROPERTY_ALIAS, exclude={"type"}))

if type_cls := _TYPE_LOOKUP.get(type_):
if type_cls is DirectRelation:
return DirectRelation.load(data)
try:
return type_cls(**data)
except TypeError:
@@ -160,8 +165,11 @@ class DirectRelation(PropertyType):

def dump(self, camel_case: bool = False) -> dict:
output = super().dump(camel_case)
if "container" in output and isinstance(output["container"], dict):
output["container"]["type"] = "container"
if "container" in output:
if isinstance(output["container"], dict):
output["container"]["type"] = "container"
elif output["container"] is None:
output.pop("container")
return output

@classmethod
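The same idea applied to the property types (a hedged sketch; the class names and module path are taken from the diff above): `"direct"` is now routed through `DirectRelation.load`, and fields left as `None`, such as `container`, are dropped from the dump instead of being serialized as explicit nulls.

```python
from cognite.client.data_classes.data_modeling.data_types import DirectRelation, PropertyType

relation = PropertyType.load({"type": "direct"})
assert isinstance(relation, DirectRelation)
# `container` was not given, so it is no longer emitted as "container": null:
assert relation.dump(camel_case=True) == {"type": "direct"}
```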
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
name = "cognite-sdk"

version = "6.34.1"
version = "6.34.2"
description = "Cognite Python SDK"
readme = "README.md"
documentation = "https://cognite-sdk-python.readthedocs-hosted.com"
@@ -203,3 +203,23 @@ def test_dump_json_serialize_load(self, movie_containers: ContainerList) -> None

# Assert
assert container == container_loaded

def test_load_and_create_only_required_fields(
self, cognite_client: CogniteClient, integration_test_space: Space
) -> None:
external_id = "test_load_and_create_only_required_fields"
data = {
"externalId": external_id,
"space": integration_test_space.space,
"properties": {"name": {"type": {"type": "text"}}},
"indexes": {"nameIdx": {"properties": ["name"], "indexType": "btree"}},
}

container = ContainerApply.load(data)

try:
created = cognite_client.data_modeling.containers.apply(container)

assert created.external_id == external_id
finally:
cognite_client.data_modeling.containers.delete(container.as_id())
@@ -1,6 +1,22 @@
import pytest

from cognite.client.data_classes.data_modeling.containers import Constraint, Index
from cognite.client.data_classes.data_modeling.containers import Constraint, ContainerProperty, Index


class TestContainerProperty:
@pytest.mark.parametrize(
"data",
[
{"type": {"type": "direct"}},
# List is not required, but gets set and thus will be dumped
{"type": {"type": "int32", "list": False}},
{"type": {"type": "text", "list": False, "collation": "ucs_basic"}},
{"type": {"type": "file", "list": False}},
],
)
def test_load_dump__only_required(self, data: dict) -> None:
actual = ContainerProperty.load(data).dump(camel_case=True)
assert data == actual


class TestConstraint:
@@ -43,6 +59,13 @@ def test_load_dump__no_fail_on_unseen_key(self, data: dict) -> None:
data.pop("this-key-is-new-sooo-new")
assert data == actual

@pytest.mark.parametrize(
"data", [{"properties": ["name"], "indexType": "btree"}, {"properties": ["name"], "indexType": "inverted"}]
)
def test_load_dump__only_required(self, data: dict) -> None:
actual = Index.load(data).dump(camel_case=True)
assert data == actual


class TestConstraints:
@pytest.mark.parametrize(
@@ -30,7 +30,6 @@ def test_load_dumped_mapped_property_for_read(self) -> None:
"name": "fullName",
"nullable": False,
"type": {
"container": None,
"type": "direct",
"source": {"external_id": "myExternalId", "space": "mySpace", "version": "myVersion"},
},
