diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b51dc86ae..e99293a37 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,7 +12,7 @@ jobs: - uses: actions/checkout@v4 - uses: ./.github/actions/setup with: - extras: '-E pandas' + extras: "-E pandas" - name: Linting and static code checks run: pre-commit run --all-files @@ -46,13 +46,13 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest] - python-version: ['3.8', '3.11', '3.12'] # TODO: 3.9, 3.10 (requires a lot of work for FakeCogResGen for tests) + python-version: ["3.8", "3.11", "3.12"] # TODO: 3.9, 3.10 (requires a lot of work for FakeCogResGen for tests) steps: - uses: actions/checkout@v4 - uses: ./.github/actions/setup with: python_version: ${{ matrix.python-version }} - extras: '-E all' + extras: "-E all" - name: Test full env: @@ -64,7 +64,8 @@ jobs: COGNITE_PROJECT: python-sdk-test COGNITE_BASE_URL: https://greenfield.cognitedata.com COGNITE_CLIENT_NAME: python-sdk-integration-tests - run: pytest tests --durations=10 --cov --cov-report term --cov-report xml:coverage.xml -n8 --dist loadscope --reruns 2 --maxfail 20 + # Testpaths are defined in the pytest.ini file: + run: pytest --durations=10 --cov --cov-report term --cov-report xml:coverage.xml -n8 --dist loadscope --reruns 2 --maxfail 20 - uses: codecov/codecov-action@v4 if: matrix.os == 'windows-latest' && matrix.python-version == '3.8' diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4e65a4dad..fc6de2fed 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -25,7 +25,8 @@ jobs: COGNITE_PROJECT: python-sdk-test COGNITE_BASE_URL: https://greenfield.cognitedata.com COGNITE_CLIENT_NAME: python-sdk-integration-tests - run: pytest tests --durations=10 --cov --cov-report term --cov-report xml:coverage.xml -n8 --dist loadscope --reruns 2 --maxfail 20 + # Testpaths are defined in the pytest.ini file: + run: pytest 
--durations=10 --cov --cov-report term --cov-report xml:coverage.xml -n8 --dist loadscope --reruns 2 --maxfail 20 - uses: codecov/codecov-action@v4 with: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f39b5f2c5..e3667cdec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ --- repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.2 + rev: v0.6.3 hooks: - id: ruff args: diff --git a/CHANGELOG.md b/CHANGELOG.md index 39e26b59a..72d0f7874 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,18 +17,60 @@ Changes are grouped as follows - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. -## [7.55.2] - 2024-08-29 +## [7.58.6] - 2024-09-05 +### Fixed +- Data modeling convenience filter `SpaceFilter` now allows listing of global nodes by using `equals` + (when a single space is requested (requirement)). This also affects the `space` parameter to e.g. + `client.data_modeling.instances.list(...)` + +## [7.58.5] - 2024-09-04 ### Added -- [Feature Preview - beta] Support for `client.hosted_extractors.sources`. +- Data modeling filters now support properties that are lists. +### Fixed +- Read-only properties on CogniteAssetApply (root, path and last_updated_time) are now removed. + +## [7.58.4] - 2024-09-03 +### Fixed +- The deserialization `datetime` properties in `TypedNode`/`TypedEdge` now correctly handles truncated milliseconds. + +## [7.58.3] - 2024-09-03 +### Fixed +- The parameter `query` is now optional in `client.data_modeling.instances.search(...)`. + +## [7.58.2] - 2024-09-03 +### Added +- [Feature Preview - alpha] Support for `client.hosted_extractors.sources`. + +## [7.58.1] - 2024-09-03 +### Fixed +- [Feature Preview - beta] data workflows: `workflowExecutionId` in `cognite.client.data_classes.workflows.WorkflowTriggerRun` + can be null or missing, as according to the API spec. 
+ +## [7.58.0] - 2024-09-03 +### Added +- Data Workflows: add support for `SubworkflowReferenceParameters` subworkflow task type. Allowing embedding other workflows into a workflow. + +## [7.57.0] - 2024-09-03 +### Added +- Add a `load` method to CogniteClient, ClientConfig, and CredenitalProvider (and all it's subclasses). +- Add `apply_settings` method to `global_config` to pass in a dict of settings + +## [7.56.0] - 2024-09-02 +### Added +- Support for referencing files by instance id when running diagrams.detect + +## [7.55.2] - 2024-08-29 +### Fixed +- Turn workflow_orchestration into data_workflows and add trigger doc, fix attribute names in data classes ## [7.55.1] - 2024-08-29 ### Fixed -- Missing exports for workflow triggers +- Missing exports for workflow triggers ## [7.55.0] - 2024-08-23 ### Added - Support for creating a session using a one-shot token in the `client.iam.session.create` method. -- Parameter `nonce` to the `client.functions.call()` and `client.workflow.executions.run()` methods to allow passing +- Parameter `nonce` to the `client.functions.call()` and `client.workflow.executions.run()` methods to allow passing a custom nonce instead of letting the SDK generate it from your current credentials. ## [7.54.19] - 2024-08-23 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d9f88fc1e..09c991685 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -95,6 +95,16 @@ To speed up test runs pass the following arguments (this will parallelize across pytest -n4 --dist loadscope tests ``` +#### Unit Tests for Examples in Documentation + +For code examples defined in *docstrings* the doctest library is used and docstring tests are defined in `tests/tests_unit/test_docstring_examples.py`. Some docstring code examples may require patching which should be done here. + +For any code examples written directly in `docs/source` we are using the [sphinx doctest extension](https://www.sphinx-doc.org/en/master/usage/extensions/doctest.html) with pytest. 
See the `docs/source/quickstart.rst` for an example of a unit test that is setup to use some fixtures defined through pytest (`docs/source/conftest.py`). To run all the tests defined in docs run: + +``` +pytest docs +``` + ### Documentation Build html files of documentation locally by running diff --git a/cognite/client/_api/data_modeling/instances.py b/cognite/client/_api/data_modeling/instances.py index ba872aeda..5cfda0834 100644 --- a/cognite/client/_api/data_modeling/instances.py +++ b/cognite/client/_api/data_modeling/instances.py @@ -973,7 +973,8 @@ def apply( def search( self, view: ViewId, - query: str, + query: str | None = None, + *, instance_type: Literal["node"] = "node", properties: list[str] | None = None, target_units: list[TargetUnit] | None = None, @@ -988,7 +989,8 @@ def search( def search( self, view: ViewId, - query: str, + query: str | None = None, + *, instance_type: Literal["edge"], properties: list[str] | None = None, target_units: list[TargetUnit] | None = None, @@ -1003,7 +1005,8 @@ def search( def search( self, view: ViewId, - query: str, + query: str | None = None, + *, instance_type: type[T_Node], properties: list[str] | None = None, target_units: list[TargetUnit] | None = None, @@ -1018,7 +1021,8 @@ def search( def search( self, view: ViewId, - query: str, + query: str | None = None, + *, instance_type: type[T_Edge], properties: list[str] | None = None, target_units: list[TargetUnit] | None = None, @@ -1032,7 +1036,7 @@ def search( def search( self, view: ViewId, - query: str, + query: str | None = None, instance_type: Literal["node", "edge"] | type[T_Node] | type[T_Edge] = "node", properties: list[str] | None = None, target_units: list[TargetUnit] | None = None, @@ -1046,7 +1050,7 @@ def search( Args: view (ViewId): View to search in. - query (str): Query string that will be parsed and used for search. + query (str | None): Query string that will be parsed and used for search. 
instance_type (Literal["node", "edge"] | type[T_Node] | type[T_Edge]): Whether to search for nodes or edges. properties (list[str] | None): Optional array of properties you want to search through. If you do not specify one or more properties, the service will search all text fields within the view. target_units (list[TargetUnit] | None): Properties to convert to another unit. The API can only convert to another unit if a unit has been defined as part of the type on the underlying container being queried. @@ -1103,7 +1107,9 @@ def search( else: raise ValueError(f"Invalid instance type: {instance_type}") - body = {"view": view.dump(camel_case=True), "query": query, "instanceType": instance_type_str, "limit": limit} + body: dict[str, Any] = {"view": view.dump(camel_case=True), "instanceType": instance_type_str, "limit": limit} + if query: + body["query"] = query if properties: body["properties"] = properties if include_typing: diff --git a/cognite/client/_api/diagrams.py b/cognite/client/_api/diagrams.py index 3c3066f64..01da62154 100644 --- a/cognite/client/_api/diagrams.py +++ b/cognite/client/_api/diagrams.py @@ -15,6 +15,7 @@ FileReference, T_ContextualizationJob, ) +from cognite.client.data_classes.data_modeling import NodeId from cognite.client.exceptions import CogniteAPIError, CogniteMissingClientError from cognite.client.utils._experimental import FeaturePreviewWarning from cognite.client.utils._text import to_camel_case @@ -92,24 +93,37 @@ def _list_from_instance_or_list( def _process_file_ids( ids: Sequence[int] | int | None, external_ids: SequenceNotStr[str] | str | None, + instance_ids: Sequence[NodeId] | NodeId | None, file_references: Sequence[FileReference] | FileReference | None, - ) -> list[dict[str, int | str | dict[str, int]] | dict[str, str] | dict[str, int]]: + ) -> list[ + dict[str, int | str | dict[str, str] | dict[str, int]] + | dict[str, dict[str, str]] + | dict[str, str] + | dict[str, int] + ]: ids = 
DiagramsAPI._list_from_instance_or_list(ids, int, "ids must be int or list of int") external_ids = cast( SequenceNotStr[str], DiagramsAPI._list_from_instance_or_list(external_ids, str, "external_ids must be str or list of str"), ) + instance_ids = DiagramsAPI._list_from_instance_or_list( + instance_ids, NodeId, "instance_ids must be NodeId or list of NodeId" + ) file_references = DiagramsAPI._list_from_instance_or_list( file_references, FileReference, "file_references must be FileReference or list of FileReference" ) # Handle empty lists - if not (external_ids or ids or file_references): + if not (external_ids or ids or instance_ids or file_references): raise ValueError("No ids, external ids or file references specified") id_objs = [{"fileId": id} for id in ids] external_id_objs = [{"fileExternalId": external_id} for external_id in external_ids] + instance_id_objs = [ + {"fileInstanceId": instance_id.dump(camel_case=True, include_instance_type=False)} + for instance_id in instance_ids + ] file_reference_objects = [file_reference.to_api_item() for file_reference in file_references] - return [*id_objs, *external_id_objs, *file_reference_objects] + return [*id_objs, *external_id_objs, *instance_id_objs, *file_reference_objects] @overload def detect( @@ -120,6 +134,7 @@ def detect( min_tokens: int = 2, file_ids: int | Sequence[int] | None = None, file_external_ids: str | SequenceNotStr[str] | None = None, + file_instance_ids: NodeId | Sequence[NodeId] | None = None, file_references: list[FileReference] | FileReference | None = None, pattern_mode: bool = False, configuration: dict[str, Any] | None = None, @@ -136,6 +151,7 @@ def detect( min_tokens: int = 2, file_ids: int | Sequence[int] | None = None, file_external_ids: str | SequenceNotStr[str] | None = None, + file_instance_ids: NodeId | Sequence[NodeId] | None = None, file_references: list[FileReference] | FileReference | None = None, pattern_mode: bool = False, configuration: DiagramDetectConfig | dict[str, Any] | 
None = None, @@ -152,6 +168,7 @@ def detect( min_tokens: int = 2, file_ids: int | Sequence[int] | None = None, file_external_ids: str | SequenceNotStr[str] | None = None, + file_instance_ids: NodeId | Sequence[NodeId] | None = None, file_references: list[FileReference] | FileReference | None = None, pattern_mode: bool = False, configuration: DiagramDetectConfig | dict[str, Any] | None = None, @@ -165,6 +182,7 @@ def detect( min_tokens: int = 2, file_ids: int | Sequence[int] | None = None, file_external_ids: str | SequenceNotStr[str] | None = None, + file_instance_ids: NodeId | Sequence[NodeId] | None = None, file_references: list[FileReference] | FileReference | None = None, pattern_mode: bool | None = None, configuration: DiagramDetectConfig | dict[str, Any] | None = None, @@ -184,7 +202,8 @@ def detect( min_tokens (int): Minimal number of tokens a match must be based on file_ids (int | Sequence[int] | None): ID of the files, should already be uploaded in the same tenant. file_external_ids (str | SequenceNotStr[str] | None): File external ids, alternative to file_ids and file_references. - file_references (list[FileReference] | FileReference | None): File references (id or external_id), and first_page and last_page to specify page ranges per file. Each reference can specify up to 50 pages. Providing a page range will also make the page count of the document a part of the response. + file_instance_ids (NodeId | Sequence[NodeId] | None): Files to detect in, specified by instance id. + file_references (list[FileReference] | FileReference | None): File references (id, external_id or instance_id), and first_page and last_page to specify page ranges per file. Each reference can specify up to 50 pages. Providing a page range will also make the page count of the document a part of the response. pattern_mode (bool | None): If True, entities must be provided with a sample field. This enables detecting tags that are similar to the sample, but not necessarily identical. 
Defaults to None. configuration (DiagramDetectConfig | dict[str, Any] | None): Additional configuration for the detect algorithm. See `DiagramDetectConfig` class documentation and `beta API docs `_. multiple_jobs (bool): Enables you to publish multiple jobs. If True the method returns a tuple of DetectJobBundle and list of potentially unposted files. If False it will return a single DiagramDetectResults. Defaults to False. @@ -258,7 +277,7 @@ def detect( Check the documentation for `DiagramDetectConfig` for more information on the available options. """ - items = self._process_file_ids(file_ids, file_external_ids, file_references) + items = self._process_file_ids(file_ids, file_external_ids, file_instance_ids, file_references) entities = [ entity.dump(camel_case=True) if isinstance(entity, CogniteResource) else entity for entity in entities ] diff --git a/cognite/client/_api/hosted_extractors/sources.py b/cognite/client/_api/hosted_extractors/sources.py index adfe2f971..066930127 100644 --- a/cognite/client/_api/hosted_extractors/sources.py +++ b/cognite/client/_api/hosted_extractors/sources.py @@ -1,10 +1,11 @@ from __future__ import annotations from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Sequence, overload +from typing import TYPE_CHECKING, Any, Literal, Sequence, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ +from cognite.client.data_classes._base import CogniteResource, PropertySpec from cognite.client.data_classes.hosted_extractors.sources import Source, SourceList, SourceUpdate, SourceWrite from cognite.client.utils._experimental import FeaturePreviewWarning from cognite.client.utils._identifier import IdentifierSequence @@ -49,7 +50,7 @@ def __call__( ) -> Iterator[Source] | Iterator[SourceList]: """Iterate over sources - Fetches sources as they are iterated over, so you keep a limited number of spaces in memory. 
+ Fetches sources as they are iterated over, so you keep a limited number of sources in memory. Args: chunk_size (int | None): Number of sources to return in each chunk. Defaults to yielding one source a time. @@ -72,7 +73,7 @@ def __call__( def __iter__(self) -> Iterator[Source]: """Iterate over sources - Fetches sources as they are iterated over, so you keep a limited number of spaces in memory. + Fetches sources as they are iterated over, so you keep a limited number of sources in memory. Returns: Iterator[Source]: yields Source one by one. @@ -92,7 +93,7 @@ def retrieve( Args: external_ids (str | SequenceNotStr[str]): The external ID provided by the client. Must be unique for the resource type. - ignore_unknown_ids (bool): No description. + ignore_unknown_ids (bool): Ignore external IDs that are not found rather than throw an exception. Returns: Source | SourceList: Requested sources @@ -103,11 +104,11 @@ def retrieve( >>> client = CogniteClient() >>> res = client.hosted_extractors.sources.retrieve('myMQTTSource') - Get multiple spaces by id: + Get multiple sources by id: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.hosted_extractors.sources.retrieve(["myMQTTSource", "MyEvenHubSource"], ignore_unknown_ids=True) + >>> res = client.hosted_extractors.sources.retrieve(["myMQTTSource", "MyEventHubSource"], ignore_unknown_ids=True) """ self._warning.warn() @@ -126,15 +127,15 @@ def delete( Args: external_ids (str | SequenceNotStr[str]): The external ID provided by the client. Must be unique for the resource type. - ignore_unknown_ids (bool): No description. - force (bool): No description. + ignore_unknown_ids (bool): Ignore external IDs that are not found rather than throw an exception. + force (bool): Delete any jobs associated with each item. 
Examples: Delete sources by id:: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.hosted_extractors.sources.delete(spaces=["myMQTTSource", "MyEvenHubSource"]) + >>> client.hosted_extractors.sources.delete(["myMQTTSource", "MyEventHubSource"]) """ self._warning.warn() extra_body_fields: dict[str, Any] = {} @@ -146,7 +147,6 @@ def delete( self._delete_multiple( identifiers=IdentifierSequence.load(external_ids=external_ids), wrap_ids=True, - returns_items=False, headers={"cdf-version": "beta"}, extra_body_fields=extra_body_fields or None, ) @@ -161,7 +161,7 @@ def create(self, items: SourceWrite | Sequence[SourceWrite]) -> Source | SourceL """`Create one or more sources. `_ Args: - items (SourceWrite | Sequence[SourceWrite]): Space | Sequence[Space]): Source(s) to create. + items (SourceWrite | Sequence[SourceWrite]): Source(s) to create. Returns: Source | SourceList: Created source(s) @@ -195,7 +195,7 @@ def update(self, items: SourceWrite | SourceUpdate | Sequence[SourceWrite | Sour """`Update one or more sources. `_ Args: - items (SourceWrite | SourceUpdate | Sequence[SourceWrite | SourceUpdate]): Space | Sequence[Space]): Source(s) to update. + items (SourceWrite | SourceUpdate | Sequence[SourceWrite | SourceUpdate]): Source(s) to update. 
Returns: Source | SourceList: Updated source(s) @@ -219,6 +219,18 @@ def update(self, items: SourceWrite | SourceUpdate | Sequence[SourceWrite | Sour headers={"cdf-version": "beta"}, ) + @classmethod + def _convert_resource_to_patch_object( + cls, + resource: CogniteResource, + update_attributes: list[PropertySpec], + mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null", + ) -> dict[str, dict[str, dict]]: + output = super()._convert_resource_to_patch_object(resource, update_attributes, mode) + if hasattr(resource, "_type"): + output["type"] = resource._type + return output + def list( self, limit: int | None = DEFAULT_LIMIT_READ, @@ -237,7 +249,7 @@ def list( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> space_list = client.hosted_extractors.sources.list(limit=5) + >>> source_list = client.hosted_extractors.sources.list(limit=5) Iterate over sources:: @@ -251,7 +263,7 @@ def list( >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> for source_list in client.hosted_extractors.sources(chunk_size=25): - ... source_list # do something with the spaces + ... source_list # do something with the sources """ self._warning.warn() return self._list( diff --git a/cognite/client/_api/raw.py b/cognite/client/_api/raw.py index 51a74ebd0..3bad1f2cf 100644 --- a/cognite/client/_api/raw.py +++ b/cognite/client/_api/raw.py @@ -398,7 +398,7 @@ def __call__( Note: When iterating using partitions > 1, the memory usage is bounded at 2 x partitions x chunk_size. This is implemented - by halting retrival speed when the callers code can't keep up. + by halting retrieval speed when the callers code can't keep up. 
""" if partitions is None or _RUNNING_IN_BROWSER: return self._list_generator( diff --git a/cognite/client/_api_client.py b/cognite/client/_api_client.py index 30c88c896..f58946a6c 100644 --- a/cognite/client/_api_client.py +++ b/cognite/client/_api_client.py @@ -1043,7 +1043,6 @@ def _update_multiple( item, update_cls._get_update_properties(item), mode, - update_cls._get_extra_identifying_properties(item), ) ) elif isinstance(item, CogniteUpdate): @@ -1224,7 +1223,6 @@ def _convert_resource_to_patch_object( resource: CogniteResource, update_attributes: list[PropertySpec], mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null", - identifying_properties: dict[str, Any] | None = None, ) -> dict[str, dict[str, dict]]: dumped_resource = resource.dump(camel_case=True) has_id = "id" in dumped_resource @@ -1240,9 +1238,6 @@ def _convert_resource_to_patch_object( elif has_external_id: patch_object["externalId"] = dumped_resource.pop("externalId") - if identifying_properties: - patch_object.update(identifying_properties) - update: dict[str, dict] = cls._clear_all_attributes(update_attributes) if mode == "replace" else {} update_attribute_by_name = {prop.name: prop for prop in update_attributes} diff --git a/cognite/client/_cognite_client.py b/cognite/client/_cognite_client.py index 5a0d16e3e..12d83162f 100644 --- a/cognite/client/_cognite_client.py +++ b/cognite/client/_cognite_client.py @@ -32,7 +32,7 @@ from cognite.client._api_client import APIClient from cognite.client.config import ClientConfig, global_config from cognite.client.credentials import CredentialProvider, OAuthClientCredentials, OAuthInteractive -from cognite.client.utils._auxiliary import get_current_sdk_version +from cognite.client.utils._auxiliary import get_current_sdk_version, load_resource_to_dict class CogniteClient: @@ -215,3 +215,37 @@ def default_oauth_interactive( """ credentials = OAuthInteractive.default_for_azure_ad(tenant_id, client_id, cdf_cluster) return 
cls.default(project, cdf_cluster, credentials, client_name) + + @classmethod + def load(cls, config: dict[str, Any] | str) -> CogniteClient: + """Load a cognite client object from a YAML/JSON string or dict. + + Args: + config (dict[str, Any] | str): A dictionary or YAML/JSON string containing configuration values defined in the CogniteClient class. + + Returns: + CogniteClient: A cognite client object. + + Examples: + + Create a cognite client object from a dictionary input: + + >>> from cognite.client import CogniteClient + >>> import os + >>> config = { + ... "client_name": "abcd", + ... "project": "cdf-project", + ... "base_url": "https://api.cognitedata.com/", + ... "credentials": { + ... "client_credentials": { + ... "client_id": "abcd", + ... "client_secret": os.environ["OAUTH_CLIENT_SECRET"], + ... "token_url": "https://login.microsoftonline.com/xyz/oauth2/v2.0/token", + ... "scopes": ["https://api.cognitedata.com/.default"], + ... }, + ... }, + ... } + >>> client = CogniteClient.load(config) + """ + loaded = load_resource_to_dict(config) + return cls(config=ClientConfig.load(loaded)) diff --git a/cognite/client/_version.py b/cognite/client/_version.py index a2f3828ca..ccd04ba03 100644 --- a/cognite/client/_version.py +++ b/cognite/client/_version.py @@ -1,4 +1,4 @@ from __future__ import annotations -__version__ = "7.55.2" +__version__ = "7.58.6" __api_subversion__ = "20230101" diff --git a/cognite/client/config.py b/cognite/client/config.py index 504b70540..cc89bd202 100644 --- a/cognite/client/config.py +++ b/cognite/client/config.py @@ -4,9 +4,11 @@ import pprint import warnings from contextlib import suppress +from typing import Any from cognite.client._version import __api_subversion__ from cognite.client.credentials import CredentialProvider +from cognite.client.utils._auxiliary import load_resource_to_dict class GlobalConfig: @@ -32,6 +34,17 @@ class GlobalConfig: features. Defaults to False. 
""" + def __new__(cls) -> GlobalConfig: + if hasattr(cls, "_instance"): + raise TypeError( + "GlobalConfig is a singleton and cannot be instantiated directly. Use `global_config` instead, " + "`from cognite.client import global_config`, then apply the wanted settings, e.g. `global_config.max_workers = 5`. " + "Settings are only guaranteed to take effect if applied before instantiating a CogniteClient." + ) + + cls._instance = super().__new__(cls) + return cls._instance + def __init__(self) -> None: self.default_client_config: ClientConfig | None = None self.disable_gzip: bool = False @@ -46,6 +59,43 @@ def __init__(self) -> None: self.max_workers: int = 5 self.silence_feature_preview_warnings: bool = False + def apply_settings(self, settings: dict[str, Any] | str) -> None: + """Apply settings to the global configuration object from a YAML/JSON string or dict. + + Note: + All settings in the dictionary will be applied unless an invalid key is provided, a ValueError will instead be raised and no settings will be applied. + + Warning: + This must be done before instantiating a CogniteClient for the configuration to take effect. + + Args: + settings (dict[str, Any] | str): A dictionary or YAML/JSON string containing configuration values defined in the GlobalConfig class. + + Examples: + + Apply settings to the global_config from a dictionary input: + + >>> from cognite.client import global_config + >>> settings = { + ... "max_retries": 5, + ... "disable_ssl": True, + ... 
} + >>> global_config.apply_settings(settings) + """ + + loaded = load_resource_to_dict(settings).copy() # doing a shallow copy to avoid mutating the user input config + current_settings = vars(self) + if not loaded.keys() <= current_settings.keys(): + raise ValueError( + f"One or more invalid keys provided for global_config, no settings applied: {loaded.keys() - current_settings}" + ) + + if "default_client_config" in loaded: + if not isinstance(loaded["default_client_config"], ClientConfig): + loaded["default_client_config"] = ClientConfig.load(loaded["default_client_config"]) + + current_settings.update(loaded) + global_config = GlobalConfig() @@ -163,3 +213,54 @@ def default( credentials=credentials, base_url=f"https://{cdf_cluster}.cognitedata.com/", ) + + @classmethod + def load(cls, config: dict[str, Any] | str) -> ClientConfig: + """Load a client config object from a YAML/JSON string or dict. + + Args: + config (dict[str, Any] | str): A dictionary or YAML/JSON string containing configuration values defined in the ClientConfig class. + + Returns: + ClientConfig: A client config object. + + Examples: + + Create a client config object from a dictionary input: + + >>> from cognite.client.config import ClientConfig + >>> import os + >>> config = { + ... "client_name": "abcd", + ... "project": "cdf-project", + ... "base_url": "https://api.cognitedata.com/", + ... "credentials": { + ... "client_credentials": { + ... "client_id": "abcd", + ... "client_secret": os.environ["OAUTH_CLIENT_SECRET"], + ... "token_url": "https://login.microsoftonline.com/xyz/oauth2/v2.0/token", + ... "scopes": ["https://api.cognitedata.com/.default"], + ... }, + ... }, + ... 
} + >>> client_config = ClientConfig.load(config) + """ + loaded = load_resource_to_dict(config) + + if isinstance(loaded["credentials"], CredentialProvider): + credentials = loaded["credentials"] + else: + credentials = CredentialProvider.load(loaded["credentials"]) + + return cls( + client_name=loaded["client_name"], + project=loaded["project"], + credentials=credentials, + api_subversion=loaded.get("api_subversion"), + base_url=loaded.get("base_url"), + max_workers=loaded.get("max_workers"), + headers=loaded.get("headers"), + timeout=loaded.get("timeout"), + file_transfer_timeout=loaded.get("file_transfer_timeout"), + debug=loaded.get("debug", False), + ) diff --git a/cognite/client/credentials.py b/cognite/client/credentials.py index 6fefae39b..ed5a24421 100644 --- a/cognite/client/credentials.py +++ b/cognite/client/credentials.py @@ -7,22 +7,80 @@ import time from abc import abstractmethod from pathlib import Path -from typing import Any, Callable, Protocol +from types import MappingProxyType +from typing import Any, Callable, Protocol, runtime_checkable from msal import ConfidentialClientApplication, PublicClientApplication, SerializableTokenCache from oauthlib.oauth2 import BackendApplicationClient, OAuth2Error from requests_oauthlib import OAuth2Session from cognite.client.exceptions import CogniteAuthError +from cognite.client.utils._auxiliary import load_resource_to_dict _TOKEN_EXPIRY_LEEWAY_SECONDS_DEFAULT = 30 # Do not change without also updating all the docstrings using it +@runtime_checkable class CredentialProvider(Protocol): @abstractmethod def authorization_header(self) -> tuple[str, str]: raise NotImplementedError + @classmethod + def load(cls, config: dict[str, Any] | str) -> CredentialProvider: + """Load a credential provider object from a YAML/JSON string or dict. 
+ + Note: + The dictionary must contain exactly one top level key, which is the type of the credential provider and must be one of the + following strings: ``"token"``, ``"client_credentials"``, ``"interactive"``, ``"device_code"``, ``"client_certificate"``. + The value of the key is a dictionary containing the configuration for the credential provider. + + Args: + config (dict[str, Any] | str): A dictionary or YAML/JSON string containing the configuration for the credential provider. + + Returns: + CredentialProvider: Initialized credential provider of the specified type. + + Examples: + + Get a token credential provider: + + >>> from cognite.client.credentials import CredentialProvider + >>> config = {"token": "my secret token"} + >>> credentials = CredentialProvider.load(config) + + Get a client credential provider: + + >>> import os + >>> config = { + ... "client_credentials": { + ... "client_id": "abcd", + ... "client_secret": os.environ["OAUTH_CLIENT_SECRET"], + ... "token_url": "https://login.microsoftonline.com/xyz/oauth2/v2.0/token", + ... "scopes": ["https://api.cognitedata.com/.default"], + ... } + ... } + >>> credentials = CredentialProvider.load(config) + """ + loaded = load_resource_to_dict(config) + + if len(loaded) != 1: + raise ValueError( + f"Credential provider configuration must be a dictionary containing exactly one of the following " + f"supported types as the top level key: {sorted(_SUPPORTED_CREDENTIAL_TYPES.keys())}." + ) + + credential_type, credential_config = next(iter(loaded.items())) + + if credential_type not in _SUPPORTED_CREDENTIAL_TYPES: + raise ValueError( + f"Invalid credential provider type given, the valid options are: {sorted(_SUPPORTED_CREDENTIAL_TYPES.keys())}." 
+ ) + + if credential_type == "token" and (isinstance(credential_config, str) or callable(credential_config)): + credential_config = {"token": credential_config} + return _SUPPORTED_CREDENTIAL_TYPES[credential_type].load(credential_config) # type: ignore [attr-defined] + class Token(CredentialProvider): """Token credential provider @@ -60,6 +118,27 @@ def thread_safe_get_token() -> str: def authorization_header(self) -> tuple[str, str]: return "Authorization", f"Bearer {self.__token_factory()}" + @classmethod + def load(cls, config: dict[str, str | Callable[[], str]] | str) -> Token: + """Load a token credential provider object from a YAML/JSON string or dict. + + Args: + config (dict[str, str | Callable[[], str]] | str): A dictionary or YAML/JSON string containing configuration values defined in the Token class. + + Returns: + Token: Initialized token credential provider. + + Note: + A callable token is not supported if passing in a yaml string. + + Examples: + + >>> from cognite.client.credentials import Token + >>> credentials = Token.load({"token": "my secret token"}) + """ + loaded = load_resource_to_dict(config) + return cls(token=loaded["token"]) + class _OAuthCredentialProviderWithTokenRefresh(CredentialProvider): def __init__(self, token_expiry_leeway_seconds: int = _TOKEN_EXPIRY_LEEWAY_SECONDS_DEFAULT) -> None: @@ -222,6 +301,38 @@ def _refresh_access_token(self) -> tuple[str, float]: self._verify_credentials(credentials) return credentials["access_token"], time.time() + float(credentials["expires_in"]) + @classmethod + def load(cls, config: dict[str, Any] | str) -> OAuthDeviceCode: + """Load a OAuth device code credential provider object from a YAML/JSON string or dict. + + Args: + config (dict[str, Any] | str): A dictionary or YAML/JSON string containing configuration values defined in the OAuthDeviceCode class. + + Returns: + OAuthDeviceCode: Initialized OAuthDeviceCode credential provider. 
+ + Examples: + + >>> from cognite.client.credentials import OAuthDeviceCode + >>> config = { + ... "authority_url": "https://login.microsoftonline.com/xyz", + ... "client_id": "abcd", + ... "scopes": ["https://greenfield.cognitedata.com/.default"], + ... } + >>> credentials = OAuthDeviceCode.load(config) + """ + loaded = load_resource_to_dict(config) + token_cache_path = loaded.get("token_cache_path") + return cls( + authority_url=loaded["authority_url"], + client_id=loaded["client_id"], + scopes=loaded["scopes"], + token_cache_path=Path(token_cache_path) if token_cache_path else None, + token_expiry_leeway_seconds=int( + loaded.get("token_expiry_leeway_seconds", _TOKEN_EXPIRY_LEEWAY_SECONDS_DEFAULT) + ), + ) + class OAuthInteractive(_OAuthCredentialProviderWithTokenRefresh, _WithMsalSerializableTokenCache): """OAuth credential provider for an interactive login flow. @@ -302,6 +413,39 @@ def _refresh_access_token(self) -> tuple[str, float]: self._verify_credentials(credentials) return credentials["access_token"], time.time() + float(credentials["expires_in"]) + @classmethod + def load(cls, config: dict[str, Any] | str) -> OAuthInteractive: + """Load an OAuth interactive credential provider object from a YAML/JSON string or dict. + + Args: + config (dict[str, Any] | str): A dictionary or YAML/JSON string containing configuration values defined in the OAuthInteractive class. + + Returns: + OAuthInteractive: Initialized OAuthInteractive credential provider. + + Examples: + + >>> from cognite.client.credentials import OAuthInteractive + >>> config = { + ... "authority_url": "https://login.microsoftonline.com/xyz", + ... "client_id": "abcd", + ... "scopes": ["https://greenfield.cognitedata.com/.default"], + ... 
} + >>> credentials = OAuthInteractive.load(config) + """ + loaded = load_resource_to_dict(config) + token_cache_path = loaded.get("token_cache_path") + return cls( + authority_url=loaded["authority_url"], + client_id=loaded["client_id"], + scopes=loaded["scopes"], + redirect_port=int(loaded.get("redirect_port", 53000)), + token_cache_path=Path(token_cache_path) if token_cache_path else None, + token_expiry_leeway_seconds=int( + loaded.get("token_expiry_leeway_seconds", _TOKEN_EXPIRY_LEEWAY_SECONDS_DEFAULT) + ), + ) + @classmethod def default_for_azure_ad( cls, @@ -443,6 +587,41 @@ def _refresh_access_token(self) -> tuple[str, float]: f"Error generating access token: {oauth_err.error}, {oauth_err.status_code}, {oauth_err.description}" ) from oauth_err + @classmethod + def load(cls, config: dict[str, Any] | str) -> OAuthClientCredentials: + """Load an OAuth client credentials credential provider object from a YAML/JSON string or dict. + + Args: + config (dict[str, Any] | str): A dictionary or YAML/JSON string containing configuration values defined in the OAuthClientCredentials class. + + Returns: + OAuthClientCredentials: Initialized OAuthClientCredentials credential provider. + + Examples: + + >>> from cognite.client.credentials import OAuthClientCredentials + >>> import os + >>> config = { + ... "token_url": "https://login.microsoftonline.com/xyz/oauth2/v2.0/token", + ... "client_id": "abcd", + ... "client_secret": os.environ["OAUTH_CLIENT_SECRET"], + ... "scopes": ["https://greenfield.cognitedata.com/.default"], + ... "audience": "some-audience" + ... 
} + >>> credentials = OAuthClientCredentials.load(config) + """ + loaded = load_resource_to_dict(config).copy() # doing a shallow copy to avoid mutating the user input config + return cls( + token_url=loaded.pop("token_url"), + client_id=loaded.pop("client_id"), + client_secret=loaded.pop("client_secret"), + scopes=loaded.pop("scopes"), + token_expiry_leeway_seconds=int( + loaded.pop("token_expiry_leeway_seconds", _TOKEN_EXPIRY_LEEWAY_SECONDS_DEFAULT) + ), + **loaded, + ) + @classmethod def default_for_azure_ad( cls, @@ -553,3 +732,49 @@ def _refresh_access_token(self) -> tuple[str, float]: self._verify_credentials(credentials) return credentials["access_token"], time.time() + float(credentials["expires_in"]) + + @classmethod + def load(cls, config: dict[str, Any] | str) -> OAuthClientCertificate: + """Load an OAuth client certificate credential provider object from a YAML/JSON string or dict. + + Args: + config (dict[str, Any] | str): A dictionary or YAML/JSON string containing configuration values defined in the OAuthClientCertificate class. + + Returns: + OAuthClientCertificate: Initialized OAuthClientCertificate credential provider. + + Examples: + + >>> from cognite.client.credentials import OAuthClientCertificate + >>> from pathlib import Path + >>> config = { + ... "authority_url": "https://login.microsoftonline.com/xyz", + ... "client_id": "abcd", + ... "cert_thumbprint": "XYZ123", + ... "certificate": Path("certificate.pem").read_text(), + ... "scopes": ["https://greenfield.cognitedata.com/.default"], + ... 
} + >>> credentials = OAuthClientCertificate.load(config) + """ + loaded = load_resource_to_dict(config) + return cls( + authority_url=loaded["authority_url"], + client_id=loaded["client_id"], + cert_thumbprint=loaded["cert_thumbprint"], + certificate=loaded["certificate"], + scopes=loaded["scopes"], + token_expiry_leeway_seconds=int( + loaded.get("token_expiry_leeway_seconds", _TOKEN_EXPIRY_LEEWAY_SECONDS_DEFAULT) + ), + ) + + +_SUPPORTED_CREDENTIAL_TYPES = MappingProxyType( + { + "token": Token, + "client_credentials": OAuthClientCredentials, + "interactive": OAuthInteractive, + "device_code": OAuthDeviceCode, + "client_certificate": OAuthClientCertificate, + } +) diff --git a/cognite/client/data_classes/_base.py b/cognite/client/data_classes/_base.py index cc9cffbcf..d46dae087 100644 --- a/cognite/client/data_classes/_base.py +++ b/cognite/client/data_classes/_base.py @@ -29,7 +29,7 @@ from cognite.client.exceptions import CogniteMissingClientError from cognite.client.utils import _json -from cognite.client.utils._auxiliary import fast_dict_load, load_yaml_or_json +from cognite.client.utils._auxiliary import fast_dict_load, load_resource_to_dict, load_yaml_or_json from cognite.client.utils._identifier import IdentifierSequence from cognite.client.utils._importing import local_import from cognite.client.utils._pandas_helpers import ( @@ -151,14 +151,8 @@ def dump_yaml(self) -> str: @classmethod def load(cls, resource: dict | str, cognite_client: CogniteClient | None = None) -> Self: """Load a resource from a YAML/JSON string or dict.""" - if isinstance(resource, dict): - return cls._load(resource, cognite_client=cognite_client) - - if isinstance(resource, str): - resource = cast(dict, load_yaml_or_json(resource)) - return cls._load(resource, cognite_client=cognite_client) - - raise TypeError(f"Resource must be json or yaml str, or dict, not {type(resource)}") + loaded = load_resource_to_dict(resource) + return cls._load(loaded, cognite_client=cognite_client) 
@classmethod def _load(cls, resource: dict[str, Any], cognite_client: CogniteClient | None = None) -> Self: @@ -409,7 +403,7 @@ def _load( @classmethod def _load_raw_api_response(cls, responses: list[dict[str, Any]], cognite_client: CogniteClient) -> Self: - # Certain classes may need more than just 'items' from the raw repsonse. These need to provide + # Certain classes may need more than just 'items' from the raw response. These need to provide # an implementation of this method raise NotImplementedError @@ -772,7 +766,7 @@ def load( nulls=data[2], ) elif isinstance(data, str) and (prop_order := data.split(":", 1))[-1] in ("asc", "desc"): - # Syntax ":asc|desc" is depreacted but handled for compatibility + # Syntax ":asc|desc" is deprecated but handled for compatibility return cls(property=prop_order[0], order=cast(Literal["asc", "desc"], prop_order[1])) elif isinstance(data, (str, list, EnumProperty)): return cls(property=data) diff --git a/cognite/client/data_classes/contextualization.py b/cognite/client/data_classes/contextualization.py index a82080deb..ad056330c 100644 --- a/cognite/client/data_classes/contextualization.py +++ b/cognite/client/data_classes/contextualization.py @@ -26,6 +26,7 @@ ) from cognite.client.data_classes.annotation_types.primitives import VisionResource from cognite.client.data_classes.annotations import AnnotationList +from cognite.client.data_classes.data_modeling import NodeId from cognite.client.exceptions import CogniteAPIError, CogniteException, ModelFailedException from cognite.client.utils._auxiliary import convert_true_match, exactly_one_is_not_none, load_resource from cognite.client.utils._text import convert_all_keys_to_snake_case, to_camel_case, to_snake_case @@ -356,24 +357,29 @@ def __init__( self, file_id: int | None = None, file_external_id: str | None = None, + file_instance_id: NodeId | None = None, first_page: int | None = None, last_page: int | None = None, ) -> None: self.file_id = file_id self.file_external_id = 
file_external_id + self.file_instance_id = file_instance_id self.first_page = first_page self.last_page = last_page - if not exactly_one_is_not_none(file_id, file_external_id): - raise ValueError("Exactly one of file_id and file_external_id must be set for a file reference") + if not exactly_one_is_not_none(file_id, file_external_id, file_instance_id): + raise ValueError("File references must have exactly one of file_id, file_external_id and file_instance_id.") if exactly_one_is_not_none(first_page, last_page): raise ValueError("If the page range feature is used, both first page and last page must be set") - def to_api_item(self) -> dict[str, str | int | dict[str, int]]: - if self.file_id is None and self.file_external_id is not None: - item: dict[str, str | int | dict[str, int]] = {"fileExternalId": self.file_external_id} - if self.file_id is not None and self.file_external_id is None: + def to_api_item(self) -> dict[str, str | int | dict[str, int] | dict[str, str]]: + if self.file_id is None and self.file_external_id is not None and self.file_instance_id is None: + item: dict[str, str | int | dict[str, int] | dict[str, str]] = {"fileExternalId": self.file_external_id} + if self.file_id is not None and self.file_external_id is None and self.file_instance_id is None: item = {"fileId": self.file_id} + if self.file_id is None and self.file_external_id is None and self.file_instance_id is not None: + item = {"fileInstanceId": self.file_instance_id.dump(include_instance_type=False)} + if self.first_page is not None and self.last_page is not None: item["pageRange"] = {"begin": self.first_page, "end": self.last_page} return item @@ -472,6 +478,7 @@ def __init__( self, file_id: int | None = None, file_external_id: str | None = None, + file_instance_id: dict[str, str] | None = None, annotations: list | None = None, error_message: str | None = None, cognite_client: CogniteClient | None = None, @@ -480,6 +487,7 @@ def __init__( ) -> None: self.file_id = file_id 
self.file_external_id = file_external_id + self.file_instance_id = file_instance_id self.annotations = annotations self.error_message = error_message self._cognite_client = cast("CogniteClient", cognite_client) diff --git a/cognite/client/data_classes/data_modeling/cdm/v1.py b/cognite/client/data_classes/data_modeling/cdm/v1.py index 016ef81dc..2da2d6ce1 100644 --- a/cognite/client/data_classes/data_modeling/cdm/v1.py +++ b/cognite/client/data_classes/data_modeling/cdm/v1.py @@ -3845,9 +3845,6 @@ class CogniteAssetApply( source_created_user (str | None): User identifier from the source system on who created the source data. This identifier is not guaranteed to match the user identifiers in CDF source_updated_user (str | None): User identifier from the source system on who last updated the source data. This identifier is not guaranteed to match the user identifiers in CDF parent (DirectRelationReference | tuple[str, str] | None): Parent of this asset - root (DirectRelationReference | tuple[str, str] | None): Asset at the top of the hierarchy. - path (list[DirectRelationReference | tuple[str, str]] | None): Materialized path of this asset - path_last_updated_time (datetime | None): Last time the path was updated for this asset asset_class (DirectRelationReference | tuple[str, str] | None): Class of this asset type_ (DirectRelationReference | tuple[str, str] | None): Type of this asset existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. 
@@ -3872,9 +3869,6 @@ def __init__( source_created_user: str | None = None, source_updated_user: str | None = None, parent: DirectRelationReference | tuple[str, str] | None = None, - root: DirectRelationReference | tuple[str, str] | None = None, - path: list[DirectRelationReference | tuple[str, str]] | None = None, - path_last_updated_time: datetime | None = None, asset_class: DirectRelationReference | tuple[str, str] | None = None, type_: DirectRelationReference | tuple[str, str] | None = None, existing_version: int | None = None, @@ -3909,9 +3903,6 @@ def __init__( type=type, ) self.parent = DirectRelationReference.load(parent) if parent else None - self.root = DirectRelationReference.load(root) if root else None - self.path = [DirectRelationReference.load(path) for path in path] if path else None - self.path_last_updated_time = path_last_updated_time self.asset_class = DirectRelationReference.load(asset_class) if asset_class else None self.type_ = DirectRelationReference.load(type_) if type_ else None @@ -4045,9 +4036,6 @@ def as_write(self) -> CogniteAssetApply: source_created_user=self.source_created_user, source_updated_user=self.source_updated_user, parent=self.parent, - root=self.root, - path=self.path, # type: ignore[arg-type] - path_last_updated_time=self.path_last_updated_time, asset_class=self.asset_class, type_=self.type_, existing_version=self.version, diff --git a/cognite/client/data_classes/data_modeling/instances.py b/cognite/client/data_classes/data_modeling/instances.py index e3277882d..adfccc5e5 100644 --- a/cognite/client/data_classes/data_modeling/instances.py +++ b/cognite/client/data_classes/data_modeling/instances.py @@ -442,7 +442,7 @@ def to_pandas( # type: ignore [override] convert_timestamps (bool): Convert known attributes storing CDF timestamps (milliseconds since epoch) to datetime. Does not affect properties. expand_properties (bool): Expand the properties into separate rows. 
Note: Will change default to True in the next major version. remove_property_prefix (bool): Remove view ID prefix from row names of expanded properties (in index). Requires data to be from a single view. - **kwargs (Any): For backwards compatability. + **kwargs (Any): For backwards compatibility. Returns: pd.DataFrame: The dataframe. @@ -1004,7 +1004,7 @@ def to_pandas( # type: ignore [override] convert_timestamps (bool): Convert known columns storing CDF timestamps (milliseconds since epoch) to datetime. Does not affect properties. expand_properties (bool): Expand the properties into separate columns. Note: Will change default to True in the next major version. remove_property_prefix (bool): Remove view ID prefix from columns names of expanded properties. Requires data to be from a single view. - **kwargs (Any): For backwards compatability. + **kwargs (Any): For backwards compatibility. Returns: pd.DataFrame: The Cognite resource as a dataframe. diff --git a/cognite/client/data_classes/data_modeling/typed_instances.py b/cognite/client/data_classes/data_modeling/typed_instances.py index 4568e6fef..971f9d680 100644 --- a/cognite/client/data_classes/data_modeling/typed_instances.py +++ b/cognite/client/data_classes/data_modeling/typed_instances.py @@ -3,8 +3,8 @@ import inspect from abc import ABC from collections.abc import Iterable -from datetime import date, datetime -from typing import TYPE_CHECKING, Any, cast +from datetime import date +from typing import TYPE_CHECKING, Any, NoReturn, cast from typing_extensions import Self @@ -20,6 +20,7 @@ _serialize_property_value, ) from cognite.client.utils._text import to_camel_case +from cognite.client.utils._time import convert_data_modelling_timestamp if TYPE_CHECKING: from cognite.client import CogniteClient @@ -133,6 +134,23 @@ class TypedNode(Node, ABC): } ) + # We inherit a bit too much from Instance that we must override: + # (methods: get, __getitem__, __setitem__, __delitem__, __contains__) + def get(self, attr: 
str, default: Any = None) -> NoReturn: + raise AttributeError(f"{type(self).__qualname__} object has no attribute 'get'") + + def __getitem__(self, attr: str) -> NoReturn: + raise TypeError(f"{type(self).__qualname__} object is not subscriptable") + + def __setitem__(self, attr: str, value: Any) -> NoReturn: + raise TypeError(f"{type(self).__qualname__} object does not support item assignment") + + def __delitem__(self, attr: str) -> NoReturn: + raise TypeError(f"{type(self).__qualname__} object does not support item deletion") + + def __contains__(self, attr: str) -> NoReturn: + raise TypeError(f"argument of type {type(self).__qualname__} is not iterable") + @classmethod def get_source(cls) -> ViewId: raise NotImplementedError @@ -174,6 +192,23 @@ class TypedEdge(Edge, ABC): } ) + # We inherit a bit too much from Instance that we must override: + # (methods: get, __getitem__, __setitem__, __delitem__, __contains__) + def get(self, attr: str, default: Any = None) -> NoReturn: + raise AttributeError(f"{type(self).__qualname__!r} object has no attribute 'get'") + + def __getitem__(self, attr: str) -> NoReturn: + raise TypeError(f"{type(self).__qualname__!r} object is not subscriptable") + + def __setitem__(self, attr: str, value: Any) -> NoReturn: + raise TypeError(f"{type(self).__qualname__!r} object does not support item assignment") + + def __delitem__(self, attr: str) -> NoReturn: + raise TypeError(f"{type(self).__qualname__!r} object does not support item deletion") + + def __contains__(self, attr: str) -> NoReturn: + raise TypeError(f"argument of type {type(self).__qualname__!r} is not iterable") + @classmethod def get_source(cls) -> ViewId: raise NotImplementedError @@ -304,7 +339,7 @@ def _deserialize_value(value: Any, parameter: inspect.Parameter) -> Any: return value annotation = str(parameter.annotation) if "datetime" in annotation and isinstance(value, str): - return datetime.fromisoformat(value) + return convert_data_modelling_timestamp(value) elif 
"date" in annotation and isinstance(value, str): return date.fromisoformat(value) elif DirectRelationReference.__name__ in annotation and isinstance(value, dict): diff --git a/cognite/client/data_classes/datapoints.py b/cognite/client/data_classes/datapoints.py index 54bdaf9b2..85cd251d0 100644 --- a/cognite/client/data_classes/datapoints.py +++ b/cognite/client/data_classes/datapoints.py @@ -1324,7 +1324,8 @@ def get( # type: ignore [override] ) -> DatapointsArray | list[DatapointsArray] | None: """Get a specific DatapointsArray from this list by id or external_id. - Note: For duplicated time series, returns a list of DatapointsArray. + Note: + For duplicated time series, returns a list of DatapointsArray. Args: id (int | None): The id of the item(s) to get. @@ -1406,7 +1407,8 @@ def get( # type: ignore [override] ) -> Datapoints | list[Datapoints] | None: """Get a specific Datapoints from this list by id or external_id. - Note: For duplicated time series, returns a list of Datapoints. + Note: + For duplicated time series, returns a list of Datapoints. Args: id (int | None): The id of the item(s) to get. 
diff --git a/cognite/client/data_classes/filters.py b/cognite/client/data_classes/filters.py index 663e61823..fb385090e 100644 --- a/cognite/client/data_classes/filters.py +++ b/cognite/client/data_classes/filters.py @@ -34,15 +34,22 @@ class ParameterValue: FilterValueList: TypeAlias = Union[Sequence[RawValue], PropertyReferenceValue, ParameterValue] -def _dump_filter_value(filter_value: FilterValueList | FilterValue) -> Any: - if isinstance(filter_value, PropertyReferenceValue): - if isinstance(filter_value.property, EnumProperty): - return {"property": filter_value.property.as_reference()} - return {"property": filter_value.property} +def _dump_filter_value(value: FilterValueList | FilterValue) -> Any: + if isinstance(value, PropertyReferenceValue): + if isinstance(value.property, EnumProperty): + return {"property": value.property.as_reference()} + return {"property": value.property} - if isinstance(filter_value, ParameterValue): - return {"parameter": filter_value.parameter} - return filter_value + elif isinstance(value, ParameterValue): + return {"parameter": value.parameter} + + elif hasattr(value, "dump"): + return value.dump() + + elif isinstance(value, SequenceNotStr): + return list(map(_dump_filter_value, value)) + + return value def _load_filter_value(value: Any) -> FilterValue | FilterValueList: @@ -675,7 +682,8 @@ class Exists(FilterWithProperty): @final class Prefix(FilterWithPropertyAndValue): - """Prefix filter results based on whether the (text) property starts with the provided value. + """Prefix filter results based on whether the property starts with the provided value. When the property + is a list, the list starts with the provided values. Args: property (PropertyReference): The property to filter on. 
@@ -692,6 +700,10 @@ class Prefix(FilterWithPropertyAndValue): - Composing the property reference using the ``View.as_property_ref`` method: >>> flt = Prefix(my_view.as_property_ref("some_property"), "somePrefix") + + Filter that can be used to retrieve items where the property is a list of e.g. integers that starts with [1, 2, 3]: + + >>> flt = Prefix(my_view.as_property_ref("some_list_property"), [1, 2, 3]) """ _filter_name = "prefix" @@ -797,7 +809,7 @@ class Search(FilterWithPropertyAndValue): # ######################################################### # -class SpaceFilter(FilterWithPropertyAndValueList): +class SpaceFilter(FilterWithProperty): """Filters instances based on the space. Args: @@ -815,15 +827,24 @@ class SpaceFilter(FilterWithPropertyAndValueList): >>> flt = SpaceFilter("space3", instance_type="edge") """ - _filter_name = In._filter_name - def __init__(self, space: str | SequenceNotStr[str], instance_type: Literal["node", "edge"] = "node") -> None: - space_list = [space] if isinstance(space, str) else list(space) - super().__init__(property=[instance_type, "space"], values=space_list) + super().__init__(property=[instance_type, "space"]) + space = [space] if isinstance(space, str) else list(space) + single = len(space) == 1 + self._value = space[0] if single else space + self._value_key = "value" if single else "values" + self._filter_name = Equals._filter_name if single else In._filter_name + self._involved_filter: set[type[Filter]] = {Equals if single else In} @classmethod def load(cls, filter_: dict[str, Any]) -> NoReturn: raise NotImplementedError("Custom filter 'SpaceFilter' can not be loaded") + def _filter_body(self, camel_case_property: bool) -> dict[str, Any]: + return { + "property": self._dump_property(camel_case_property), + self._value_key: _dump_filter_value(self._value), + } + def _involved_filter_types(self) -> set[type[Filter]]: - return {In} + return self._involved_filter diff --git 
a/cognite/client/data_classes/hosted_extractors/sources.py b/cognite/client/data_classes/hosted_extractors/sources.py index 49b93633a..640244db8 100644 --- a/cognite/client/data_classes/hosted_extractors/sources.py +++ b/cognite/client/data_classes/hosted_extractors/sources.py @@ -3,7 +3,7 @@ import itertools from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, ClassVar, Literal, cast +from typing import TYPE_CHECKING, Any, ClassVar, Literal, NoReturn, cast from typing_extensions import Self @@ -213,17 +213,8 @@ def __init__( self.created_time = created_time self.last_updated_time = last_updated_time - def as_write(self, key_value: str | None = None) -> EventHubSourceWrite: - if key_value is None: - raise ValueError("key_value must be provided") - return EventHubSourceWrite( - external_id=self.external_id, - host=self.host, - event_hub_name=self.event_hub_name, - key_name=self.key_name, - key_value=key_value, - consumer_group=self.consumer_group, - ) + def as_write(self) -> NoReturn: + raise TypeError(f"{type(self).__name__} cannot be converted to write as id does not contain the secrets") @classmethod def _load_source(cls, resource: dict[str, Any]) -> Self: @@ -355,7 +346,7 @@ def __init__( host: str, port: int | None = None, authentication: MQTTAuthenticationWrite | None = None, - useTls: bool = False, + use_tls: bool = False, ca_certificate: CACertificateWrite | None = None, auth_certificate: AuthCertificateWrite | None = None, ) -> None: @@ -363,7 +354,7 @@ def __init__( self.host = host self.port = port self.authentication = authentication - self.useTls = useTls + self.use_tls = use_tls self.ca_certificate = ca_certificate self.auth_certificate = auth_certificate @@ -376,7 +367,7 @@ def _load_source(cls, resource: dict[str, Any]) -> Self: authentication=MQTTAuthenticationWrite._load(resource["authentication"]) if "authentication" in resource else None, - useTls=resource.get("useTls", False), + 
use_tls=resource.get("useTls", False), ca_certificate=CACertificateWrite._load(resource["caCertificate"]) if "caCertificate" in resource else None, auth_certificate=AuthCertificateWrite._load(resource["authCertificate"]) if "authCertificate" in resource @@ -461,7 +452,7 @@ def __init__( last_updated_time: int, port: int | None = None, authentication: MQTTAuthentication | None = None, - useTls: bool = False, + use_tls: bool = False, ca_certificate: CACertificate | None = None, auth_certificate: AuthCertificate | None = None, ) -> None: @@ -469,7 +460,7 @@ def __init__( self.host = host self.port = port self.authentication = authentication - self.useTls = useTls + self.use_tls = use_tls self.ca_certificate = ca_certificate self.auth_certificate = auth_certificate self.created_time = created_time @@ -484,7 +475,7 @@ def _load_source(cls, resource: dict[str, Any]) -> Self: authentication=MQTTAuthentication._load(resource["authentication"]) if "authentication" in resource else None, - useTls=resource.get("useTls", False), + use_tls=resource.get("useTls", False), ca_certificate=CACertificate._load(resource["caCertificate"]) if "caCertificate" in resource else None, auth_certificate=AuthCertificate._load(resource["authCertificate"]) if "authCertificate" in resource @@ -493,7 +484,7 @@ def _load_source(cls, resource: dict[str, Any]) -> Self: last_updated_time=resource["lastUpdatedTime"], ) - def as_write(self) -> _MQTTSourceWrite: + def as_write(self) -> NoReturn: raise TypeError(f"{type(self).__name__} cannot be converted to write as id does not contain the secrets") def dump(self, camel_case: bool = True) -> dict[str, Any]: @@ -601,7 +592,7 @@ class SourceList(WriteableCogniteResourceList[SourceWrite, Source], ExternalIDTr def as_write( self, - ) -> SourceWriteList: + ) -> NoReturn: raise TypeError(f"{type(self).__name__} cannot be converted to write") diff --git a/cognite/client/data_classes/workflows.py b/cognite/client/data_classes/workflows.py index 
90410888f..efae89d72 100644 --- a/cognite/client/data_classes/workflows.py +++ b/cognite/client/data_classes/workflows.py @@ -136,8 +136,10 @@ def load_parameters(cls, data: dict) -> WorkflowTaskParameters: return CDFTaskParameters._load(parameters) elif type_ == "dynamic": return DynamicTaskParameters._load(parameters) - elif type_ == "subworkflow": + elif type_ == "subworkflow" and "tasks" in parameters["subworkflow"]: return SubworkflowTaskParameters._load(parameters) + elif type_ == "subworkflow" and "workflowExternalId" in parameters["subworkflow"]: + return SubworkflowReferenceParameters._load(parameters) else: raise ValueError(f"Unknown task type: {type_}. Expected {ValidTaskType}") @@ -319,8 +321,8 @@ class SubworkflowTaskParameters(WorkflowTaskParameters): """ The subworkflow task parameters are used to specify a subworkflow task. - When a workflow is made of stages with dependencies between them, we can use subworkflow tasks for conveniece. It takes the tasks parameter which is an array of - function, transformation, and cdf task definitions. This array needs to be statically set on the worklow definition (if it needs to be defined at runtime, use a + When a workflow is made of stages with dependencies between them, we can use subworkflow tasks for convenience. It takes the tasks parameter which is an array of + function, transformation, cdf, ..., task definitions. This array needs to be statically set on the workflow definition (if it needs to be defined at runtime, use a dynamic task). Args: @@ -344,6 +346,38 @@ def dump(self, camel_case: bool = True) -> dict[str, Any]: return {self.task_type: {"tasks": [task.dump(camel_case) for task in self.tasks]}} +class SubworkflowReferenceParameters(WorkflowTaskParameters): + """ + The subworkflow task parameters are used to specify a subworkflow task. + When a workflow is made of stages with dependencies between them, we can use subworkflow tasks for convenience. 
+ The subworkflow reference is used to specify a reference to another workflow which will be embedded into the execution at start time. + + Args: + workflow_external_id (str): The external ID of the referenced workflow. + version (str): The version of the referenced workflow. + """ + + task_type = "subworkflow" + + def __init__(self, workflow_external_id: str, version: str) -> None: + self.workflow_external_id = workflow_external_id + self.version = version + + @classmethod + def _load(cls: type[Self], resource: dict, cognite_client: CogniteClient | None = None) -> Self: + subworkflow: dict[str, Any] = resource[cls.task_type] + + return cls(workflow_external_id=subworkflow["workflowExternalId"], version=subworkflow["version"]) + + def dump(self, camel_case: bool = True) -> dict[str, Any]: + return { + self.task_type: { + "workflowExternalId": self.workflow_external_id, + "version": self.version, + } + } + + class DynamicTaskParameters(WorkflowTaskParameters): """ The dynamic task parameters are used to specify a dynamic task. @@ -397,7 +431,8 @@ class WorkflowTask(CogniteResource): """ This class represents a workflow task. - Note: tasks do not distinguish between write and read versions. + Note: + Tasks do not distinguish between write and read versions. Args: external_id (str): The external ID provided by the client. Must be unique for the resource type. 
@@ -1381,23 +1416,34 @@ class WorkflowTriggerRun(CogniteResource): def __init__( self, - trigger_external_id: str, - trigger_fire_time: int, + external_id: str, + fire_time: int, workflow_external_id: str, workflow_version: str, + status: Literal["success", "failed"], + workflow_execution_id: str | None = None, + reason_for_failure: str | None = None, ) -> None: - self.trigger_external_id = trigger_external_id - self.trigger_fire_time = trigger_fire_time + self.external_id = external_id + self.fire_time = fire_time self.workflow_external_id = workflow_external_id self.workflow_version = workflow_version + self.workflow_execution_id = workflow_execution_id + self.status = status + self.reason_for_failure = reason_for_failure def dump(self, camel_case: bool = True) -> dict[str, Any]: item = { - "trigger_external_id": self.trigger_external_id, - "trigger_fire_time": self.trigger_fire_time, + "external_id": self.external_id, + "fire_time": self.fire_time, "workflow_external_id": self.workflow_external_id, "workflow_version": self.workflow_version, + "status": self.status, } + if self.workflow_execution_id: + item["workflow_execution_id"] = self.workflow_execution_id + if self.reason_for_failure: + item["reason_for_failure"] = self.reason_for_failure if camel_case: return convert_all_keys_to_camel_case(item) return item @@ -1405,10 +1451,13 @@ def dump(self, camel_case: bool = True) -> dict[str, Any]: @classmethod def _load(cls, resource: dict, cognite_client: CogniteClient | None = None) -> WorkflowTriggerRun: return cls( - trigger_external_id=resource["triggerExternalId"], - trigger_fire_time=resource["triggerFireTime"], + external_id=resource["externalId"], + fire_time=resource["fireTime"], workflow_external_id=resource["workflowExternalId"], workflow_version=resource["workflowVersion"], + status=resource["status"], + workflow_execution_id=resource.get("workflowExecutionId"), + reason_for_failure=resource.get("reasonForFailure"), ) diff --git 
a/cognite/client/utils/_auxiliary.py b/cognite/client/utils/_auxiliary.py index f7828dd47..50acdb1c0 100644 --- a/cognite/client/utils/_auxiliary.py +++ b/cognite/client/utils/_auxiliary.py @@ -55,6 +55,18 @@ def get_accepted_params(cls: type[T_CogniteResource]) -> dict[str, str]: return {to_camel_case(k): k for k in vars(cls()) if not k.startswith("_")} +def load_resource_to_dict(resource: dict[str, Any] | str) -> dict[str, Any]: + if isinstance(resource, dict): + return resource + + if isinstance(resource, str): + resource = load_yaml_or_json(resource) + if isinstance(resource, dict): + return resource + + raise TypeError(f"Resource must be json or yaml str, or dict, not {type(resource)}") + + def fast_dict_load( cls: type[T_CogniteObject], item: dict[str, Any], cognite_client: CogniteClient | None ) -> T_CogniteObject: diff --git a/cognite/client/utils/_time.py b/cognite/client/utils/_time.py index 7c46ea778..25bc9e6ce 100644 --- a/cognite/client/utils/_time.py +++ b/cognite/client/utils/_time.py @@ -178,6 +178,23 @@ def datetime_to_ms_iso_timestamp(dt: datetime) -> str: raise TypeError(f"Expected datetime object, got {type(dt)}") +def convert_data_modelling_timestamp(timestamp: str) -> datetime: + """Converts a timestamp string to a datetime object. + + Args: + timestamp (str): A timestamp string. + + Returns: + datetime: A datetime object. + """ + try: + return datetime.fromisoformat(timestamp) + except ValueError: + # Typically hits if the timestamp has truncated milliseconds, + # For example, "2021-01-01T00:00:00.17+00:00". 
+ return datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%f%z") + + def split_granularity_into_quantity_and_normalized_unit(granularity: str) -> tuple[int, str]: """A normalized unit is any unit accepted by the API""" if match := re.match(r"(\d+)(.*)", granularity): diff --git a/docs/source/cognite_client.rst b/docs/source/cognite_client.rst index abee017dc..6dff59321 100644 --- a/docs/source/cognite_client.rst +++ b/docs/source/cognite_client.rst @@ -1,6 +1,7 @@ CogniteClient ============= +.. _class_client_CogniteClient: .. autoclass:: cognite.client.CogniteClient :members: :member-order: bysource @@ -10,6 +11,7 @@ CogniteClient :members: :member-order: bysource +.. _class_client_GlobalConfig: .. autoclass:: cognite.client.config.GlobalConfig :members: :member-order: bysource diff --git a/docs/source/conf.py b/docs/source/conf.py index 3ee27b6a5..1cf25457a 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -28,7 +28,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
-extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinx.ext.autosectionlabel"] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosectionlabel"] autosectionlabel_prefix_document = True diff --git a/docs/source/conftest.py b/docs/source/conftest.py new file mode 100644 index 000000000..8c3839c62 --- /dev/null +++ b/docs/source/conftest.py @@ -0,0 +1,48 @@ +import os +from pathlib import Path + +import pytest +import yaml + +# Files to exclude test directories or modules +collect_ignore = ["conf.py"] + + +@pytest.fixture +def set_envs(monkeypatch): + env_vars = { + "MY_CLUSTER": "api", + "MY_TENANT_ID": "my-tenant-id", + "MY_CLIENT_ID": "my-client-id", + "MY_CLIENT_SECRET": "my-client-secret", + } + + monkeypatch.setattr(os, "environ", env_vars) + + +@pytest.fixture +def quickstart_client_config_file(monkeypatch): + data = { + "client": { + "project": "my-project", + "client_name": "my-special-client", + "base_url": "https://${MY_CLUSTER}.cognitedata.com", + "credentials": { + "client_credentials": { + "token_url": "https://login.microsoftonline.com/${MY_TENANT_ID}/oauth2/v2.0/token", + "client_id": "${MY_CLIENT_ID}", + "client_secret": "${MY_CLIENT_SECRET}", + "scopes": ["https://api.cognitedata.com/.default"], + }, + }, + }, + "global": { + "max_retries": 10, + "max_retry_backoff": 10, + }, + } + + def read_text(*args, **kwargs): + return yaml.dump(data) + + monkeypatch.setattr(Path, "read_text", read_text) diff --git a/docs/source/credential_providers.rst b/docs/source/credential_providers.rst index 6b83c12b8..ce5a28223 100644 --- a/docs/source/credential_providers.rst +++ b/docs/source/credential_providers.rst @@ -1,5 +1,8 @@ Credential Providers ==================== +.. autoclass:: cognite.client.credentials.CredentialProvider + :members: + :member-order: bysource .. 
autoclass:: cognite.client.credentials.Token :members: :member-order: bysource diff --git a/docs/source/workflow_orchestration.rst b/docs/source/data_workflows.rst similarity index 76% rename from docs/source/workflow_orchestration.rst rename to docs/source/data_workflows.rst index 002395270..81bcc3a3e 100644 --- a/docs/source/workflow_orchestration.rst +++ b/docs/source/data_workflows.rst @@ -71,6 +71,23 @@ Update Status of Async Task ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. automethod:: cognite.client._api.workflows.WorkflowTaskAPI.update +Workflow Triggers +------------------- +Create triggers for workflow executions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. automethod:: cognite.client._api.workflows.WorkflowTriggerAPI.create + +Delete triggers for workflow executions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. automethod:: cognite.client._api.workflows.WorkflowTriggerAPI.delete + +Get triggers for workflow executions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. automethod:: cognite.client._api.workflows.WorkflowTriggerAPI.get_triggers + +Get trigger run history for a workflow trigger +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. automethod:: cognite.client._api.workflows.WorkflowTriggerAPI.get_trigger_run_history Data Workflows data classes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/source/index.rst b/docs/source/index.rst index 3d6796e5d..1881c108b 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -54,7 +54,7 @@ Contents data_organization transformations functions - workflow_orchestration + data_workflows unit_catalog filters deprecated diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst index 4f358eb0a..427fd99b6 100644 --- a/docs/source/quickstart.rst +++ b/docs/source/quickstart.rst @@ -1,7 +1,94 @@ Quickstart ========== -Instantiate a new client ------------------------- + +There are multiple ways that a CogniteClient can be configured to authenticate with Cognite Data Fusion (CDF). 
For the purpose of +this quickstart we'll demonstrate the most common/recommended patterns. More details and usage examples can be found in each respective +section: :ref:`CogniteClient `, :ref:`ClientConfig `, +:ref:`GlobalConfig `, and :ref:`credential_providers:Credential Providers`. + +Instantiate a new client from a configuration file +-------------------------------------------------- +Use this code to instantiate a client using a configuration file in order to execute API calls to Cognite Data Fusion (CDF). + +.. note:: + How you read in the configuration file is up to you as the :ref:`CogniteClient ` load method + accepts both a dictionary and a YAML/JSON string. So for the purposes of this example, we will use the yaml library to read in a yaml file and + substitute environment variables in the file string to ensure that sensitive information is not stored in the file. + +See :ref:`CogniteClient `, :ref:`ClientConfig `, +:ref:`GlobalConfig `, and :ref:`credential_providers:Credential Providers` +for more information on the configuration options. + +.. code:: yaml + + # cognite-sdk-config.yaml + client: + project: "my-project" + client_name: "my-special-client" + base_url: "https://${MY_CLUSTER}.cognitedata.com" + credentials: + client_credentials: + token_url: "https://login.microsoftonline.com/${MY_TENANT_ID}/oauth2/v2.0/token" + client_id: "${MY_CLIENT_ID}" + client_secret: "${MY_CLIENT_SECRET}" + scopes: ["https://api.cognitedata.com/.default"] + global: + max_retries: 10 + max_retry_backoff: 10 + +.. testsetup:: client_config_file + + >>> getfixture("set_envs") # Fixture defined in conftest.py + >>> getfixture("quickstart_client_config_file") # Fixture defined in conftest.py + +.. 
doctest:: client_config_file + + >>> import os + >>> from pathlib import Path + >>> from string import Template + + >>> import yaml + + >>> from cognite.client import CogniteClient, global_config + + >>> file_path = Path("cognite-sdk-config.yaml") + + >>> # Read in yaml file and substitute environment variables in the file string + >>> env_sub_template = Template(file_path.read_text()) + >>> file_env_parsed = env_sub_template.substitute(dict(os.environ)) + + >>> # Load yaml file string into a dictionary to parse global and client configurations + >>> cognite_config = yaml.safe_load(file_env_parsed) + + >>> # If you want to set a global configuration it must be done before creating the client + >>> global_config.apply_settings(cognite_config["global"]) + >>> client = CogniteClient.load(cognite_config["client"]) + +.. testcode:: client_config_file + :hide: + + >>> global_config.max_retries + 10 + >>> global_config.max_retry_backoff + 10 + >>> client.config.project + 'my-project' + >>> client.config.client_name + 'my-special-client' + >>> client.config.credentials.client_id + 'my-client-id' + >>> client.config.credentials.client_secret + 'my-client-secret' + >>> client.config.credentials.token_url + 'https://login.microsoftonline.com/my-tenant-id/oauth2/v2.0/token' + >>> client.config.credentials.scopes + ['https://api.cognitedata.com/.default'] + +Instantiate a new client using ClientConfig +------------------------------------------- + +Use this code to instantiate a client using the ClientConfig and global_config in order to execute API calls to Cognite Data Fusion (CDF). + Use this code to instantiate a client in order to execute API calls to Cognite Data Fusion (CDF). The :code:`client_name` is a user-defined string intended to give the client a unique identifier. You can provide the :code:`client_name` by passing it directly to the :ref:`ClientConfig ` constructor. 
@@ -12,36 +99,57 @@ Use one of the credential providers such as OAuthClientCredentials to authentica .. note:: The following example sets a global client configuration which will be used if no config is explicitly passed to :ref:`cognite_client:CogniteClient`. - All examples in this documentation assume that such a global configuration has been set. - -.. code:: python + All examples in this documentation going forward assume that such a global configuration has been set. + +.. testsetup:: client_config + + >>> getfixture("set_envs") # Fixture defined in conftest.py + +.. doctest:: client_config + + >>> from cognite.client import CogniteClient, ClientConfig, global_config + >>> from cognite.client.credentials import OAuthClientCredentials + + >>> # This value will depend on the cluster your CDF project runs on + >>> cluster = "api" + >>> base_url = f"https://{cluster}.cognitedata.com" + >>> tenant_id = "my-tenant-id" + >>> client_id = "my-client-id" + >>> # client secret should not be stored in-code, so we load it from an environment variable + >>> client_secret = os.environ["MY_CLIENT_SECRET"] + >>> creds = OAuthClientCredentials( + ... token_url=f"https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token", + ... client_id=client_id, + ... client_secret=client_secret, + ... scopes=[f"{base_url}/.default"] + ... ) + + >>> cnf = ClientConfig( + ... client_name="my-special-client", + ... base_url=base_url, + ... project="my-project", + ... credentials=creds + ... ) + + >>> global_config.default_client_config = cnf + >>> client = CogniteClient() + +.. 
testcode:: client_config + :hide: + + >>> client.config.project + 'my-project' + >>> client.config.client_name + 'my-special-client' + >>> client.config.credentials.client_id + 'my-client-id' + >>> client.config.credentials.client_secret + 'my-client-secret' + >>> client.config.credentials.token_url + 'https://login.microsoftonline.com/my-tenant-id/oauth2/v2.0/token' + >>> client.config.credentials.scopes + ['https://api.cognitedata.com/.default'] - from cognite.client import CogniteClient, ClientConfig, global_config - from cognite.client.credentials import OAuthClientCredentials - - # This value will depend on the cluster your CDF project runs on - cluster = "api" - base_url = f"https://{cluster}.cognitedata.com" - tenant_id = "my-tenant-id" - client_id = "my-client-id" - # client secret should not be stored in-code, so we load it from an environment variable - client_secret = os.environ["MY_CLIENT_SECRET"] - creds = OAuthClientCredentials( - token_url=f"https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token", - client_id=client_id, - client_secret=client_secret, - scopes=[f"{base_url}/.default"] - ) - - cnf = ClientConfig( - client_name="my-special-client", - base_url=base_url, - project="my-project", - credentials=creds - ) - - global_config.default_client_config = cnf - client = CogniteClient() Examples for all OAuth credential providers can be found in the :ref:`credential_providers:Credential Providers` section. diff --git a/poetry.lock b/poetry.lock index 43f6bc5e2..24b9b0b2c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -132,13 +132,13 @@ tzdata = ["tzdata"] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -589,13 +589,13 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "executing" -version = "2.0.1" +version = "2.1.0" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, ] [package.extras] @@ -1672,22 +1672,22 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "5.27.4" +version = "5.28.0" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.27.4-cp310-abi3-win32.whl", hash = "sha256:10319748764b917a9a7cddef1582a0a9cd0f8f6d04e545c6236f7ccaf9b624d9"}, - {file = "protobuf-5.27.4-cp310-abi3-win_amd64.whl", hash = "sha256:f0c24374aaaf103f33662e4de7666a4a4280abebdb8a9f3f0f9b1d71b61174ec"}, - {file = "protobuf-5.27.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e85fed07013e5a0121efbaf1b14355fdc66f6e545f12fc5985b2882370410006"}, 
- {file = "protobuf-5.27.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:d5a0e229061600842e57af4ff6a8522ede5280bcfa4fe7f3a1c20589377859a6"}, - {file = "protobuf-5.27.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:25ba1f0633f73c3939f3b84e1636f3eb3bab7196952ebb83906d56945edd6aa8"}, - {file = "protobuf-5.27.4-cp38-cp38-win32.whl", hash = "sha256:565b051249a2f8270af04206dd4f3b73a02343e7d9e072aed57441b369b3467d"}, - {file = "protobuf-5.27.4-cp38-cp38-win_amd64.whl", hash = "sha256:e673f173cbac4e59c7817ed358e471e4c77aa9166986edf3e731156379a556c7"}, - {file = "protobuf-5.27.4-cp39-cp39-win32.whl", hash = "sha256:25169c7624d5a9e669fa6faff5a6e818f854346d51ee347b2284676beb9e85dd"}, - {file = "protobuf-5.27.4-cp39-cp39-win_amd64.whl", hash = "sha256:1fe7735902e84ce35c4152cf07981c176713935a8efad78cea547aae5f4f75cb"}, - {file = "protobuf-5.27.4-py3-none-any.whl", hash = "sha256:b97259641e8d38738eef34a173e51d2d53a453baab01a32477a64752d9ce59a3"}, - {file = "protobuf-5.27.4.tar.gz", hash = "sha256:eaa1016e353d8fc5bf08c8087e96eed15f5297aa52bb7ee1f533278bb3f3aad7"}, + {file = "protobuf-5.28.0-cp310-abi3-win32.whl", hash = "sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0"}, + {file = "protobuf-5.28.0-cp310-abi3-win_amd64.whl", hash = "sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6"}, + {file = "protobuf-5.28.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:532627e8fdd825cf8767a2d2b94d77e874d5ddb0adefb04b237f7cc296748681"}, + {file = "protobuf-5.28.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:018db9056b9d75eb93d12a9d35120f97a84d9a919bcab11ed56ad2d399d6e8dd"}, + {file = "protobuf-5.28.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:6206afcb2d90181ae8722798dcb56dc76675ab67458ac24c0dd7d75d632ac9bd"}, + {file = "protobuf-5.28.0-cp38-cp38-win32.whl", hash = "sha256:eef7a8a2f4318e2cb2dee8666d26e58eaf437c14788f3a2911d0c3da40405ae8"}, + {file = "protobuf-5.28.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:d001a73c8bc2bf5b5c1360d59dd7573744e163b3607fa92788b7f3d5fefbd9a5"}, + {file = "protobuf-5.28.0-cp39-cp39-win32.whl", hash = "sha256:dde9fcaa24e7a9654f4baf2a55250b13a5ea701493d904c54069776b99a8216b"}, + {file = "protobuf-5.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:853db610214e77ee817ecf0514e0d1d052dff7f63a0c157aa6eabae98db8a8de"}, + {file = "protobuf-5.28.0-py3-none-any.whl", hash = "sha256:510ed78cd0980f6d3218099e874714cdf0d8a95582e7b059b06cabad855ed0a0"}, + {file = "protobuf-5.28.0.tar.gz", hash = "sha256:dde74af0fa774fa98892209992295adbfb91da3fa98c8f67a88afe8f5a349add"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index f2c757338..5209f3871 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "cognite-sdk" -version = "7.55.2" +version = "7.58.6" description = "Cognite Python SDK" readme = "README.md" documentation = "https://cognite-sdk-python.readthedocs-hosted.com" diff --git a/pytest.ini b/pytest.ini index 5dcac4857..5e41b34ef 100644 --- a/pytest.ini +++ b/pytest.ini @@ -11,4 +11,8 @@ filterwarnings = ignore::DeprecationWarning:test ignore::cognite.client.utils._experimental.FeaturePreviewWarning -addopts = --color=yes +testpaths = + tests + docs + +addopts = --color=yes --doctest-modules --doctest-glob='*.rst' diff --git a/tests/data/mypnid.pdf b/tests/data/mypnid.pdf new file mode 100644 index 000000000..4e6c7e15b Binary files /dev/null and b/tests/data/mypnid.pdf differ diff --git a/tests/tests_integration/test_api/test_data_modeling/test_instances.py b/tests/tests_integration/test_api/test_data_modeling/test_instances.py index 8ee76650a..91bcde8c9 100644 --- a/tests/tests_integration/test_api/test_data_modeling/test_instances.py +++ b/tests/tests_integration/test_api/test_data_modeling/test_instances.py @@ -1083,6 +1083,18 @@ def test_search_person(self, cognite_client: CogniteClient) -> None: assert len(persons) > 0 assert all(isinstance(person, PersonRead) for person in persons) + def 
test_listing_global_nodes(self, cognite_client: CogniteClient) -> None: + from cognite.client.data_classes.data_modeling.cdm.v1 import CogniteUnit + + # Space must be explicitly specified or nothing will be returned: + no_nodes = cognite_client.data_modeling.instances.list(sources=CogniteUnit.get_source()) + assert len(no_nodes) == 0 + + nodes = cognite_client.data_modeling.instances.list( + space="cdf_cdm_units", sources=CogniteUnit.get_source(), limit=5 + ) + assert len(nodes) == 5 + class TestInstancesSync: def test_sync_movies_released_in_1994(self, cognite_client: CogniteClient, movie_view: View) -> None: diff --git a/tests/tests_integration/test_api/test_data_workflows.py b/tests/tests_integration/test_api/test_data_workflows.py index 9a3c0f30b..7b87f44a7 100644 --- a/tests/tests_integration/test_api/test_data_workflows.py +++ b/tests/tests_integration/test_api/test_data_workflows.py @@ -253,7 +253,7 @@ def workflow_scheduled_trigger(cognite_client: CogniteClient, add_multiply_workf trigger = cognite_client.workflows.triggers.create( WorkflowTriggerCreate( external_id="integration_test-workflow-scheduled-trigger", - trigger_rule=WorkflowScheduledTriggerRule(cron_expression="0 0 * * *"), + trigger_rule=WorkflowScheduledTriggerRule(cron_expression="* * * * *"), workflow_external_id="integration_test-workflow-add_multiply", workflow_version="1", input={"a": 1, "b": 2}, @@ -490,7 +490,7 @@ def test_create_delete( ) -> None: assert workflow_scheduled_trigger is not None assert workflow_scheduled_trigger.external_id == "integration_test-workflow-scheduled-trigger" - assert workflow_scheduled_trigger.trigger_rule == WorkflowScheduledTriggerRule(cron_expression="0 0 * * *") + assert workflow_scheduled_trigger.trigger_rule == WorkflowScheduledTriggerRule(cron_expression="* * * * *") assert workflow_scheduled_trigger.workflow_external_id == "integration_test-workflow-add_multiply" assert workflow_scheduled_trigger.workflow_version == "1" assert 
workflow_scheduled_trigger.input == {"a": 1, "b": 2} diff --git a/tests/tests_integration/test_api/test_diagrams.py b/tests/tests_integration/test_api/test_diagrams.py index bd223e3bb..7c04c18e2 100644 --- a/tests/tests_integration/test_api/test_diagrams.py +++ b/tests/tests_integration/test_api/test_diagrams.py @@ -2,6 +2,7 @@ import pytest +from cognite.client import CogniteClient from cognite.client.data_classes.contextualization import ( DetectJobBundle, DiagramConvertResults, @@ -9,19 +10,78 @@ DiagramDetectResults, FileReference, ) +from cognite.client.data_classes.data_modeling import NodeApply, NodeId, NodeOrEdgeData, Space, SpaceApply, ViewId -PNID_FILE_ID = 3261066797848581 +PNID_FILE_EXTERNAL_ID = "mypnid.pdf" "" +DIAGRAM_SPACE = "diagram_space" + +CDM_SPACE = "cdf_cdm" +COGNITE_FILE = "CogniteFile" ELEVEN_PAGE_PNID_EXTERNAL_ID = "functional_tests.pdf" FIFTY_FIVE_PAGE_PNID_EXTERNAL_ID = "5functional_tests.pdf" +@pytest.fixture(scope="session") +def pnid_file_id(cognite_client: CogniteClient) -> int: + file = cognite_client.files.retrieve(external_id=PNID_FILE_EXTERNAL_ID) + + if file is None: + file = cognite_client.files.upload( + path="tests/data/mypnid.pdf", + external_id=PNID_FILE_EXTERNAL_ID, + name="mypnid.pdf", + mime_type="application/pdf", + ) + cognite_client.files.upload() + return file.id + + +@pytest.fixture(scope="session") +def diagram_space(cognite_client: CogniteClient) -> Space: + return cognite_client.data_modeling.spaces.apply(SpaceApply(space=DIAGRAM_SPACE)) + + +@pytest.fixture(scope="session") +def diagram_node(cognite_client: CogniteClient, pnid_file_id: int, diagram_space: Space) -> NodeId: + file = cognite_client.files.retrieve(id=pnid_file_id) + assert file is not None + + diagram_node_id = ( + cognite_client.data_modeling.instances.apply( + NodeApply( + space=diagram_space.space, + external_id=PNID_FILE_EXTERNAL_ID, + sources=[ + NodeOrEdgeData( + source=ViewId(space=CDM_SPACE, external_id=COGNITE_FILE, version="v1"), + 
properties={ + "name": file.name, + "mimeType": file.mime_type, + }, + ) + ], + ) + ) + .nodes[0] + .as_id() + ) + + node_file = cognite_client.files.retrieve(instance_id=diagram_node_id) + # Create the file if not existing already + if node_file is None or node_file.uploaded is False: + cognite_client.files.upload_content_bytes( + content=cognite_client.files.download_bytes(id=file.id), instance_id=diagram_node_id + ) + return diagram_node_id + + class TestPNIDParsingIntegration: @pytest.mark.skip - def test_run_diagram_detect(self, cognite_client): + def test_run_diagram_detect(self, cognite_client: CogniteClient, pnid_file_id: int): entities = [{"name": "YT-96122"}, {"name": "XE-96125", "ee": 123}, {"name": "XWDW-9615"}] - file_id = PNID_FILE_ID - detect_job = cognite_client.diagrams.detect(file_ids=[file_id], entities=entities) + + detect_job = cognite_client.diagrams.detect(file_ids=[pnid_file_id], entities=entities) assert isinstance(detect_job, DiagramDetectResults) assert {"statusCount", "numFiles", "items", "partialMatch", "minTokens", "searchField"}.issubset( detect_job.result @@ -30,10 +90,10 @@ def test_run_diagram_detect(self, cognite_client): assert "Completed" == detect_job.status assert [] == detect_job.errors assert isinstance(detect_job.items[0], DiagramDetectItem) - assert isinstance(detect_job[PNID_FILE_ID], DiagramDetectItem) + assert isinstance(detect_job[pnid_file_id], DiagramDetectItem) - assert 3 == len(detect_job[PNID_FILE_ID].annotations) - for annotation in detect_job[PNID_FILE_ID].annotations: + assert 3 == len(detect_job[pnid_file_id].annotations) + for annotation in detect_job[pnid_file_id].annotations: assert 1 == annotation["region"]["page"] convert_job = detect_job.convert() @@ -44,14 +104,14 @@ def test_run_diagram_detect(self, cognite_client): assert {"pngUrl", "svgUrl", "page"}.issubset(convert_job.result["items"][0]["results"][0]) assert "Completed" == convert_job.status - for res_page in convert_job[PNID_FILE_ID].pages: + for 
res_page in convert_job[pnid_file_id].pages: assert 1 == res_page.page assert ".svg" in res_page.svg_url assert ".png" in res_page.png_url # Enable multiple jobs job_bundle, _unposted_jobs = cognite_client.diagrams.detect( - file_ids=[file_id], entities=entities, multiple_jobs=True + file_ids=[pnid_file_id], entities=entities, multiple_jobs=True ) assert isinstance(job_bundle, DetectJobBundle) succeeded, failed = job_bundle.result @@ -103,3 +163,19 @@ def test_run_diagram_detect_in_pattern_mode(self, cognite_client): assert len(detected_by_resource_type["file_reference"]) >= 10 # 14 seen when making the test assert len(detected_by_resource_type["instrument"]) >= 60 # 72 seen when making the test + + def test_run_diagram_detect_with_file_instance_id(self, cognite_client, diagram_node: NodeId): + entities = [{"name": "YT-96122"}, {"name": "XE-96125", "ee": 123}, {"name": "XWDW-9615"}] + + detect_job = cognite_client.diagrams.detect(file_instance_ids=[diagram_node], entities=entities) + assert isinstance(detect_job, DiagramDetectResults) + assert {"statusCount", "numFiles", "items", "partialMatch", "minTokens", "searchField"}.issubset( + detect_job.result + ) + assert {"fileId", "fileInstanceId", "annotations"}.issubset(detect_job.result["items"][0]) + assert "Completed" == detect_job.status + assert [] == detect_job.errors + assert isinstance(detect_job.items[0], DiagramDetectItem) + assert detect_job.items[0].file_instance_id == diagram_node.dump(include_instance_type=False) + + assert len(detect_job.items[0].annotations) > 0 diff --git a/tests/tests_integration/test_api/test_hosted_extractors/test_sources.py b/tests/tests_integration/test_api/test_hosted_extractors/test_sources.py index d7c742f74..08cba2afb 100644 --- a/tests/tests_integration/test_api/test_hosted_extractors/test_sources.py +++ b/tests/tests_integration/test_api/test_hosted_extractors/test_sources.py @@ -1,5 +1,7 @@ from __future__ import annotations +import platform + import pytest from 
cognite.client import CogniteClient @@ -16,17 +18,16 @@ @pytest.fixture(scope="session") def one_event_hub_source(cognite_client: CogniteClient) -> SourceList: my_hub = EventHubSourceWrite( - external_id=f"myNewHub-{random_string(10)}", + external_id=f"myNewHub-{platform.system()}-{platform.python_version()}", host="myHost", key_name="myKeyName", key_value="myKey", event_hub_name="myEventHub", ) - retrieved = cognite_client.hosted_extractors.sources.retrieve(my_hub.external_id, ignore_unknown_ids=True) + retrieved = cognite_client.hosted_extractors.sources.retrieve([my_hub.external_id], ignore_unknown_ids=True) if retrieved: return retrieved - created = cognite_client.hosted_extractors.sources.create(my_hub) - return SourceList([created]) + return cognite_client.hosted_extractors.sources.create([my_hub]) class TestSources: @@ -69,7 +70,7 @@ def test_list(self, cognite_client: CogniteClient) -> None: def test_update_using_write_object(self, cognite_client: CogniteClient) -> None: my_hub = EventHubSourceWrite( - external_id=f"toupdatate-{random_string(10)}", + external_id=f"to-update-{random_string(10)}", host="myHost", key_name="myKeyName", key_value="myKey", diff --git a/tests/tests_unit/conftest.py b/tests/tests_unit/conftest.py index 2c9ff30cc..d772458f0 100644 --- a/tests/tests_unit/conftest.py +++ b/tests/tests_unit/conftest.py @@ -7,6 +7,9 @@ from cognite.client import ClientConfig, CogniteClient from cognite.client.credentials import Token +# Files to exclude test directories or modules +collect_ignore = ["test_api/function_test_resources"] + # TODO: This class-scoped client causes side-effects between tests... 
@pytest.fixture(scope="class") diff --git a/tests/tests_unit/test_base.py b/tests/tests_unit/test_base.py index 065c64200..ea54a53ee 100644 --- a/tests/tests_unit/test_base.py +++ b/tests/tests_unit/test_base.py @@ -193,6 +193,7 @@ def test_dump_load_only_required( "cognite_writable_cls", [ pytest.param(cls, id=f"{cls.__name__} in {cls.__module__}") + # Hosted extractors does not support the as_write method for cls in all_concrete_subclasses(WriteableCogniteResource) # Hosted extractors does not support the as_write method if cls not in {Destination} and not issubclass(cls, Source) diff --git a/tests/tests_unit/test_cognite_client.py b/tests/tests_unit/test_cognite_client.py index 4b4af8e6b..5a5f08cd7 100644 --- a/tests/tests_unit/test_cognite_client.py +++ b/tests/tests_unit/test_cognite_client.py @@ -102,6 +102,31 @@ def test_verify_ssl_enabled_by_default(self, rsps, client_config_w_token_factory assert client._api_client._http_client_with_retry.session.verify is True assert client._api_client._http_client.session.verify is True + def test_client_load(self): + config = { + "project": "test-project", + "client_name": "cognite-sdk-python", + "debug": True, + "credentials": { + "client_credentials": { + "client_id": "test-client-id", + "client_secret": "test-client-secret", + "token_url": TOKEN_URL, + "scopes": ["https://test.com/.default", "https://test.com/.admin"], + } + }, + } + client = CogniteClient.load(config) + assert client.config.project == "test-project" + assert client.config.credentials.client_id == "test-client-id" + assert client.config.credentials.client_secret == "test-client-secret" + assert client.config.credentials.token_url == TOKEN_URL + assert client.config.credentials.scopes == ["https://test.com/.default", "https://test.com/.admin"] + assert client.config.debug is True + log = logging.getLogger("cognite.client") + log.handlers = [] + log.propagate = False + class TestInstantiateWithClient: @pytest.mark.parametrize("cls", [Asset, Event, 
FileMetadata, TimeSeries]) diff --git a/tests/tests_unit/test_config.py b/tests/tests_unit/test_config.py new file mode 100644 index 000000000..37de12ae9 --- /dev/null +++ b/tests/tests_unit/test_config.py @@ -0,0 +1,102 @@ +from contextlib import nullcontext as does_not_raise + +import pytest + +from cognite.client import global_config +from cognite.client.config import ClientConfig, GlobalConfig +from cognite.client.credentials import Token + +_LOAD_RESOURCE_TO_DICT_ERROR = r"Resource must be json or yaml str, or dict, not" + + +class TestGlobalConfig: + def test_global_config_singleton(self): + with pytest.raises( + TypeError, + match=r"GlobalConfig is a singleton and cannot be instantiated directly. Use `global_config` instead,", + ): + GlobalConfig() + + @pytest.mark.parametrize( + "client_config", + [ + { + "project": "test-project", + "base_url": "https://test-cluster.cognitedata.com/", + "credentials": {"token": "abc"}, + "client_name": "test-client", + }, + ClientConfig( + project="test-project", + base_url="https://test-cluster.cognitedata.com/", + credentials=Token("abc"), + client_name="test-client", + ), + None, + ], + ) + def test_apply_settings(self, monkeypatch, client_config): + monkeypatch.delattr(GlobalConfig, "_instance") # ensure that the singleton is re-instantiated + gc = GlobalConfig() + assert gc.max_workers == 5 + assert gc.max_retries == 10 + + settings = { + "max_workers": 6, + "max_retries": 11, + "default_client_config": client_config, + } + with pytest.raises(TypeError, match=_LOAD_RESOURCE_TO_DICT_ERROR) if not client_config else does_not_raise(): + gc.apply_settings(settings) + assert gc.max_workers == 6 + assert gc.max_retries == 11 + assert isinstance(gc.default_client_config, ClientConfig) + assert isinstance(gc.default_client_config.credentials, Token) + assert gc.default_client_config.project == "test-project" + + def test_load_non_existent_attr(self): + settings = { + "max_workers": 0, # use a nonsensical value to ensure that 
it is not applied without assuming other tests kept the default value + "invalid_1": 10, + "invalid_2": "foo", + } + + with pytest.raises(ValueError, match=r"One or more invalid keys provided for global_config"): + global_config.apply_settings(settings) + + # confirm that the valid keys were not applied since we don't want a partial application + assert global_config.max_workers != 0 + + +class TestClientConfig: + def test_default(self): + config = { + "project": "test-project", + "cdf_cluster": "test-cluster", + "credentials": Token("abc"), + "client_name": "test-client", + } + client_config = ClientConfig.default(**config) + assert client_config.project == "test-project" + assert client_config.base_url == "https://test-cluster.cognitedata.com" + assert isinstance(client_config.credentials, Token) + assert client_config.client_name == "test-client" + + @pytest.mark.parametrize( + "credentials", + [{"token": "abc"}, '{"token": "abc"}', {"token": (lambda: "abc")}, Token("abc"), Token(lambda: "abc"), None], + ) + def test_load(self, credentials): + config = { + "project": "test-project", + "base_url": "https://test-cluster.cognitedata.com/", + "credentials": credentials, + "client_name": "test-client", + } + with pytest.raises(TypeError, match=_LOAD_RESOURCE_TO_DICT_ERROR) if not credentials else does_not_raise(): + client_config = ClientConfig.load(config) + assert client_config.project == "test-project" + assert client_config.base_url == "https://test-cluster.cognitedata.com" + assert isinstance(client_config.credentials, Token) + assert "Authorization", "Bearer abc" == client_config.credentials.authorization_header() + assert client_config.client_name == "test-client" diff --git a/tests/tests_unit/test_credential_providers.py b/tests/tests_unit/test_credential_providers.py index 4742eb5be..bce49dc4f 100644 --- a/tests/tests_unit/test_credential_providers.py +++ b/tests/tests_unit/test_credential_providers.py @@ -1,17 +1,45 @@ +from types import MappingProxyType 
from typing import ClassVar -from unittest.mock import Mock, patch +from unittest.mock import patch import pytest from oauthlib.oauth2 import InvalidClientIdError from cognite.client.credentials import ( + CredentialProvider, OAuthClientCertificate, OAuthClientCredentials, + OAuthDeviceCode, + OAuthInteractive, Token, ) from cognite.client.exceptions import CogniteAuthError +class TestCredentialProvider: + INVALID_CREDENTIAL_ERROR = "Invalid credential provider type given, the valid options are:" + INVALID_INPUT_LENGTH_ERROR = "Credential provider configuration must be a dictionary containing exactly one of the following supported types as the top level key:" + INVALID_INPUT_TYPE_ERROR = "Resource must be json or yaml str, or dict, not" + + @pytest.mark.parametrize( + "config,error_type,error_message", + [ + pytest.param({"foo": "abc"}, ValueError, INVALID_CREDENTIAL_ERROR, id="Invalid input: credential type"), + pytest.param("token", TypeError, INVALID_INPUT_TYPE_ERROR, id="Invalid input: not a dict, str"), + pytest.param({}, ValueError, INVALID_INPUT_LENGTH_ERROR, id="Invalid input: empty dict"), + pytest.param( + {"token": "abc", "client_credentials": {"client_id": "abc"}}, + ValueError, + INVALID_INPUT_LENGTH_ERROR, + id="Invalid input: multiple keys", + ), + ], + ) + def test_invalid_not_dict(self, config, error_type, error_message): + with pytest.raises(error_type, match=error_message): + CredentialProvider.load(config) + + class TestToken: def test_token_auth_header(self): creds = Token("abc") @@ -21,20 +49,125 @@ def test_token_factory_auth_header(self): creds = Token(lambda: "abc") assert "Authorization", "Bearer abc" == creds.authorization_header() + def test_token_non_string(self): + with pytest.raises( + TypeError, match=r"'token' must be a string or a no-argument-callable returning a string, not" + ): + Token({"foo": "bar"}) + + @pytest.mark.parametrize( + "config", + [ + {"token": "abc"}, + '{"token": "abc"}', + {"token": (lambda: "abc")}, + ], + ) + 
def test_load(self, config): + creds = Token.load(config) + assert isinstance(creds, Token) + assert "Authorization", "Bearer abc" == creds.authorization_header() + + @pytest.mark.parametrize( + "config", + [ + {"token": "abc"}, + {"token": {"token": "abc"}}, + '{"token": "abc"}', + '{"token": {"token": "abc"}}', + {"token": (lambda: "abc")}, + {"token": {"token": (lambda: "abc")}}, + ], + ) + def test_create_from_credential_provider(self, config): + creds = CredentialProvider.load(config) + assert isinstance(creds, Token) + assert "Authorization", "Bearer abc" == creds.authorization_header() + + +class TestOAuthDeviceCode: + DEFAULT_PROVIDER_ARGS: ClassVar = MappingProxyType( + { + "authority_url": "https://login.microsoftonline.com/xyz", + "client_id": "azure-client-id", + "scopes": ["https://greenfield.cognitedata.com/.default"], + } + ) + + @patch("cognite.client.credentials.PublicClientApplication") + @pytest.mark.parametrize("expires_in", (1000, "1001")) # some IDPs return as string + def test_access_token_generated(self, mock_public_client, expires_in): + mock_public_client().acquire_token_silent.return_value = { + "access_token": "azure_token", + "expires_in": expires_in, + } + creds = OAuthDeviceCode(**self.DEFAULT_PROVIDER_ARGS) + creds._refresh_access_token() + assert "Authorization", "Bearer azure_token" == creds.authorization_header() + + @patch("cognite.client.credentials.PublicClientApplication") + def test_load(self, mock_public_client): + creds = OAuthDeviceCode.load(dict(self.DEFAULT_PROVIDER_ARGS)) + assert isinstance(creds, OAuthDeviceCode) + assert "Authorization", "Bearer azure_token" == creds.authorization_header() + + @patch("cognite.client.credentials.PublicClientApplication") + def test_create_from_credential_provider(self, mock_public_client): + config = {"device_code": dict(self.DEFAULT_PROVIDER_ARGS)} + creds = CredentialProvider.load(config) + assert isinstance(creds, OAuthDeviceCode) + assert "Authorization", "Bearer azure_token" == 
creds.authorization_header() + + +class TestOAuthInteractive: + DEFAULT_PROVIDER_ARGS: ClassVar = MappingProxyType( + { + "authority_url": "https://login.microsoftonline.com/xyz", + "client_id": "azure-client-id", + "scopes": ["https://greenfield.cognitedata.com/.default"], + } + ) + + @patch("cognite.client.credentials.PublicClientApplication") + @pytest.mark.parametrize("expires_in", (1000, "1001")) # some IDPs return as string + def test_access_token_generated(self, mock_public_client, expires_in): + mock_public_client().acquire_token_silent.return_value = { + "access_token": "azure_token", + "expires_in": expires_in, + } + creds = OAuthInteractive(**self.DEFAULT_PROVIDER_ARGS) + creds._refresh_access_token() + assert "Authorization", "Bearer azure_token" == creds.authorization_header() + + @patch("cognite.client.credentials.PublicClientApplication") + def test_load(self, mock_public_client): + creds = OAuthInteractive.load(dict(self.DEFAULT_PROVIDER_ARGS)) + assert isinstance(creds, OAuthInteractive) + assert "Authorization", "Bearer azure_token" == creds.authorization_header() + + @patch("cognite.client.credentials.PublicClientApplication") + def test_create_from_credential_provider(self, mock_public_client): + config = {"interactive": dict(self.DEFAULT_PROVIDER_ARGS)} + creds = CredentialProvider.load(config) + assert isinstance(creds, OAuthInteractive) + assert "Authorization", "Bearer azure_token" == creds.authorization_header() + class TestOauthClientCredentials: - DEFAULT_PROVIDER_ARGS: ClassVar = { - "client_id": "azure-client-id", - "client_secret": "azure-client-secret", - "token_url": "https://login.microsoftonline.com/testingabc123/oauth2/v2.0/token", - "scopes": ["https://greenfield.cognitedata.com/.default"], - } + DEFAULT_PROVIDER_ARGS: ClassVar = MappingProxyType( + { + "client_id": "azure-client-id", + "client_secret": "azure-client-secret", + "token_url": "https://login.microsoftonline.com/testingabc123/oauth2/v2.0/token", + "scopes": 
["https://greenfield.cognitedata.com/.default"], + "other_custom_arg": "some_value", + } + ) @patch("cognite.client.credentials.BackendApplicationClient") @patch("cognite.client.credentials.OAuth2Session") @pytest.mark.parametrize("expires_in", (1000, "1001")) # some IDPs return as string def test_access_token_generated(self, mock_oauth_session, mock_backend_client, expires_in): - mock_backend_client().return_value = Mock() mock_oauth_session().fetch_token.return_value = {"access_token": "azure_token", "expires_in": expires_in} creds = OAuthClientCredentials(**self.DEFAULT_PROVIDER_ARGS) creds._refresh_access_token() @@ -43,7 +176,6 @@ def test_access_token_generated(self, mock_oauth_session, mock_backend_client, e @patch("cognite.client.credentials.BackendApplicationClient") @patch("cognite.client.credentials.OAuth2Session") def test_access_token_not_generated_due_to_error(self, mock_oauth_session, mock_backend_client): - mock_backend_client().return_value = Mock() mock_oauth_session().fetch_token.side_effect = InvalidClientIdError() with pytest.raises( CogniteAuthError, @@ -55,7 +187,6 @@ def test_access_token_not_generated_due_to_error(self, mock_oauth_session, mock_ @patch("cognite.client.credentials.BackendApplicationClient") @patch("cognite.client.credentials.OAuth2Session") def test_access_token_expired(self, mock_oauth_session, mock_backend_client): - mock_backend_client().return_value = Mock() mock_oauth_session().fetch_token.side_effect = [ {"access_token": "azure_token_expired", "expires_in": -1000}, {"access_token": "azure_token_refreshed", "expires_in": 1000}, @@ -64,15 +195,30 @@ def test_access_token_expired(self, mock_oauth_session, mock_backend_client): assert "Authorization", "Bearer azure_token_expired" == creds.authorization_header() assert "Authorization", "Bearer azure_token_refreshed" == creds.authorization_header() + def test_load(self): + creds = OAuthClientCredentials.load(dict(self.DEFAULT_PROVIDER_ARGS)) + assert isinstance(creds, 
OAuthClientCredentials) + + def test_create_from_credential_provider(self): + creds = CredentialProvider.load({"client_credentials": dict(self.DEFAULT_PROVIDER_ARGS)}) + assert isinstance(creds, OAuthClientCredentials) + assert creds.client_id == "azure-client-id" + assert creds.client_secret == "azure-client-secret" + assert creds.token_url == "https://login.microsoftonline.com/testingabc123/oauth2/v2.0/token" + assert creds.scopes == ["https://greenfield.cognitedata.com/.default"] + assert creds.token_custom_args == {"other_custom_arg": "some_value"} + class TestOAuthClientCertificate: - DEFAULT_PROVIDER_ARGS: ClassVar = { - "authority_url": "https://login.microsoftonline.com/xyz", - "client_id": "azure-client-id", - "cert_thumbprint": "XYZ123", - "certificate": "certificatecontents123", - "scopes": ["https://greenfield.cognitedata.com/.default"], - } + DEFAULT_PROVIDER_ARGS: ClassVar = MappingProxyType( + { + "authority_url": "https://login.microsoftonline.com/xyz", + "client_id": "azure-client-id", + "cert_thumbprint": "XYZ123", + "certificate": "certificatecontents123", + "scopes": ["https://greenfield.cognitedata.com/.default"], + } + ) @patch("cognite.client.credentials.ConfidentialClientApplication") def test_access_token_generated(self, mock_msal_app): @@ -82,3 +228,31 @@ def test_access_token_generated(self, mock_msal_app): } creds = OAuthClientCertificate(**self.DEFAULT_PROVIDER_ARGS) assert "Authorization", "Bearer azure_token" == creds.authorization_header() + + @patch("cognite.client.credentials.ConfidentialClientApplication") + def test_load(self, mock_msal_app): + mock_msal_app().acquire_token_for_client.return_value = { + "access_token": "azure_token", + "expires_in": 1000, + } + creds = OAuthClientCertificate.load(dict(self.DEFAULT_PROVIDER_ARGS)) + assert isinstance(creds, OAuthClientCertificate) + assert creds.authority_url == "https://login.microsoftonline.com/xyz" + assert creds.client_id == "azure-client-id" + assert creds.cert_thumbprint == 
"XYZ123" + assert creds.certificate == "certificatecontents123" + assert creds.scopes == ["https://greenfield.cognitedata.com/.default"] + + @patch("cognite.client.credentials.ConfidentialClientApplication") + def test_create_from_credential_provider(self, mock_msal_app): + mock_msal_app().acquire_token_for_client.return_value = { + "access_token": "azure_token", + "expires_in": 1000, + } + creds = CredentialProvider.load({"client_certificate": dict(self.DEFAULT_PROVIDER_ARGS)}) + assert isinstance(creds, OAuthClientCertificate) + assert creds.authority_url == "https://login.microsoftonline.com/xyz" + assert creds.client_id == "azure-client-id" + assert creds.cert_thumbprint == "XYZ123" + assert creds.certificate == "certificatecontents123" + assert creds.scopes == ["https://greenfield.cognitedata.com/.default"] diff --git a/tests/tests_unit/test_data_classes/test_data_models/test_filters.py b/tests/tests_unit/test_data_classes/test_data_models/test_filters.py index 9e18d8d66..6bd50012a 100644 --- a/tests/tests_unit/test_data_classes/test_data_models/test_filters.py +++ b/tests/tests_unit/test_data_classes/test_data_models/test_filters.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, Literal +from typing import TYPE_CHECKING, Any, Iterator, Literal import pytest from _pytest.mark import ParameterSet @@ -8,6 +8,7 @@ import cognite.client.data_classes.filters as f from cognite.client.data_classes._base import EnumProperty from cognite.client.data_classes.data_modeling import ViewId +from cognite.client.data_classes.data_modeling.data_types import DirectRelationReference from cognite.client.data_classes.filters import Filter from tests.utils import all_subclasses @@ -100,6 +101,34 @@ def load_and_dump_equals_data() -> Iterator[ParameterSet]: yield pytest.param( {"prefix": {"property": ["name"], "value": {"parameter": "param1"}}}, id="prefix with parameters" ) + yield pytest.param( + { + "prefix": { + "property": 
["cdf_cdm", "CogniteAsset/v1", "path"], + "value": [ + {"space": "s", "externalId": "0"}, + {"space": "s", "externalId": "1"}, + {"space": "s", "externalId": "2"}, + {"space": "s", "externalId": "3"}, + ], + } + }, + id="prefix with list of dicts", + ) + yield pytest.param( + { + "prefix": { + "property": ["cdf_cdm", "CogniteAsset/v1", "path"], + "value": [ + {"space": "s", "externalId": "0"}, + {"space": "s", "externalId": "1"}, + {"space": "s", "externalId": "2"}, + {"space": "s", "externalId": "3"}, + ], + } + }, + id="prefix with list of objects", + ) @pytest.mark.parametrize("raw_data", list(load_and_dump_equals_data())) @@ -114,7 +143,7 @@ def dump_filter_test_data() -> Iterator[ParameterSet]: f.Equals(property=["person", "name"], value=["Quentin", "Tarantino"]), f.ContainsAny(property=["person", "name"], values=[["Quentin", "Tarantino"]]), ) - expected = { + expected: dict[str, Any] = { "or": [ {"equals": {"property": ["person", "name"], "value": ["Quentin", "Tarantino"]}}, {"containsAny": {"property": ["person", "name"], "values": [["Quentin", "Tarantino"]]}}, @@ -181,6 +210,24 @@ def dump_filter_test_data() -> Iterator[ParameterSet]: {"invalid": {"previously_referenced_properties": [["some", "old", "prop"]], "filter_type": "overlaps"}}, ) + property_ref = ["cdf_cdm", "CogniteAsset/v1", "path"] + expected = { + "prefix": { + "property": property_ref, + "value": [{"space": "s", "externalId": "0"}, {"space": "s", "externalId": "1"}], + } + } + prop_list1 = f.Prefix( + property_ref, + [DirectRelationReference(space="s", external_id="0"), DirectRelationReference(space="s", external_id="1")], + ) + prop_list2 = f.Prefix( + property_ref, + [{"space": "s", "externalId": "0"}, {"space": "s", "externalId": "1"}], + ) + yield pytest.param(prop_list1, expected, id="Prefix filter with list property of objects") + yield pytest.param(prop_list2, expected, id="Prefix filter with list property of dicts") + @pytest.mark.parametrize("user_filter, expected", 
list(dump_filter_test_data())) def test_dump_filter(user_filter: Filter, expected: dict) -> None: @@ -208,25 +255,33 @@ def test_user_given_metadata_keys_are_not_camel_cased(property_cls: type) -> Non class TestSpaceFilter: @pytest.mark.parametrize( - "inst_type, space, expected_spaces", + "inst_type, space, expected", ( - ("node", "myspace", ["myspace"]), - ("edge", ["myspace"], ["myspace"]), - ("node", ["myspace", "another"], ["myspace", "another"]), + ("node", "myspace", {"equals": {"property": ["node", "space"], "value": "myspace"}}), + (None, ["myspace"], {"equals": {"property": ["node", "space"], "value": "myspace"}}), + ("edge", ["myspace"], {"equals": {"property": ["edge", "space"], "value": "myspace"}}), + ("node", ["myspace", "another"], {"in": {"property": ["node", "space"], "values": ["myspace", "another"]}}), + ("node", ("myspace", "another"), {"in": {"property": ["node", "space"], "values": ["myspace", "another"]}}), ), ) def test_space_filter( - self, inst_type: Literal["node", "edge"], space: str | list[str], expected_spaces: list[str] + self, inst_type: Literal["node", "edge"], space: str | list[str], expected: dict[str, Any] ) -> None: - space_filter = f.SpaceFilter(space, inst_type) - expected = {"in": {"property": [inst_type, "space"], "values": expected_spaces}} + space_filter = f.SpaceFilter(space, inst_type) if inst_type else f.SpaceFilter(space) assert expected == space_filter.dump() def test_space_filter_passes_isinstance_checks(self) -> None: space_filter = f.SpaceFilter("myspace", "edge") assert isinstance(space_filter, Filter) - def test_space_filter_passes_verification(self, cognite_client: CogniteClient) -> None: - space_filter = f.SpaceFilter("myspace", "edge") + @pytest.mark.parametrize( + "space_filter", + [ + f.SpaceFilter("s1", "edge"), + f.SpaceFilter(["s1"], "edge"), + f.SpaceFilter(["s1", "s2"], "edge"), + ], + ) + def test_space_filter_passes_verification(self, cognite_client: CogniteClient, space_filter: f.SpaceFilter) -> 
None: cognite_client.data_modeling.instances._validate_filter(space_filter) assert True diff --git a/tests/tests_unit/test_data_classes/test_data_models/test_typed_instances.py b/tests/tests_unit/test_data_classes/test_data_models/test_typed_instances.py index ed0854b57..23a382b9f 100644 --- a/tests/tests_unit/test_data_classes/test_data_models/test_typed_instances.py +++ b/tests/tests_unit/test_data_classes/test_data_models/test_typed_instances.py @@ -2,7 +2,10 @@ from datetime import date +import pytest + from cognite.client.data_classes.data_modeling import DirectRelationReference, ViewId +from cognite.client.data_classes.data_modeling.cdm.v1 import CogniteAssetApply, CogniteDescribableEdgeApply from cognite.client.data_classes.data_modeling.typed_instances import ( PropertyOptions, TypedEdge, @@ -262,3 +265,45 @@ def test_dump_load_flow(self) -> None: assert flow.dump() == expected loaded = Flow.load(expected) assert flow.dump() == loaded.dump() + + +@pytest.mark.parametrize( + "name, instance", + ( + ( + "CogniteAssetApply", + CogniteAssetApply( + space="foo", + external_id="child", + parent=("foo", "I-am-root"), + ), + ), + ( + "CogniteDescribableEdgeApply", + CogniteDescribableEdgeApply( + space="foo", + external_id="indescribable", + type=DirectRelationReference("foo", "yo"), + start_node=DirectRelationReference("foo", "yo2"), + end_node=DirectRelationReference("foo", "yo3"), + ), + ), + ), +) +def test_typed_instances_overrides_inherited_methods_from_instance_cls( + name: str, instance: TypedNode | TypedEdge +) -> None: + with pytest.raises(AttributeError, match=f"{name!r} object has no attribute 'get'"): + instance.get("space") + + with pytest.raises(TypeError, match=f"{name!r} object is not subscriptable"): + instance["foo"] + + with pytest.raises(TypeError, match=f"{name!r} object does not support item assignment"): + instance["foo"] = "bar" + + with pytest.raises(TypeError, match=f"{name!r} object does not support item deletion"): + del 
instance["external_id"] + + with pytest.raises(TypeError, match=f"argument of type {name!r} is not iterable"): + "foo" in instance diff --git a/tests/tests_unit/test_docstring_examples.py b/tests/tests_unit/test_docstring_examples.py index 6add9fbb4..756fa5119 100644 --- a/tests/tests_unit/test_docstring_examples.py +++ b/tests/tests_unit/test_docstring_examples.py @@ -1,10 +1,11 @@ import doctest from collections import defaultdict from unittest import TextTestRunner -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest +from cognite.client import _cognite_client, config, credentials from cognite.client._api import ( assets, data_sets, @@ -39,8 +40,26 @@ def run_docstring_tests(module): assert 0 == len(s.failures) -@patch("cognite.client.CogniteClient", CogniteClientMock) @patch("os.environ", defaultdict(lambda: "value")) # ensure env.var. lookups does not fail in doctests +def test_cognite_client(): + run_docstring_tests(_cognite_client) + + +@patch("cognite.client.credentials.ConfidentialClientApplication") +@patch("cognite.client.credentials.PublicClientApplication") +@patch("pathlib.Path.read_text", Mock(return_value="certificatecontents123")) +@patch("os.environ", defaultdict(lambda: "value")) # ensure env.var. 
lookups does not fail in doctests +def test_credential_providers(mock_msal_app, mock_public_client): + mock_msal_app().acquire_token_for_client.return_value = { + "access_token": "azure_token", + "expires_in": 1000, + } + mock_public_client().acquire_token_silent.return_value = {"access_token": "azure_token", "expires_in": 1000} + run_docstring_tests(credentials) + + +@patch("cognite.client.CogniteClient", CogniteClientMock) +@patch("os.environ", defaultdict(lambda: "value")) class TestDocstringExamples: def test_time_series(self): run_docstring_tests(time_series) @@ -100,3 +119,6 @@ def test_workflows(self): def test_units(self): run_docstring_tests(units) + + def test_config(self): + run_docstring_tests(config) diff --git a/tests/tests_unit/test_utils/test_auxiliary.py b/tests/tests_unit/test_utils/test_auxiliary.py index df687f8ab..e5c91635b 100644 --- a/tests/tests_unit/test_utils/test_auxiliary.py +++ b/tests/tests_unit/test_utils/test_auxiliary.py @@ -13,6 +13,7 @@ get_accepted_params, handle_deprecated_camel_case_argument, interpolate_and_url_encode, + load_resource_to_dict, remove_duplicates_keep_order, split_into_chunks, split_into_n_parts, @@ -228,3 +229,24 @@ class TestFastDictLoad: def test_load(self, item, expected): get_accepted_params.cache_clear() # For good measure assert expected == fast_dict_load(MyTestResource, item, cognite_client=None) + + +class TestLoadDictOrStr: + @pytest.mark.parametrize( + "input, expected", + ( + ({"foo": "bar"}, {"foo": "bar"}), + ({"foo": None}, {"foo": None}), + ('{"foo": "bar"}', {"foo": "bar"}), + ('{"foo": null}', {"foo": None}), + ("foo: bar", {"foo": "bar"}), + ('{"foo": {"bar": "thing"}}', {"foo": {"bar": "thing"}}), + ), + ) + def test_load_resource_to_dict(self, input, expected): + assert expected == load_resource_to_dict(input) + + @pytest.mark.parametrize("input", ("foo", 100)) + def test_load_resource_to_dict_raises(self, input): + with pytest.raises(TypeError, match="Resource must be json or yaml str, or 
dict, not"): + load_resource_to_dict(input) diff --git a/tests/tests_unit/test_utils/test_time.py b/tests/tests_unit/test_utils/test_time.py index b55250d30..34ae4069c 100644 --- a/tests/tests_unit/test_utils/test_time.py +++ b/tests/tests_unit/test_utils/test_time.py @@ -19,6 +19,7 @@ align_large_granularity, align_start_and_end_for_granularity, convert_and_isoformat_time_attrs, + convert_data_modelling_timestamp, datetime_to_ms, datetime_to_ms_iso_timestamp, granularity_to_ms, @@ -230,6 +231,27 @@ def test_negative(self, t): timestamp_to_ms(t) +class TestConvertDataModelingTimestamp: + @pytest.mark.parametrize( + "timestamp_str, expected", + [ + ("2021-01-01T00:00:00.000+00:00", datetime(2021, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc)), + ("2021-01-01T00:00:00.000+01:00", datetime(2021, 1, 1, 0, 0, 0, 0, tzinfo=timezone(timedelta(hours=1)))), + ( + "2021-01-01T00:00:00.000+01:15", + datetime(2021, 1, 1, 0, 0, 0, 0, tzinfo=timezone(timedelta(hours=1, minutes=15))), + ), + ( + "2021-01-01T00:00:00.000-01:15", + datetime(2021, 1, 1, 0, 0, 0, 0, tzinfo=timezone(timedelta(hours=-1, minutes=-15))), + ), + ("2024-09-03T09:36:01.17+00:00", datetime(2024, 9, 3, 9, 36, 1, 170000, tzinfo=timezone.utc)), + ], + ) + def test_valid_timestamp_str(self, timestamp_str: str, expected: datetime) -> None: + assert expected == convert_data_modelling_timestamp(timestamp_str) + + class TestGranularityToMs: @pytest.mark.parametrize( "granularity, expected_ms",