From c138c891444118dc83490f97b8923737342c7877 Mon Sep 17 00:00:00 2001
From: Bert Verstraete
Date: Thu, 14 Nov 2024 12:00:50 +0100
Subject: [PATCH] [CDF-22608] Add data set support for data workflows (#2020)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add data set support for data workflows on both the groups API and the
Data Workflows API. The capability lives on the Workflow object; any
operations on versions, executions, triggers and tasks belonging to or
spawned by that workflow require access to the workflow's data set.

Co-authored-by: Håkon V. Treider
---
 CHANGELOG.md                                  |  4 ++++
 cognite/client/_version.py                    |  2 +-
 cognite/client/data_classes/capabilities.py   |  1 +
 cognite/client/data_classes/workflows.py      | 22 ++++++++++++++-----
 pyproject.toml                                |  2 +-
 .../test_api/test_data_workflows.py           | 14 +++++++++---
 .../test_data_classes/test_capabilities.py    |  7 ++++++
 7 files changed, 42 insertions(+), 10 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0fd6ca283b..e2a659c50d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,10 @@ Changes are grouped as follows
 - `Fixed` for any bug fixes.
 - `Security` in case of vulnerabilities.
 
+## [7.65.1] - 2024-11-14
+### Added
+- Workflows now support data sets
+
 ## [7.65.0] - 2024-11-13
 ### Added
 - DatapointsAPI now support iteration like most other APIs: `for dps in client.time_series.data(...)`.
diff --git a/cognite/client/_version.py b/cognite/client/_version.py
index c89fbbe0ef..d55a82d627 100644
--- a/cognite/client/_version.py
+++ b/cognite/client/_version.py
@@ -1,4 +1,4 @@
 from __future__ import annotations
 
-__version__ = "7.65.0"
+__version__ = "7.65.1"
 __api_subversion__ = "20230101"
diff --git a/cognite/client/data_classes/capabilities.py b/cognite/client/data_classes/capabilities.py
index 5fbb58fe01..cdfde0f8b6 100644
--- a/cognite/client/data_classes/capabilities.py
+++ b/cognite/client/data_classes/capabilities.py
@@ -1275,6 +1275,7 @@ class Action(Capability.Action):  # type: ignore [misc]
 
     class Scope:
         All = AllScope
+        DataSet = DataSetScope
 
 
 @dataclass
diff --git a/cognite/client/data_classes/workflows.py b/cognite/client/data_classes/workflows.py
index d63fafc5bb..5a7d9c29e1 100644
--- a/cognite/client/data_classes/workflows.py
+++ b/cognite/client/data_classes/workflows.py
@@ -38,9 +38,10 @@
 
 
 class WorkflowCore(WriteableCogniteResource["WorkflowUpsert"], ABC):
-    def __init__(self, external_id: str, description: str | None) -> None:
+    def __init__(self, external_id: str, description: str | None = None, data_set_id: int | None = None) -> None:
         self.external_id = external_id
         self.description = description
+        self.data_set_id = data_set_id
 
 
 class WorkflowUpsert(WorkflowCore):
@@ -50,13 +51,21 @@ class WorkflowUpsert(WorkflowCore):
     Args:
         external_id (str): The external ID provided by the client. Must be unique for the resource type.
         description (str | None): Description of the workflow. Note that when updating a workflow, the description will
-            always be overwritten also if it is set to None. Meaning if the wokflow already has a description,
+            always be overwritten also if it is set to None. Meaning if the workflow already has a description,
             and you want to keep it, you need to provide the description when updating the workflow.
+        data_set_id (int | None): The id of the data set this workflow belongs to.
+            If a dataSetId is provided, any operations on this workflow, or its versions, executions,
+            and triggers will require appropriate access to the data set. More information on data sets
+            and their configuration can be found here: https://docs.cognite.com/cdf/data_governance/concepts/datasets/
     """
 
     @classmethod
     def _load(cls, resource: dict, cognite_client: CogniteClient | None = None) -> Self:
-        return cls(external_id=resource["externalId"], description=resource.get("description"))
+        return cls(
+            external_id=resource["externalId"],
+            description=resource.get("description"),
+            data_set_id=resource.get("dataSetId"),
+        )
 
     def as_write(self) -> WorkflowUpsert:
         """Returns this workflow instance."""
@@ -71,7 +80,7 @@ class Workflow(WorkflowCore):
         external_id (str): The external ID provided by the client. Must be unique for the resource type.
         created_time (int): The time when the workflow was created. Unix timestamp in milliseconds.
         description (str | None): Description of the workflow. Defaults to None.
-
+        data_set_id (int | None): The id of the data set this workflow belongs to.
     """
 
     def __init__(
@@ -79,8 +88,9 @@ def __init__(
         external_id: str,
         created_time: int,
         description: str | None = None,
+        data_set_id: int | None = None,
     ) -> None:
-        super().__init__(external_id, description)
+        super().__init__(external_id, description, data_set_id)
         self.created_time = created_time
 
     @classmethod
@@ -89,6 +99,7 @@ def _load(cls, resource: dict, cognite_client: CogniteClient | None = None) -> S
             external_id=resource["externalId"],
             description=resource.get("description"),
             created_time=resource["createdTime"],
+            data_set_id=resource.get("dataSetId"),
         )
 
     def as_write(self) -> WorkflowUpsert:
@@ -96,6 +107,7 @@ def as_write(self) -> WorkflowUpsert:
         return WorkflowUpsert(
             external_id=self.external_id,
             description=self.description,
+            data_set_id=self.data_set_id,
         )
 
 
diff --git a/pyproject.toml b/pyproject.toml
index 69f514464c..178dcdfe58 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "cognite-sdk"
-version = "7.65.0"
+version = "7.65.1"
 description = "Cognite Python SDK"
 readme = "README.md"
 documentation = "https://cognite-sdk-python.readthedocs-hosted.com"
diff --git a/tests/tests_integration/test_api/test_data_workflows.py b/tests/tests_integration/test_api/test_data_workflows.py
index 57ed924b50..7eb00651f1 100644
--- a/tests/tests_integration/test_api/test_data_workflows.py
+++ b/tests/tests_integration/test_api/test_data_workflows.py
@@ -5,7 +5,7 @@
 import pytest
 
 from cognite.client import CogniteClient
-from cognite.client.data_classes import Function
+from cognite.client.data_classes import DataSet, Function
 from cognite.client.data_classes.data_modeling import ViewId
 from cognite.client.data_classes.data_modeling.query import NodeResultSetExpression, Select, SourceSelector
 from cognite.client.data_classes.workflows import (
@@ -33,10 +33,11 @@
 
 
 @pytest.fixture
-def workflow_list(cognite_client: CogniteClient) -> WorkflowList:
+def workflow_list(cognite_client: CogniteClient, data_set: DataSet) -> WorkflowList:
     workflow1 = WorkflowUpsert(
         external_id="integration_test-workflow1",
         description="This is workflow for testing purposes",
+        data_set_id=data_set.id,
     )
     workflow2 = WorkflowUpsert(
         external_id="integration_test-workflow2",
@@ -268,6 +269,11 @@ def workflow_scheduled_trigger(cognite_client: CogniteClient, add_multiply_workf
     cognite_client.workflows.triggers.delete(trigger.external_id)
 
 
+@pytest.fixture(scope="session")
+def data_set(cognite_client: CogniteClient) -> DataSet:
+    return cognite_client.data_sets.list(limit=1)[0]
+
+
 @pytest.fixture()
 def workflow_data_modeling_trigger(cognite_client: CogniteClient, add_multiply_workflow: WorkflowVersion) -> None:
     trigger = cognite_client.workflows.triggers.create(
@@ -294,10 +300,11 @@ def workflow_data_modeling_trigger(cognite_client: CogniteClient, add_multiply_w
 
 
 class TestWorkflows:
-    def test_upsert_delete(self, cognite_client: CogniteClient) -> None:
+    def test_upsert_delete(self, cognite_client: CogniteClient, data_set: DataSet) -> None:
         workflow = WorkflowUpsert(
             external_id="integration_test-test_create_delete" + random_string(5),
             description="This is ephemeral workflow for testing purposes",
+            data_set_id=data_set.id,
         )
         cognite_client.workflows.delete(workflow.external_id, ignore_unknown_ids=True)
 
@@ -308,6 +315,7 @@ def test_upsert_delete(self, cognite_client: CogniteClient) -> None:
             assert created_workflow.external_id == workflow.external_id
             assert created_workflow.description == workflow.description
             assert created_workflow.created_time is not None
+            assert created_workflow.data_set_id == data_set.id
         finally:
             if created_workflow is not None:
                 cognite_client.workflows.delete(created_workflow.external_id)
diff --git a/tests/tests_unit/test_data_classes/test_capabilities.py b/tests/tests_unit/test_data_classes/test_capabilities.py
index 7dcc257a56..21af7a2aeb 100644
--- a/tests/tests_unit/test_data_classes/test_capabilities.py
+++ b/tests/tests_unit/test_data_classes/test_capabilities.py
@@ -135,6 +135,13 @@ def all_acls():
         {"transformationsAcl": {"actions": ["READ", "WRITE"], "scope": {"datasetScope": {"ids": ["94"]}}}},
         {"visionModelAcl": {"actions": ["READ", "WRITE"], "scope": {"all": {}}}},
         {"wellsAcl": {"actions": ["READ", "WRITE"], "scope": {"all": {}}}},
+        {"workflowOrchestrationAcl": {"actions": ["READ", "WRITE"], "scope": {"all": {}}}},
+        {
+            "workflowOrchestrationAcl": {
+                "actions": ["READ", "WRITE"],
+                "scope": {"datasetScope": {"ids": ["2332579", "372"]}},
+            }
+        },
     ]
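
Usage sketch (illustrative, not part of the diff): with these changes, a workflow can be bound to a data set, and workflow access can be granted via the data-set scope added to workflowOrchestrationAcl above. This assumes cognite-sdk >= 7.65.1; the identifiers "my-data-set", "my-workflow" and "workflow-users" are hypothetical placeholders.

    from cognite.client import CogniteClient
    from cognite.client.data_classes import Group
    from cognite.client.data_classes.capabilities import WorkflowOrchestrationAcl
    from cognite.client.data_classes.workflows import WorkflowUpsert

    client = CogniteClient()

    # Look up the data set the workflow should belong to ("my-data-set" is a placeholder).
    data_set = client.data_sets.retrieve(external_id="my-data-set")
    assert data_set is not None

    # Create (or update) a workflow bound to that data set. Operations on the workflow's
    # versions, executions and triggers will then require access to this data set.
    client.workflows.upsert(
        WorkflowUpsert(
            external_id="my-workflow",
            description="Workflow governed by a data set",
            data_set_id=data_set.id,
        )
    )

    # Grant a group workflow access scoped to that data set, using the
    # WorkflowOrchestrationAcl.Scope.DataSet scope introduced by this patch.
    acl = WorkflowOrchestrationAcl(
        actions=[WorkflowOrchestrationAcl.Action.Read, WorkflowOrchestrationAcl.Action.Write],
        scope=WorkflowOrchestrationAcl.Scope.DataSet([data_set.id]),
    )
    client.iam.groups.create(Group(name="workflow-users", capabilities=[acl]))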