diff --git a/cognite/client/_api/assets.py b/cognite/client/_api/assets.py index f1e6cf2fe1..afd0515c93 100644 --- a/cognite/client/_api/assets.py +++ b/cognite/client/_api/assets.py @@ -49,7 +49,7 @@ from cognite.client.data_classes.filters import Filter, _validate_filter from cognite.client.data_classes.shared import AggregateBucketResult from cognite.client.exceptions import CogniteAPIError -from cognite.client.utils._auxiliary import split_into_n_parts +from cognite.client.utils._auxiliary import split_into_chunks, split_into_n_parts from cognite.client.utils._concurrency import classify_error, get_priority_executor from cognite.client.utils._identifier import IdentifierSequence from cognite.client.utils._text import to_camel_case @@ -1009,17 +1009,13 @@ def retrieve_subtree( def _get_asset_subtree(self, assets: list, current_depth: int, depth: int | None) -> list: subtree = assets if depth is None or current_depth < depth: - children = self._get_children(assets) - if children: + if children := self._get_children(subtree): subtree.extend(self._get_asset_subtree(children, current_depth + 1, depth)) return subtree def _get_children(self, assets: list) -> list: ids = [a.id for a in assets] - tasks = [] - chunk_size = 100 - for i in range(0, len(ids), chunk_size): - tasks.append({"parent_ids": ids[i : i + chunk_size], "limit": -1}) + tasks = [{"parent_ids": chunk, "limit": -1} for chunk in split_into_chunks(ids, 100)] tasks_summary = utils._concurrency.execute_tasks(self.list, tasks=tasks, max_workers=self._config.max_workers) tasks_summary.raise_compound_exception_if_failed_tasks() res_list = tasks_summary.results diff --git a/cognite/client/_api/datapoints.py b/cognite/client/_api/datapoints.py index da7d645b57..51df41366c 100644 --- a/cognite/client/_api/datapoints.py +++ b/cognite/client/_api/datapoints.py @@ -1537,9 +1537,7 @@ def _bin_datapoints(self, dps_object_list: list[dict[str, Any]]) -> list[list[di return binned_dps_object_list def 
_insert_datapoints_concurrently(self, dps_object_lists: list[list[dict[str, Any]]]) -> None: - tasks = [] - for dps_object_list in dps_object_lists: - tasks.append((dps_object_list,)) + tasks = [(dps_object_list,) for dps_object_list in dps_object_lists] summary = execute_tasks(self._insert_datapoints, tasks, max_workers=self.dps_client._config.max_workers) summary.raise_compound_exception_if_failed_tasks( task_unwrap_fn=lambda x: x[0], diff --git a/cognite/client/_api/synthetic_time_series.py b/cognite/client/_api/synthetic_time_series.py index 08bb2b735e..1f158837bf 100644 --- a/cognite/client/_api/synthetic_time_series.py +++ b/cognite/client/_api/synthetic_time_series.py @@ -74,10 +74,8 @@ def query( expressions if (isinstance(expressions, Sequence) and not isinstance(expressions, str)) else [expressions] ) - for i in range(len(expressions_to_iterate)): - expression, short_expression = self._build_expression( - expressions_to_iterate[i], variables, aggregate, granularity - ) + for exp in expressions_to_iterate: + expression, short_expression = self._build_expression(exp, variables, aggregate, granularity) query = { "expression": expression, "start": cognite.client.utils._time.timestamp_to_ms(start), diff --git a/tests/tests_integration/test_api/test_geospatial.py b/tests/tests_integration/test_api/test_geospatial.py index b90916e80b..d476d053cb 100644 --- a/tests/tests_integration/test_api/test_geospatial.py +++ b/tests/tests_integration/test_api/test_geospatial.py @@ -73,10 +73,9 @@ def test_feature_type(cognite_client): def large_feature_type(cognite_client): external_id = f"FT_{uuid.uuid4().hex[:10]}" feature_type_spec = FeatureType( - external_id=external_id, properties={f"attr{i}": {"type": "LONG"} for i in range(0, 80)} + external_id=external_id, properties={f"attr{i}": {"type": "LONG"} for i in range(80)} ) - feature_type = cognite_client.geospatial.create_feature_types(feature_type_spec) - yield feature_type + yield 
cognite_client.geospatial.create_feature_types(feature_type_spec) cognite_client.geospatial.delete_feature_types(external_id=external_id) @@ -178,9 +177,9 @@ def another_test_feature(cognite_client, test_feature_type): def many_features(cognite_client, large_feature_type): specs = [ Feature( - external_id=f"F_{uuid.uuid4().hex[:10]}", **{f"attr{i}": random.randint(10000, 20000) for i in range(0, 80)} + external_id=f"F_{uuid.uuid4().hex[:10]}", **{f"attr{i}": random.randint(10000, 20000) for i in range(80)} ) - for _ in range(0, 2000) + for _ in range(2000) ] features = cognite_client.geospatial.create_features(large_feature_type.external_id, specs) yield features @@ -273,18 +272,18 @@ def test_update_single_feature( assert res.asset_ids == [new_asset.id] def test_update_multiple_features(self, cognite_client, allow_crs_transformation, test_feature_type, test_features): - res = cognite_client.geospatial.update_features( + results = cognite_client.geospatial.update_features( feature_type_external_id=test_feature_type.external_id, feature=[ - Feature(external_id=test_features[idx].external_id, temperature=6.237, pressure=12.21, volume=34.43) - for idx in range(0, len(test_features)) + Feature(external_id=test_feat.external_id, temperature=6.237, pressure=12.21, volume=34.43) + for test_feat in test_features ], allow_crs_transformation=allow_crs_transformation, chunk_size=2, ) - for idx in range(0, len(test_features)): - assert res[idx].external_id == test_features[idx].external_id - assert res[idx].temperature == 6.237 + for res, test_feat in zip(results, test_features): + assert res.external_id == test_feat.external_id + assert res.temperature == 6.237 def test_search_single_feature(self, cognite_client, test_feature_type, test_feature): res = cognite_client.geospatial.search_features( diff --git a/tests/tests_integration/test_api/test_synthetic_time_series.py b/tests/tests_integration/test_api/test_synthetic_time_series.py index d4c9c42bb0..a106d12589 100644 --- 
a/tests/tests_integration/test_api/test_synthetic_time_series.py +++ b/tests/tests_integration/test_api/test_synthetic_time_series.py @@ -9,7 +9,7 @@ @pytest.fixture(scope="session") def test_time_series(cognite_client): - time_series_names = [f"test__constant_{i}_with_noise" for i in range(0, 10)] + time_series_names = [f"test__constant_{i}_with_noise" for i in range(10)] time_series = {} for ts in cognite_client.time_series.retrieve_multiple(external_ids=time_series_names): value = int(re.match(r"test__constant_(\d+)_with_noise", ts.name).group(1)) diff --git a/tests/tests_integration/test_api/test_templates.py b/tests/tests_integration/test_api/test_templates.py index bfc9350a80..f8d7459ceb 100644 --- a/tests/tests_integration/test_api/test_templates.py +++ b/tests/tests_integration/test_api/test_templates.py @@ -1,3 +1,4 @@ +import time import uuid import pytest @@ -11,18 +12,42 @@ TemplateInstance, TemplateInstanceList, ) -from cognite.client.data_classes.events import Event +from cognite.client.data_classes.events import Event, EventList from cognite.client.data_classes.templates import Source, TemplateInstanceUpdate, View, ViewResolveList, ViewResolver from cognite.client.exceptions import CogniteNotFoundError +@pytest.fixture(scope="session") +def ensure_event_test_data(cognite_client): + events = EventList( + [ + Event( + external_id=f"test_evt_templates_1_{i}", + type="test_templates_1", + start_time=i * 1000, + ) + for i in range(1001) + ] + ) + try: + cognite_client.events.retrieve_multiple( + external_ids=events.as_external_ids(), + ignore_unknown_ids=False, + ) + except CogniteNotFoundError: + cognite_client.events.upsert(events) + time.sleep(3) + + @pytest.fixture def new_template_group(cognite_client): external_id = uuid.uuid4().hex[:20] username = cognite_client.iam.token.inspect().subject template_group = cognite_client.templates.groups.create( TemplateGroup( - external_id=external_id, description="some description", owners=[username, external_id
+ "@cognite.com"] + external_id=external_id, + description="some description", + owners=[username, f"{external_id}@cognite.com"], ) ) yield template_group, external_id @@ -74,16 +99,8 @@ def new_template_instance(cognite_client, new_template_group_version): @pytest.fixture +# event test data is provisioned via the ensure_event_test_data fixture parameter below -def new_view(cognite_client, new_template_group_version): +def new_view(cognite_client, new_template_group_version, ensure_event_test_data): - events = [] - for i in range(0, 1001): - events.append(Event(external_id="test_evt_templates_1_" + str(i), type="test_templates_1", start_time=i * 1000)) - try: - cognite_client.events.create(events) - except Exception: - # We only generate this data once for a given project, to prevent issues with eventual consistency etc. - None - new_group, ext_id, new_version = new_template_group_version view = View( external_id="test", @@ -108,14 +125,14 @@ def test_groups_get_single(self, cognite_client, new_template_group): assert isinstance(res[0], TemplateGroup) assert new_group.external_id == ext_id - def test_groups_retrieve_unknown(self, cognite_client, new_template_group): + def test_groups_retrieve_unknown(self, cognite_client): with pytest.raises(CogniteNotFoundError): cognite_client.templates.groups.retrieve_multiple(external_ids=["this does not exist"]) assert cognite_client.templates.groups.retrieve_multiple(external_ids="this does not exist") is None def test_groups_list_filter(self, cognite_client, new_template_group): new_group, ext_id = new_template_group - res = cognite_client.templates.groups.list(owners=[ext_id + "@cognite.com"]) + res = cognite_client.templates.groups.list(owners=[f"{ext_id}@cognite.com"]) assert len(res) == 1 assert isinstance(res, TemplateGroupList) @@ -217,16 +234,12 @@ def test_view_list(self, cognite_client, new_view): def test_view_delete(self, cognite_client, new_view): new_group, ext_id, new_version, view = new_view cognite_client.templates.views.delete(ext_id, new_version.version, [view.external_id]) - assert ( - len( - [ - res - for res in
cognite_client.templates.views.list(ext_id, new_version.version) - if res.external_id == view.external_id - ] - ) - == 0 - ) + should_be_empty = [ + res + for res in cognite_client.templates.views.list(ext_id, new_version.version) + if res.external_id == view.external_id + ] + assert len(should_be_empty) == 0 def test_view_resolve(self, cognite_client, new_view): new_group, ext_id, new_version, view = new_view @@ -234,7 +247,7 @@ def test_view_resolve(self, cognite_client, new_view): ext_id, new_version.version, view.external_id, input={"minStartTime": 10 * 1000}, limit=10 ) expected = ViewResolveList._load( - [{"startTime": (i + 10) * 1000, "test_type": "test_templates_1"} for i in range(0, 10)] + [{"startTime": (i + 10) * 1000, "test_type": "test_templates_1"} for i in range(10)] ) assert res == expected diff --git a/tests/tests_unit/test_api/test_sequences.py b/tests/tests_unit/test_api/test_sequences.py index 164b0460cb..14046c7ed9 100644 --- a/tests/tests_unit/test_api/test_sequences.py +++ b/tests/tests_unit/test_api/test_sequences.py @@ -113,7 +113,7 @@ def mock_get_sequence_data_many_columns(rsps, cognite_client): json = { "id": 0, "externalId": "eid", - "columns": [{"externalId": "ceid" + str(i)} for i in range(0, 200)], + "columns": [{"externalId": f"ceid{i}"} for i in range(200)], "rows": [{"rowNumber": 0, "values": ["str"] * 200}], } rsps.add( @@ -558,7 +558,7 @@ def test_retrieve_dataframe_columns_mixed_with_zero( def test_retrieve_dataframe_columns_many_extid(self, cognite_client, mock_get_sequence_data_many_columns): data = cognite_client.sequences.data.retrieve(external_id="foo", start=1000000, end=1100000) assert isinstance(data, SequenceData) - assert ["ceid" + str(i) for i in range(200)] == list(data.to_pandas().columns) + assert [f"ceid{i}" for i in range(200)] == list(data.to_pandas().columns) def test_retrieve_dataframe_convert_null(self, cognite_client, mock_seq_response, mock_get_sequence_data_with_null): df = 
cognite_client.sequences.data.retrieve_dataframe(external_id="foo", start=0, end=None)