From 9c971cf734c6b0e903a03e3f6065e05d43e498d2 Mon Sep 17 00:00:00 2001
From: Mattia
Date: Fri, 11 Oct 2024 12:22:33 +0200
Subject: [PATCH] [Fixes #273] connect resource with execution request

---
 importer/celery_tasks.py             | 3 +++
 importer/handlers/common/metadata.py | 2 ++
 importer/handlers/common/vector.py   | 6 +++---
 importer/orchestrator.py             | 5 +++++
 importer/tests/unit/test_task.py     | 2 +-
 5 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/importer/celery_tasks.py b/importer/celery_tasks.py
index ac79fe8..4888577 100644
--- a/importer/celery_tasks.py
+++ b/importer/celery_tasks.py
@@ -367,6 +367,9 @@ def create_geonode_resource(
         handler_module_path, resource, _exec, **kwargs
     )

+    # assign the geonode resource to the ExecutionRequest
+    orchestrator.update_execution_request_obj(_exec, {"geonode_resource": resource})
+
     # at the end recall the import_orchestrator for the next step
     import_orchestrator.apply_async(
         (
diff --git a/importer/handlers/common/metadata.py b/importer/handlers/common/metadata.py
index 76fbbcf..b0778cd 100644
--- a/importer/handlers/common/metadata.py
+++ b/importer/handlers/common/metadata.py
@@ -78,6 +78,8 @@ def import_resource(self, files: dict, execution_id: str, **kwargs):
         self.handle_metadata_resource(_exec, dataset, original_handler)

         dataset.refresh_from_db()
+        # assign the resource to the execution_obj
+        orchestrator.update_execution_request_obj(_exec, {"geonode_resource": dataset})

         orchestrator.evaluate_execution_progress(
             execution_id, handler_module_path=str(self)
diff --git a/importer/handlers/common/vector.py b/importer/handlers/common/vector.py
index 13f49cf..a18f3c0 100644
--- a/importer/handlers/common/vector.py
+++ b/importer/handlers/common/vector.py
@@ -607,7 +607,7 @@ def create_geonode_resource(
             resource_manager.set_thumbnail(None, instance=saved_dataset)

         ResourceBase.objects.filter(alternate=alternate).update(dirty_state=False)
-
+        saved_dataset.refresh_from_db()
         return saved_dataset


@@ -805,13 +805,13 @@ def _import_resource_rollback(self, exec_id, instance_name=None, *args, **kwargs
                 "Dynamic model does not exists, removing ogr2ogr table in progress"
             )
             if instance_name is None:
-                logger.info("No table created, skipping...")
+                logger.warning("No table created, skipping...")
                 return
             db_name = os.getenv("DEFAULT_BACKEND_DATASTORE", "datastore")
             with connections[db_name].cursor() as cursor:
                 cursor.execute(f"DROP TABLE {instance_name}")
         except Exception as e:
-            logger.info(e)
+            logger.warning(e)
             pass

     def _publish_resource_rollback(self, exec_id, instance_name=None, *args, **kwargs):
diff --git a/importer/orchestrator.py b/importer/orchestrator.py
index fb170ac..e87127e 100644
--- a/importer/orchestrator.py
+++ b/importer/orchestrator.py
@@ -332,6 +332,11 @@ def update_execution_request_status(
                 task_args=celery_task_request.args
             )

+    def update_execution_request_obj(self, _exec_obj, payload):
+        ExecutionRequest.objects.filter(pk=_exec_obj.pk).update(**payload)
+        _exec_obj.refresh_from_db()
+        return _exec_obj
+
     def _last_step(self, execution_id, handler_module_path):
         """
         Last hookable step for each handler before mark the execution as completed
diff --git a/importer/tests/unit/test_task.py b/importer/tests/unit/test_task.py
index ae1d436..3c80b85 100644
--- a/importer/tests/unit/test_task.py
+++ b/importer/tests/unit/test_task.py
@@ -245,7 +245,7 @@ def test_publish_resource_if_overwrite_should_not_call_the_publishing(
         """
         try:
            with self.assertRaises(Exception):
-                get_resource.return_falue = True
+                get_resource.return_value = True
                publish_resources.return_value = True
                extract_resource_to_publish.return_value = [
                    {"crs": 4326, "name": "dataset3"}
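
Reviewer note: below is a minimal sketch of how the new linkage could be consumed once this patch lands. It assumes ExecutionRequest gains a nullable geonode_resource relation (the model/migration change is not part of this diff) and that the orchestrator still exposes get_execution_object(); the helper names are illustrative, not part of the patch.

# Illustrative sketch only, not part of the patch: assumes ExecutionRequest
# now carries a nullable `geonode_resource` relation and that
# `orchestrator.get_execution_object()` is available in the importer.
from importer.orchestrator import orchestrator


def resource_for_execution(execution_id: str):
    """Return the GeoNode resource linked to an import run, if any."""
    _exec = orchestrator.get_execution_object(execution_id)
    # Populated by create_geonode_resource() and by the metadata handler
    # through the new update_execution_request_obj() helper.
    return _exec.geonode_resource


def link_resource_to_execution(execution_id: str, resource):
    """Mirror what the patched tasks do: attach a resource to the request."""
    _exec = orchestrator.get_execution_object(execution_id)
    return orchestrator.update_execution_request_obj(
        _exec, {"geonode_resource": resource}
    )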