From a5c56fe1f1ea7afeb9ace72c292e378ce5b35cb8 Mon Sep 17 00:00:00 2001 From: abhinavchobey <111754147+abhinavchobey@users.noreply.github.com> Date: Thu, 29 Aug 2024 06:50:43 +0530 Subject: [PATCH] Regression master branch (#203) * bug fix in update_global_exe_ids * bug update_global_art_ids updates doesn't modifies * adding changes * Added logging dir before current script --------- Co-authored-by: Abhinav Chobey Co-authored-by: AyeshaSanadi --- cmflib/cmf.py | 9 ++++----- server/app/main.py | 12 ++++-------- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/cmflib/cmf.py b/cmflib/cmf.py index 09cfda3b..d33d4e1d 100644 --- a/cmflib/cmf.py +++ b/cmflib/cmf.py @@ -357,6 +357,7 @@ def create_execution( Returns: Execution object from ML Metadata library associated with the new execution for this stage. """ + logging_dir = change_dir(self.cmf_init_path) # Assigning current file name as stage and execution name current_script = sys.argv[0] file_name = os.path.basename(current_script) @@ -368,7 +369,6 @@ def create_execution( # Initializing the execution related fields - logging_dir = change_dir(self.cmf_init_path) self.metrics = {} self.input_artifacts = [] self.execution_label_props = {} @@ -647,6 +647,7 @@ def log_dataset( Returns: Artifact object from ML Metadata library associated with the new dataset artifact. """ + logging_dir = change_dir(self.cmf_init_path) # Assigning current file name as stage and execution name current_script = sys.argv[0] file_name = os.path.basename(current_script) @@ -665,7 +666,6 @@ def log_dataset( # If the dataset already exist , then we just link the existing dataset to the execution # We do not update the dataset properties . 
# We need to append the new properties to the existing dataset properties - logging_dir = change_dir(self.cmf_init_path) custom_props = {} if custom_properties is None else custom_properties git_repo = git_get_repo() name = re.split("/", url)[-1] @@ -999,6 +999,7 @@ def log_model( Artifact object from ML Metadata library associated with the new model artifact. """ + logging_dir = change_dir(self.cmf_init_path) # Assigning current file name as stage and execution name current_script = sys.argv[0] file_name = os.path.basename(current_script) @@ -1018,7 +1019,6 @@ def log_model( # If the model already exist , then we just link the existing model to the execution # We do not update the model properties . # We need to append the new properties to the existing model properties - logging_dir = change_dir(self.cmf_init_path) if custom_properties is None: custom_properties = {} custom_props = {} if custom_properties is None else custom_properties @@ -1379,6 +1379,7 @@ def log_execution_metrics( Returns: Artifact object from ML Metadata library associated with the new coarse-grained metrics artifact. """ + logging_dir = change_dir(self.cmf_init_path) # Assigning current file name as stage and execution name current_script = sys.argv[0] file_name = os.path.basename(current_script) @@ -1393,8 +1394,6 @@ def log_execution_metrics( self.create_execution(execution_type=name_without_extension) assert self.execution is not None, f"Failed to create execution for {self.pipeline_name}!!" 
- - logging_dir = change_dir(self.cmf_init_path) custom_props = {} if custom_properties is None else custom_properties uri = str(uuid.uuid1()) metrics_name = metrics_name + ":" + uri + ":" + str(self.execution.id) diff --git a/server/app/main.py b/server/app/main.py index 1e6ecade..4ab11071 100644 --- a/server/app/main.py +++ b/server/app/main.py @@ -373,18 +373,14 @@ async def model_card(request:Request, modelId: int, response_model=List[Dict[str async def update_global_art_dict(pipeline_name): global dict_of_art_ids output_dict = await get_all_artifact_ids(server_store_path, dict_of_exe_ids, pipeline_name) - if pipeline_name in dict_of_art_ids: - dict_of_art_ids[pipeline_name].update(output_dict[pipeline_name]) - else: - dict_of_art_ids[pipeline_name] = output_dict[pipeline_name] + # type(dict_of_art_ids[pipeline_name]) = Dict[ ] + dict_of_art_ids[pipeline_name]=output_dict[pipeline_name] return async def update_global_exe_dict(pipeline_name): global dict_of_exe_ids output_dict = await get_all_exe_ids(server_store_path, pipeline_name) - if pipeline_name in dict_of_exe_ids: - dict_of_exe_ids[pipeline_name].update(output_dict[pipeline_name]) - else: - dict_of_exe_ids[pipeline_name] = output_dict[pipeline_name] + # type(dict_of_exe_ids[pipeline_name]) = Dict[ ] + dict_of_exe_ids[pipeline_name] = output_dict[pipeline_name] return