diff --git a/src/depiction_targeted_preproc/pipeline/prepare_inputs.py b/src/depiction_targeted_preproc/pipeline/prepare_inputs.py
index d21c1df..4b21310 100644
--- a/src/depiction_targeted_preproc/pipeline/prepare_inputs.py
+++ b/src/depiction_targeted_preproc/pipeline/prepare_inputs.py
@@ -3,9 +3,9 @@
 from pathlib import Path
 
 import yaml
+
 from bfabric import Bfabric
 from bfabric.entities import Resource
-from bfabric.experimental.app_interface.input_preparation import prepare_folder
 
 
 def _get_ibd_resource_id(imzml_resource_id: int, client: Bfabric) -> int:
@@ -58,14 +58,3 @@ def write_inputs_spec(dataset_id: int, imzml_resource_id: int, client: Bfabric,
     inputs_yaml = sample_dir / "inputs.yml"
     with inputs_yaml.open("w") as file:
         yaml.safe_dump(inputs_spec, file)
-
-
-def prepare_inputs(
-    client: Bfabric,
-    sample_dir: Path,
-    dataset_id: int,
-    imzml_resource_id: int,
-    ssh_user: str | None,
-) -> None:
-    write_inputs_spec(dataset_id=dataset_id, imzml_resource_id=imzml_resource_id, client=client, sample_dir=sample_dir)
-    prepare_folder(inputs_yaml=sample_dir / "inputs.yml", target_folder=sample_dir, client=client, ssh_user=ssh_user)
diff --git a/src/depiction_targeted_preproc/pipeline/prepare_params.py b/src/depiction_targeted_preproc/pipeline/prepare_params.py
index b6e6174..a1d2938 100644
--- a/src/depiction_targeted_preproc/pipeline/prepare_params.py
+++ b/src/depiction_targeted_preproc/pipeline/prepare_params.py
@@ -1,13 +1,6 @@
-from pathlib import Path
-
-import cyclopts
-import yaml
-from bfabric import Bfabric
-from bfabric.entities import Workunit
-from bfabric.experimental.app_interface.workunit.definition import WorkunitExecutionDefinition
-from loguru import logger
 from pydantic import BaseModel
 
+from bfabric.experimental.app_interface.workunit.definition import WorkunitExecutionDefinition
 from depiction_targeted_preproc.pipeline_config.model import PipelineArtifact
 
 
@@ -31,44 +24,3 @@ def parse_params(definition: WorkunitExecutionDefinition) -> dict[str, str | int
         requested_artifacts=requested_artifacts,
         mass_list_id=definition.raw_parameters.get("mass_list_id"),
     ).model_dump(mode="json")
-
-
-def prepare_params(
-    client: Bfabric,
-    sample_dir: Path,
-    workunit_id: int,
-    override: bool,
-) -> None:
-    sample_dir.mkdir(parents=True, exist_ok=True)
-    params_yaml = sample_dir / "params.yml"
-    if params_yaml.is_file() and not override:
-        logger.info(f"Skipping params generation for {workunit_id} as it already exists and override is not set")
-        return
-    definition = WorkunitExecutionDefinition.from_workunit(Workunit.find(id=workunit_id, client=client))
-    with params_yaml.open("w") as file:
-        yaml.safe_dump(parse_params(definition), file)
-
-
-app = cyclopts.App()
-
-
-@app.default
-def prepare_params_from_cli(
-    sample_dir: Path,
-    config_preset: str,
-    requested_artifacts: list[str],
-    n_jobs: int = 32,
-) -> None:
-    sample_dir.mkdir(parents=True, exist_ok=True)
-    params_yaml = sample_dir / "params.yml"
-    with params_yaml.open("w") as file:
-        yaml.safe_dump(
-            Params(config_preset=config_preset, requested_artifacts=requested_artifacts, n_jobs=n_jobs).model_dump(
-                mode="json"
-            ),
-            file,
-        )
-
-
-if __name__ == "__main__":
-    app()
diff --git a/src/depiction_targeted_preproc/pipeline/store_outputs.py b/src/depiction_targeted_preproc/pipeline/store_outputs.py
deleted file mode 100644
index f8ebad8..0000000
--- a/src/depiction_targeted_preproc/pipeline/store_outputs.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-
-import yaml
-from bfabric import Bfabric
-from bfabric.entities import Workunit
-from bfabric.experimental.app_interface.output_registration import register_outputs
-
-
-def _get_outputs_spec(zip_file_path: Path, workunit: Workunit) -> dict[str, list[dict[str, str | int | bool]]]:
-    return {
-        "outputs": [
-            {
-                "type": "bfabric_copy_resource",
-                "local_path": str(zip_file_path.absolute()),
-                "store_entry_path": zip_file_path.name,
-                "workunit_id": workunit.id,
-                "storage_id": workunit.application.storage.id,
-            }
-        ]
-    }
-
-
-def write_outputs_spec(zip_file_path: Path, workunit: Workunit) -> Path:
-    output_spec = _get_outputs_spec(zip_file_path=zip_file_path, workunit=workunit)
-    outputs_yaml = zip_file_path.parent / f"{zip_file_path.stem}_outputs_spec.yml"
-    with outputs_yaml.open("w") as file:
-        yaml.safe_dump(output_spec, file)
-    return outputs_yaml
-
-
-def store_outputs(client: Bfabric, zip_file_path: Path, workunit_id: int, ssh_user: str | None):
-    workunit = Workunit.find(id=workunit_id, client=client)
-    outputs_yaml = write_outputs_spec(zip_file_path=zip_file_path, workunit=workunit)
-    register_outputs(outputs_yaml=outputs_yaml, client=client, ssh_user=ssh_user)
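
With the `prepare_inputs`, `prepare_params`, and `store_outputs` wrappers removed, a call site now has to compose the remaining building blocks itself. Below is a minimal sketch of what that composition might look like, reusing only the functions and signatures visible in this diff; the orchestrating function `run_one_sample` and its argument names are hypothetical, not part of the change:

```python
from pathlib import Path

import yaml
from bfabric import Bfabric
from bfabric.entities import Workunit
from bfabric.experimental.app_interface.input_preparation import prepare_folder
from bfabric.experimental.app_interface.output_registration import register_outputs
from bfabric.experimental.app_interface.workunit.definition import WorkunitExecutionDefinition

from depiction_targeted_preproc.pipeline.prepare_inputs import write_inputs_spec
from depiction_targeted_preproc.pipeline.prepare_params import parse_params


def run_one_sample(  # hypothetical orchestrator, not part of this diff
    client: Bfabric,
    sample_dir: Path,
    workunit_id: int,
    dataset_id: int,
    imzml_resource_id: int,
    zip_file_path: Path,
    ssh_user: str | None = None,
) -> None:
    # Inputs: write the spec, then stage the files (previously prepare_inputs).
    write_inputs_spec(
        dataset_id=dataset_id, imzml_resource_id=imzml_resource_id, client=client, sample_dir=sample_dir
    )
    prepare_folder(inputs_yaml=sample_dir / "inputs.yml", target_folder=sample_dir, client=client, ssh_user=ssh_user)

    # Params: derive them from the workunit definition (previously prepare_params).
    workunit = Workunit.find(id=workunit_id, client=client)
    definition = WorkunitExecutionDefinition.from_workunit(workunit)
    with (sample_dir / "params.yml").open("w") as file:
        yaml.safe_dump(parse_params(definition), file)

    # Outputs: register the result zip (previously store_outputs; the spec dict
    # below mirrors the deleted _get_outputs_spec helper).
    outputs_yaml = zip_file_path.parent / f"{zip_file_path.stem}_outputs_spec.yml"
    with outputs_yaml.open("w") as file:
        yaml.safe_dump(
            {
                "outputs": [
                    {
                        "type": "bfabric_copy_resource",
                        "local_path": str(zip_file_path.absolute()),
                        "store_entry_path": zip_file_path.name,
                        "workunit_id": workunit.id,
                        "storage_id": workunit.application.storage.id,
                    }
                ]
            },
            file,
        )
    register_outputs(outputs_yaml=outputs_yaml, client=client, ssh_user=ssh_user)
```

The `override` check and the cyclopts CLI entry point from the old `prepare_params` are dropped in this sketch; if skip-if-exists behavior is still wanted, the caller can guard on `(sample_dir / "params.yml").is_file()` before writing, as the removed code did.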