diff --git a/pyhdx/batch_processing.py b/pyhdx/batch_processing.py
index f3798ceb..96e3b296 100644
--- a/pyhdx/batch_processing.py
+++ b/pyhdx/batch_processing.py
@@ -237,7 +237,7 @@ class JobParser(object):
 
     cwd = param.ClassSelector(Path, doc='Path of the current working directory')
 
-    def __init__(self, job_spec, cwd=None, ):
+    def __init__(self, job_spec, cwd=None):
         self.job_spec = job_spec
         self.cwd = cwd or Path().cwd()
 
diff --git a/pyhdx/cli.py b/pyhdx/cli.py
index 04ebf995..cfb92dfd 100644
--- a/pyhdx/cli.py
+++ b/pyhdx/cli.py
@@ -1,32 +1,30 @@
-import argparse
 import time
-from ipaddress import ip_address
 
-from pyhdx.web import serve
-from pyhdx.config import cfg
-from pyhdx.local_cluster import verify_cluster, default_cluster
+from typing import Union, Optional
+from pathlib import Path
+import typer
+from ipaddress import ip_address
+import yaml
 
-# todo add check to see if the web module requirements are installed
+app = typer.Typer()
 
 
-def main():
-    parser = argparse.ArgumentParser(prog="pyhdx", description="PyHDX Launcher")
+@app.command()
+def serve(scheduler_address: Optional[str] = typer.Option(None, help="Address for dask scheduler to use")):
+    """Launch the PyHDX web application"""
 
-    parser.add_argument("serve", help="Runs PyHDX Dashboard")
-    parser.add_argument(
-        "--scheduler_address", help="Run with local cluster <ip>:<port>"
-    )
-    args = parser.parse_args()
+    from pyhdx.config import cfg
+    from pyhdx.local_cluster import verify_cluster, default_cluster
 
-    if args.scheduler_address:
-        ip, port = args.scheduler_address.split(":")
+    if scheduler_address is not None:
+        ip, port = scheduler_address.split(":")
         if not ip_address(ip):
             print("Invalid IP Address")
             return
         elif not 0 <= int(port) < 2 ** 16:
             print("Invalid port, must be 0-65535")
             return
-        cfg.set("cluster", "scheduler_address", args.scheduler_address)
+        cfg.set("cluster", "scheduler_address", scheduler_address)
 
     scheduler_address = cfg.get("cluster", "scheduler_address")
     if not verify_cluster(scheduler_address):
@@ -37,8 +35,9 @@ def main():
         scheduler_address = f"{ip}:{port}"
         print(f"Started new Dask LocalCluster at {scheduler_address}")
 
-    if args.serve:
-        serve.run_apps()
+    # Start the PyHDX web application
+    from pyhdx.web import serve as serve_pyhdx
+    serve_pyhdx.run_apps()
 
     loop = True
     while loop:
@@ -49,11 +48,22 @@ def main():
             loop = False
 
 
-if __name__ == "__main__":
-    import sys
+@app.command()
+def process(
+    jobfile: Path = typer.Argument(..., help="Path to .yaml jobfile"),
+    cwd: Optional[Path] = typer.Option(None, help="Optional path to working directory")
+):
+    """
+    Process a HDX dataset according to a jobfile
+    """
+
+    from pyhdx.batch_processing import JobParser
 
-    sys.argv.append("serve")
-    sys.argv.append("--scheduler_address")
-    sys.argv.append("127.0.0.1:53270")
+    job_spec = yaml.safe_load(jobfile.read_text())
+    parser = JobParser(job_spec, cwd=cwd)
 
-    main()
+    parser.execute()
+
+
+if __name__ == "__main__":
+    app()
diff --git a/setup.cfg b/setup.cfg
index 0c5e2f1c..42ab74e5 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,9 +1,9 @@
 [metadata]
 name = PyHDX
 author = Jochem H. Smit
-author-email = jhsmit@gmail.com
+author_email = jhsmit@gmail.com
 maintainer = Jochem H. Smit
-maintainer-email = jhsmit@gmail.com
+maintainer_email = jhsmit@gmail.com
 url = https://github.com/Jhsmit/PyHDX
 license = MIT
 license_files = LICENSE
@@ -31,6 +31,7 @@ install_requires =
     sympy==1.5.1
     torch
     tqdm
+    typer
     dask
     distributed
     packaging
@@ -41,7 +42,7 @@ python_requires =
 
 [options.entry_points]
 console_scripts =
-    pyhdx = pyhdx.cli:main
+    pyhdx = pyhdx.cli:app
 
 
 [options.extras_require]
diff --git a/tests/test_batchprocessing.py b/tests/test_batchprocessing.py
index 8692acea..c4526e5b 100644
--- a/tests/test_batchprocessing.py
+++ b/tests/test_batchprocessing.py
@@ -1,8 +1,9 @@
-from pyhdx.batch_processing import yaml_to_hdxm, yaml_to_hdxmset, StateParser
+from pyhdx.batch_processing import StateParser, JobParser
 from pyhdx.models import HDXMeasurement, HDXMeasurementSet
 import numpy as np
 from pathlib import Path
 import yaml
+import shutil
 
 cwd = Path(__file__).parent
 input_dir = cwd / 'test_data' / 'input'
@@ -17,16 +18,6 @@ def test_load_from_yaml(self):
         yaml_pth = Path(input_dir / 'data_states.yaml')
         data_dict = yaml.safe_load(yaml_pth.read_text())
 
-        hdxm = yaml_to_hdxm(data_dict['SecB_tetramer'], data_dir=input_dir)
-        assert isinstance(hdxm, HDXMeasurement)
-
-        assert hdxm.metadata['temperature'] == data_dict['SecB_tetramer']['temperature']['value'] + 273.15
-        assert hdxm.name == 'SecB WT apo'
-
-        hdxm_set = yaml_to_hdxmset(data_dict, data_dir=input_dir)
-        assert isinstance(hdxm_set, HDXMeasurementSet)
-        assert hdxm_set.names == list(data_dict.keys())
-
         parser = StateParser(data_dict, data_src=input_dir)
 
         hdxm = parser.load_hdxm('SecB_tetramer')
@@ -39,4 +30,14 @@ def test_load_from_yaml(self):
 
         assert isinstance(hdxm_set, HDXMeasurementSet)
         assert hdxm_set.names == list(data_dict.keys())
 
+    def test_load_job_parser(self):
+        fit_output_dir = input_dir / 'fit_result_output_1'
+        if fit_output_dir.exists():
+            shutil.rmtree(fit_output_dir, ignore_errors=True)
+
+        job_spec = yaml.safe_load((input_dir / 'jobfile.yaml').read_text())
+        parser = JobParser(job_spec, cwd=input_dir)
+        parser.execute()
+        assert fit_output_dir.exists()
+        shutil.rmtree(fit_output_dir, ignore_errors=True)
diff --git a/tests/test_data/input/jobfile.yaml b/tests/test_data/input/jobfile.yaml
index 6ba2f547..29d0a5ab 100644
--- a/tests/test_data/input/jobfile.yaml
+++ b/tests/test_data/input/jobfile.yaml
@@ -2,7 +2,7 @@ steps:
   - task: load_hdxm_set
     name: load_data
     state_file: data_states.yaml
-  - task: estimate_rates # todo allow specification of method etc
+  - task: estimate_rates
     name: rates
     hdxm_set: $(load_data.out)
   - task: create_guess