Skip to content

Commit

Permalink
update cli for serve/process, add test
Browse files Browse the repository at this point in the history
  • Loading branch information
Jhsmit committed Apr 8, 2022
1 parent 526ceff commit de28097
Show file tree
Hide file tree
Showing 5 changed files with 52 additions and 40 deletions.
2 changes: 1 addition & 1 deletion pyhdx/batch_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ class JobParser(object):

cwd = param.ClassSelector(Path, doc='Path of the current working directory')

def __init__(self, job_spec, cwd=None, ):
def __init__(self, job_spec, cwd=None):
    """Store the job specification and resolve the working directory.

    Parameters
    ----------
    job_spec : dict
        Parsed jobfile specification describing the processing steps.
    cwd : pathlib.Path, optional
        Working directory for the job; defaults to the process's current
        working directory when not supplied.
    """
    self.job_spec = job_spec
    # Fall back to the current working directory for any falsy value,
    # matching the original `cwd or Path().cwd()` semantics.
    self.cwd = cwd if cwd else Path().cwd()

Expand Down
58 changes: 34 additions & 24 deletions pyhdx/cli.py
Original file line number Diff line number Diff line change
@@ -1,32 +1,30 @@
import argparse
import time
from ipaddress import ip_address
from pyhdx.web import serve
from pyhdx.config import cfg
from pyhdx.local_cluster import verify_cluster, default_cluster
from typing import Union, Optional
from pathlib import Path

import typer
from ipaddress import ip_address
import yaml

# todo add check to see if the web module requirements are installed

app = typer.Typer()

def main():
parser = argparse.ArgumentParser(prog="pyhdx", description="PyHDX Launcher")
@app.command()
def serve(scheduler_address: Optional[str] = typer.Option(None, help="Address for dask scheduler to use")):
"""Launch the PyHDX web application"""

parser.add_argument("serve", help="Runs PyHDX Dashboard")
parser.add_argument(
"--scheduler_address", help="Run with local cluster <ip>:<port>"
)
args = parser.parse_args()
from pyhdx.config import cfg
from pyhdx.local_cluster import verify_cluster, default_cluster

if args.scheduler_address:
ip, port = args.scheduler_address.split(":")
if scheduler_address is not None:
ip, port = scheduler_address.split(":")
if not ip_address(ip):
print("Invalid IP Address")
return
elif not 0 <= int(port) < 2 ** 16:
print("Invalid port, must be 0-65535")
return
cfg.set("cluster", "scheduler_address", args.scheduler_address)
cfg.set("cluster", "scheduler_address", scheduler_address)

scheduler_address = cfg.get("cluster", "scheduler_address")
if not verify_cluster(scheduler_address):
Expand All @@ -37,8 +35,9 @@ def main():
scheduler_address = f"{ip}:{port}"
print(f"Started new Dask LocalCluster at {scheduler_address}")

if args.serve:
serve.run_apps()
# Start the PyHDX web application
from pyhdx.web import serve as serve_pyhdx
serve_pyhdx.run_apps()

loop = True
while loop:
Expand All @@ -49,11 +48,22 @@ def main():
loop = False


if __name__ == "__main__":
import sys
@app.command()
def process(
    jobfile: Path = typer.Argument(..., help="Path to .yaml jobfile"),
    cwd: Optional[Path] = typer.Option(None, help="Optional path to working directory"),
):
    """
    Process a HDX dataset according to a jobfile
    """
    # Imported lazily so the CLI loads without pulling in batch-processing deps.
    from pyhdx.batch_processing import JobParser

    spec = yaml.safe_load(jobfile.read_text())
    JobParser(spec, cwd=cwd).execute()


if __name__ == "__main__":
    app()
7 changes: 4 additions & 3 deletions setup.cfg
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
[metadata]
name = PyHDX
author = Jochem H. Smit
author-email = [email protected]
author_email = [email protected]
maintainer = Jochem H. Smit
maintainer-email = [email protected]
maintainer_email = [email protected]
url = https://github.com/Jhsmit/PyHDX
license = MIT
license_files = LICENSE
Expand Down Expand Up @@ -31,6 +31,7 @@ install_requires =
sympy==1.5.1
torch
tqdm
typer
dask
distributed
packaging
Expand All @@ -41,7 +42,7 @@ python_requires =

[options.entry_points]
console_scripts =
pyhdx = pyhdx.cli:main
pyhdx = pyhdx.cli:app


[options.extras_require]
Expand Down
23 changes: 12 additions & 11 deletions tests/test_batchprocessing.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from pyhdx.batch_processing import yaml_to_hdxm, yaml_to_hdxmset, StateParser
from pyhdx.batch_processing import StateParser, JobParser
from pyhdx.models import HDXMeasurement, HDXMeasurementSet
import numpy as np
from pathlib import Path
import yaml
import shutil

cwd = Path(__file__).parent
input_dir = cwd / 'test_data' / 'input'
Expand All @@ -17,16 +18,6 @@ def test_load_from_yaml(self):
yaml_pth = Path(input_dir / 'data_states.yaml')
data_dict = yaml.safe_load(yaml_pth.read_text())

hdxm = yaml_to_hdxm(data_dict['SecB_tetramer'], data_dir=input_dir)
assert isinstance(hdxm, HDXMeasurement)

assert hdxm.metadata['temperature'] == data_dict['SecB_tetramer']['temperature']['value'] + 273.15
assert hdxm.name == 'SecB WT apo'

hdxm_set = yaml_to_hdxmset(data_dict, data_dir=input_dir)
assert isinstance(hdxm_set, HDXMeasurementSet)
assert hdxm_set.names == list(data_dict.keys())

parser = StateParser(data_dict, data_src=input_dir)

hdxm = parser.load_hdxm('SecB_tetramer')
Expand All @@ -39,4 +30,14 @@ def test_load_from_yaml(self):
assert isinstance(hdxm_set, HDXMeasurementSet)
assert hdxm_set.names == list(data_dict.keys())

def test_load_job_parser(self):
    """JobParser executes a jobfile end-to-end and writes fit output to disk."""
    output_dir = input_dir / 'fit_result_output_1'
    # Start from a clean slate in case a previous run left results behind.
    if output_dir.exists():
        shutil.rmtree(output_dir, ignore_errors=True)

    spec = yaml.safe_load((input_dir / 'jobfile.yaml').read_text())
    JobParser(spec, cwd=input_dir).execute()

    # The job's final step is expected to have created the output directory.
    assert output_dir.exists()
    shutil.rmtree(output_dir, ignore_errors=True)
2 changes: 1 addition & 1 deletion tests/test_data/input/jobfile.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ steps:
- task: load_hdxm_set
name: load_data
state_file: data_states.yaml
- task: estimate_rates # todo allow specification of method etc
- task: estimate_rates
name: rates
hdxm_set: $(load_data.out)
- task: create_guess
Expand Down

0 comments on commit de28097

Please sign in to comment.