diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 435348a4..11afb3e2 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -1,56 +1,13 @@ name: Test -on: [push, pull_request_target] - +on: push jobs: macos: - name: Test MacOs + name: Test strategy: fail-fast: false matrix: - python: ["3.8", "3.9"] - platform: ["macos-10.15"] - runs-on: ${{ matrix.platform }} - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Setup Python - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python }} - - name: Download Docker - run: | - brew install docker docker-machine - - # Latest virtualbox has breaking change for mac - brew uninstall virtualbox - cd $(brew --repo homebrew/cask) - git checkout 8670a72380c57c606d6582b645421e31dad2eee2 - brew install --cask virtualbox - - # Avoids throttling git api in `docker-machine create` cmd - curl --create-dirs -Lo /Users/runner/.docker/machine/cache/boot2docker.iso https://github.com/boot2docker/boot2docker/releases/download/v18.09.1-rc1/boot2docker.iso - - docker-machine create --driver virtualbox default - docker-machine env default - - name: Install dev - run: | - python3 -m pip install -r dev-requirements.txt - - name: Install latch - run: | - python3 -m pip install -e . - - name: Test - env: - TEST_TOKEN: ${{ secrets.TEST_TOKEN }} - run: | - eval $(docker-machine env default) - cd tests; python3 -m pytest -s . - linux: - name: Test Linux - strategy: - fail-fast: false - matrix: - python: ["3.8", "3.9"] - platform: ["ubuntu-18.04"] + python: ["3.8", "3.9", "3.10", "3.11"] + platform: ["macos-12", "ubuntu-22.04"] runs-on: ${{ matrix.platform }} steps: - name: Checkout @@ -61,13 +18,12 @@ jobs: python-version: ${{ matrix.python }} - name: Install dev run: | - python3 -m pip install -r dev-requirements.txt + pip install -r dev-requirements.txt - name: Install latch run: | - python3 -m pip install -e . + pip install . 
- name: Test env: TEST_TOKEN: ${{ secrets.TEST_TOKEN }} run: | - eval $(docker-machine env default) - cd tests; python3 -m pytest -s . + pytest -s diff --git a/.gitignore b/.gitignore index 1fae6583..4be4e4c5 100644 --- a/.gitignore +++ b/.gitignore @@ -18,5 +18,4 @@ docs/build .vscode scratch.py /scratch -test_* .latch_report.tar.gz diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bb801d91..e48210ce 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,7 +36,7 @@ repos: # - id: sort-simple-yaml # - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 23.7.0 + rev: 23.9.1 hooks: - id: black args: [--preview] diff --git a/CHANGELOG.md b/CHANGELOG.md index 92399912..208f4d93 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,7 @@ Types of changes # Latch SDK Changelog -## 2.33.0 - 2023-10-04 +## 2.34.0 - 2023-10-04 ### Added @@ -34,6 +34,12 @@ temporary because they are deleted at the end of each job on Latch. * Detect use of `conda` keyword and install in image. This effectively supports wrapper/conda keywords. 
* `Iterable, Generator` cause issues as type hints when imported from `collections.abc` rather than `typing` +## 2.33.0 - 2023-09-29 + +### Added + +* Add `latch sync` for synchronization from local to remote directories that only uploads modified content + ## 2.32.8 - 2023-09-07 ### Fixed diff --git a/docs/source/assets/snakemake/tutorial.png b/docs/source/assets/snakemake/tutorial.png new file mode 100644 index 00000000..de8f0b5a Binary files /dev/null and b/docs/source/assets/snakemake/tutorial.png differ diff --git a/docs/source/index.md b/docs/source/index.md index c2c55570..086340e3 100644 --- a/docs/source/index.md +++ b/docs/source/index.md @@ -149,6 +149,7 @@ cli/mv :maxdepth: 2 :caption: Manual manual/snakemake.md +manual/tutorial.md ``` ```{toctree} diff --git a/docs/source/manual/snakemake.md b/docs/source/manual/snakemake.md index 3a5f5bcb..e551dff4 100644 --- a/docs/source/manual/snakemake.md +++ b/docs/source/manual/snakemake.md @@ -1,10 +1,4 @@ -# [Alpha Pre-release] Snakemake Integration - -## Pre-release Disclaimer - Currently not ready for production use - -Latch support for Snakemake is in active development. Some workflows already work but a lot of common use cases need minor work. This documentation is also in active development. - -This pre-release was created to integrate miscellaneous improvements that accumulated over the course of developing the integration and to prevent the codebase from further diverging from the main branch. +# Snakemake Integration ## Getting Started diff --git a/docs/source/manual/tutorial.md b/docs/source/manual/tutorial.md new file mode 100644 index 00000000..b36b87f2 --- /dev/null +++ b/docs/source/manual/tutorial.md @@ -0,0 +1,144 @@ +# A simple Snakemake example + +In this guide, we will walk through how you can upload a simple Snakemake workflow to Latch. + +The example being used here comes from the [short tutorial in Snakemake's documentation](https://snakemake.readthedocs.io/en/stable/tutorial/short.html). 
+ +## Prerequisites + +* Install the [Latch SDK](https://github.com/latchbio/latch#installation) and [snakemake](https://snakemake.readthedocs.io/en/stable/getting_started/installation.html) with: + +```console +pip install latch[snakemake] +``` + +* Install [Docker](https://www.docker.com/get-started/) and have Docker run locally + +## Step 1 + +First, initialize an example Snakemake workflow: + +```console +latch init snakemake-wf --template snakemake +``` + +The workflow generated contains what is typically seen in a Snakemake workflow, such as python scripts and a Snakefile. + +```console +snakemake-wf +├── Dockerfile # Latch specific +├── Snakefile +├── data +│   ├── genome.fa +│   ├── genome.fa.amb +│   ├── genome.fa.ann +│   ├── genome.fa.bwt +│   ├── genome.fa.fai +│   ├── genome.fa.pac +│   ├── genome.fa.sa +│   └── samples +│   ├── A.fastq +│   ├── B.fastq +│   └── C.fastq +├── environment.yaml +├── scripts +│   ├── __pycache__ +│   │   └── plot-quals.cpython-39.pyc +│   └── plot-quals.py +├── version +└── wf +``` + +To make the workflow compatible to execute on Latch, two additional files are needed: + +* `Dockerfile` to specify dependencies the workflow needs to run +* `latch_metadata.py` to specify workflow parameters to expose on the user interface. + +In this tutorial, we will walk through how these two files can be constructed. + +## Step 2: Add a metadata file + +The `latch_metadata.py` is used to specify the input parameters that the Snakemake workflow needs to run. + +For example, by examining the Snakefile, we determine there are two parameters that the workflow needs: a reference genome and a list of samples to be aligned against the reference genome. 
+ +```python +# latch_metadata.py +from latch.types.metadata import SnakemakeMetadata, SnakemakeFileParameter +from latch.types.directory import LatchDir +from latch.types.metadata import LatchAuthor, LatchMetadata, LatchParameter +from pathlib import Path + +SnakemakeMetadata( + display_name="snakemake_tutorial_workflow", + author=LatchAuthor( + name="latchbio", + ), + parameters={ + "samples" : SnakemakeFileParameter( + display_name="Sample Input Directory", + description="A directory full of FastQ files", + type=LatchDir, + path=Path("data/samples"), + ), + "ref_genome" : SnakemakeFileParameter( + display_name="Indexed Reference Genome", + description="A directory with a reference Fasta file and the 6 index files produced from `bwa index`", + type=LatchDir, + path=Path("genome"), + ), + }, +) +``` + +For each `LatchFile`/`LatchDir` parameter, the `path` keyword specifies the path where files will be copied before the Snakemake workflow is run and should match the paths of the inputs for each rule in the Snakefile. + +## Step 3: Add dependencies + +Next, create an `environment.yaml` file to specify the dependencies that the Snakefile needs to run successfully: + +```yaml +# environment.yaml +channels: + - bioconda + - conda-forge +dependencies: + - snakemake=7.25.0 + - jinja2 + - matplotlib + - graphviz + - bcftools =1.15 + - samtools =1.15 + - bwa =0.7.17 + - pysam =0.19 + - pygments +``` + +A Dockerfile can be automatically generated by typing: +```console +latch dockerfile snakemake-wf --snakemake +``` + +## Step 4: Upload the workflow to Latch + +Finally, type the following command to register the workflow to Latch: + +```console +cd snakemake-wf &&\ +latch register . --snakefile Snakefile +``` + +During registration, a workflow image is built based on dependencies specified in the `environment.yaml` file. Once the registration finishes, the `stdout` provides a link to your workflow on Latch. 
+ +![Snakemake workflow interface on Latch](../assets/snakemake/tutorial.png) + +## Step 5: Run the workflow + +Snakemake support currently uses JIT (Just-In-Time) registration. This means that the workflow produced by `latch register` will register a second workflow, which will run the actual Snakemake jobs. + +Once the workflow finishes running, results will be deposited to [Latch Data](https://console.latch.bio/data) under the `Snakemake Outputs` folder. + +## Next Steps + +* Learn more in-depth about how Snakemake integration works on Latch by reading our [manual](../manual/snakemake.md). +* Visit the repository of [public examples](https://github.com/latchbio/latch-snakemake-examples) of Snakemake workflows on Latch. diff --git a/latch/account.py b/latch/account.py index 2ddf1971..e18c4be3 100644 --- a/latch/account.py +++ b/latch/account.py @@ -155,14 +155,12 @@ def load(self) -> None: @overload def list_registry_projects( self, *, load_if_missing: Literal[True] = True - ) -> List[Project]: - ... + ) -> List[Project]: ... @overload def list_registry_projects( self, *, load_if_missing: bool - ) -> Optional[List[Project]]: - ... + ) -> Optional[List[Project]]: ... def list_registry_projects( self, *, load_if_missing: bool = True diff --git a/latch/registry/project.py b/latch/registry/project.py index d4c099ed..a2a52d72 100644 --- a/latch/registry/project.py +++ b/latch/registry/project.py @@ -85,12 +85,10 @@ def load(self) -> None: # get_display_name @overload - def get_display_name(self, *, load_if_missing: Literal[True] = True) -> str: - ... + def get_display_name(self, *, load_if_missing: Literal[True] = True) -> str: ... @overload - def get_display_name(self, *, load_if_missing: bool) -> Optional[str]: - ... + def get_display_name(self, *, load_if_missing: bool) -> Optional[str]: ... def get_display_name(self, *, load_if_missing: bool = True) -> Optional[str]: """Get the display name of this project. 
@@ -116,12 +114,10 @@ def get_display_name(self, *, load_if_missing: bool = True) -> Optional[str]: # list_tables @overload - def list_tables(self, *, load_if_missing: Literal[True] = True) -> List[Table]: - ... + def list_tables(self, *, load_if_missing: Literal[True] = True) -> List[Table]: ... @overload - def list_tables(self, *, load_if_missing: bool) -> Optional[List[Table]]: - ... + def list_tables(self, *, load_if_missing: bool) -> Optional[List[Table]]: ... def list_tables(self, *, load_if_missing: bool = True) -> Optional[List[Table]]: """List Registry tables contained in this project. diff --git a/latch/registry/record.py b/latch/registry/record.py index 5710c68d..d217732b 100644 --- a/latch/registry/record.py +++ b/latch/registry/record.py @@ -167,12 +167,10 @@ def load(self) -> None: # get_name @overload - def get_name(self, *, load_if_missing: Literal[True] = True) -> str: - ... + def get_name(self, *, load_if_missing: Literal[True] = True) -> str: ... @overload - def get_name(self, *, load_if_missing: bool) -> Optional[str]: - ... + def get_name(self, *, load_if_missing: bool) -> Optional[str]: ... def get_name(self, *, load_if_missing: bool = True) -> Optional[str]: """Get the name of this record. @@ -200,16 +198,14 @@ def get_columns( self, *, load_if_missing: Literal[True] = True, - ) -> Dict[str, Column]: - ... + ) -> Dict[str, Column]: ... @overload def get_columns( self, *, load_if_missing: bool, - ) -> Optional[Dict[str, Column]]: - ... + ) -> Optional[Dict[str, Column]]: ... def get_columns( self, @@ -238,16 +234,14 @@ def get_values( self, *, load_if_missing: Literal[True] = True, - ) -> Dict[str, RecordValue]: - ... + ) -> Dict[str, RecordValue]: ... @overload def get_values( self, *, load_if_missing: bool, - ) -> Optional[Dict[str, RecordValue]]: - ... + ) -> Optional[Dict[str, RecordValue]]: ... 
def get_values( self, diff --git a/latch/registry/table.py b/latch/registry/table.py index 7dcff1a1..f8a9a603 100644 --- a/latch/registry/table.py +++ b/latch/registry/table.py @@ -131,12 +131,10 @@ def load(self) -> None: # get_display_name @overload - def get_display_name(self, *, load_if_missing: Literal[True] = True) -> str: - ... + def get_display_name(self, *, load_if_missing: Literal[True] = True) -> str: ... @overload - def get_display_name(self, *, load_if_missing: bool) -> Optional[str]: - ... + def get_display_name(self, *, load_if_missing: bool) -> Optional[str]: ... def get_display_name(self, *, load_if_missing: bool = True) -> Optional[str]: """Get the display name of this table. @@ -164,12 +162,10 @@ def get_display_name(self, *, load_if_missing: bool = True) -> Optional[str]: @overload def get_columns( self, *, load_if_missing: Literal[True] = True - ) -> Dict[str, Column]: - ... + ) -> Dict[str, Column]: ... @overload - def get_columns(self, *, load_if_missing: bool) -> Optional[Dict[str, Column]]: - ... + def get_columns(self, *, load_if_missing: bool) -> Optional[Dict[str, Column]]: ... def get_columns( self, *, load_if_missing: bool = True diff --git a/latch/registry/types.py b/latch/registry/types.py index ee56973b..48608010 100644 --- a/latch/registry/types.py +++ b/latch/registry/types.py @@ -16,13 +16,11 @@ from enum import StrEnum except ImportError: - class StrEnum(str, Enum): - ... + class StrEnum(str, Enum): ... else: - class StrEnum(str, Enum): - ... + class StrEnum(str, Enum): ... @dataclass(frozen=True) diff --git a/latch/registry/utils.py b/latch/registry/utils.py index a32f6d83..fa98ae09 100644 --- a/latch/registry/utils.py +++ b/latch/registry/utils.py @@ -26,8 +26,7 @@ T = TypeVar("T") -class RegistryTransformerException(ValueError): - ... +class RegistryTransformerException(ValueError): ... 
def to_python_type(registry_type: RegistryType) -> Type[RegistryPythonValue]: diff --git a/latch/verified/deseq2.py b/latch/verified/deseq2.py index eb984cbc..d99b6445 100644 --- a/latch/verified/deseq2.py +++ b/latch/verified/deseq2.py @@ -70,5 +70,4 @@ def deseq2_wf( ), ] = [], number_of_genes_to_plot: int = 30, -) -> LatchDir: - ... +) -> LatchDir: ... diff --git a/latch/verified/mafft.py b/latch/verified/mafft.py index 083ae69c..531e9aba 100644 --- a/latch/verified/mafft.py +++ b/latch/verified/mafft.py @@ -27,5 +27,4 @@ def mafft( offset: float = 0.0, maxiterate: int = 0, output_file: str = "aligned_mafft.fa", -) -> LatchFile: - ... +) -> LatchFile: ... diff --git a/latch/verified/pathway.py b/latch/verified/pathway.py index 0c97e41e..436d136e 100644 --- a/latch/verified/pathway.py +++ b/latch/verified/pathway.py @@ -15,5 +15,4 @@ def gene_ontology_pathway_analysis( report_name: str, number_of_pathways: int = 20, output_location: LatchDir = LatchDir("latch:///Pathway Analysis/"), -) -> LatchDir: - ... +) -> LatchDir: ... diff --git a/latch/verified/rnaseq.py b/latch/verified/rnaseq.py index 4d3277cc..3f7c57e5 100644 --- a/latch/verified/rnaseq.py +++ b/latch/verified/rnaseq.py @@ -76,5 +76,4 @@ def rnaseq( salmon_index: Optional[LatchFile] = None, save_indices: bool = False, custom_output_dir: Optional[LatchDir] = None, -) -> List[LatchFile]: - ... +) -> List[LatchFile]: ... diff --git a/latch/verified/trim_galore.py b/latch/verified/trim_galore.py index 2973cb8b..4b6c34e0 100644 --- a/latch/verified/trim_galore.py +++ b/latch/verified/trim_galore.py @@ -58,5 +58,4 @@ def trim_galore( retain_unpaired: bool = True, length_1: int = 35, length_2: int = 35, -) -> LatchDir: - ... +) -> LatchDir: ... diff --git a/latch_cli/auth/csrf.py b/latch_cli/auth/csrf.py index eced4e4c..b511a8cb 100644 --- a/latch_cli/auth/csrf.py +++ b/latch_cli/auth/csrf.py @@ -27,5 +27,4 @@ def __init__(self): def __enter__(self, *args): return self - def __exit__(self, *args): - ... 
+ def __exit__(self, *args): ... diff --git a/latch_cli/auth/pkce.py b/latch_cli/auth/pkce.py index e9df3bf5..f800758e 100644 --- a/latch_cli/auth/pkce.py +++ b/latch_cli/auth/pkce.py @@ -86,8 +86,7 @@ def __init__(self): def __enter__(self, *args): return self - def __exit__(self, *args): - ... + def __exit__(self, *args): ... def construct_challenge(self) -> Tuple[str, str]: """Construct verifier & challenge to verify a client's identity in PKCE. diff --git a/latch_cli/centromere/ctx.py b/latch_cli/centromere/ctx.py index d6646621..e16f290b 100644 --- a/latch_cli/centromere/ctx.py +++ b/latch_cli/centromere/ctx.py @@ -159,13 +159,11 @@ def __init__( fg="red", ) click.secho( - ( - "\nIt is possible to avoid including the Snakefile" - " prior to registration by providing a" - " `latch_metadata.py` file in the workflow root.\nThis" - " way it is not necessary to install dependencies or" - " ensure that Snakemake inputs locally." - ), + "\nIt is possible to avoid including the Snakefile" + " prior to registration by providing a" + " `latch_metadata.py` file in the workflow root.\nThis" + " way it is not necessary to install dependencies or" + " ensure that Snakemake inputs locally.", fg="red", ) click.secho("\nExample ", fg="red", nl=False) @@ -251,10 +249,8 @@ def __init__( if self.nucleus_check_version(self.version, self.workflow_name): click.secho( - ( - f"\nVersion ({self.version}) already exists." - " Make sure that you've saved any changes you made." - ), + f"\nVersion ({self.version}) already exists." + " Make sure that you've saved any changes you made.", fg="red", bold=True, ) @@ -293,8 +289,7 @@ def __init__( ) self.ssh_client = ssh_client - def _patched_connect(self): - ... + def _patched_connect(self): ... 
def _patched_create_paramiko_client(self, base_url): self.ssh_client = ssh_client diff --git a/latch_cli/click_utils.py b/latch_cli/click_utils.py index ff5fb627..7b941e0a 100644 --- a/latch_cli/click_utils.py +++ b/latch_cli/click_utils.py @@ -51,8 +51,7 @@ def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: ) -class LatchGroup(LatchCommand, Group): - ... +class LatchGroup(LatchCommand, Group): ... def colored_exception_show(self, file: Optional[IO] = None) -> None: diff --git a/latch_cli/docker_utils/__init__.py b/latch_cli/docker_utils/__init__.py index be1c4acb..3bf3c607 100644 --- a/latch_cli/docker_utils/__init__.py +++ b/latch_cli/docker_utils/__init__.py @@ -248,8 +248,7 @@ def infer_commands(pkg_root: Path) -> List[DockerCmdBlock]: has_buildable_pyproject = True break - except FileNotFoundError: - ... + except FileNotFoundError: ... # from https://peps.python.org/pep-0518/ and https://peps.python.org/pep-0621/ if has_setup_py or has_buildable_pyproject: diff --git a/latch_cli/exceptions/errors.py b/latch_cli/exceptions/errors.py index da7c3241..1016532f 100644 --- a/latch_cli/exceptions/errors.py +++ b/latch_cli/exceptions/errors.py @@ -12,8 +12,7 @@ class _SyntaxError(BaseException): end_offset: int -class _FlytekitError(BaseException): - ... +class _FlytekitError(BaseException): ... _HandledError = Union[_SyntaxError, _FlytekitError] diff --git a/latch_cli/main.py b/latch_cli/main.py index 471e50d6..46671da7 100644 --- a/latch_cli/main.py +++ b/latch_cli/main.py @@ -439,10 +439,8 @@ def get_params(wf_name: Union[str, None], version: Union[str, None] = None): if version is None: version = "latest" click.secho( - ( - f"Successfully generated python param map named {wf_name}.params.py with" - f" version {version}\n Run `latch launch {wf_name}.params.py` to launch it." 
- ), + f"Successfully generated python param map named {wf_name}.params.py with" + f" version {version}\n Run `latch launch {wf_name}.params.py` to launch it.", fg="green", ) @@ -686,3 +684,36 @@ def test_data_ls(): click.secho("Listing your managed objects by full S3 path.\n", fg="green") for o in objects: print(f"\ts3://latch-public/{o}") + + +@main.command() +@click.argument("srcs", nargs=-1) +@click.argument("dst", nargs=1) +@click.option( + "--delete", + help="Delete extraneous files from destination.", + is_flag=True, + default=False, +) +@click.option( + "--ignore-unsyncable", + help=( + "Synchronize even if some source paths do not exist or refer to special files." + ), + is_flag=True, + default=False, +) +def sync(srcs: List[str], dst: str, delete: bool, ignore_unsyncable: bool): + """ + Update the contents of a remote directory with local data or vice versa. + """ + from latch_cli.services.sync import sync + + # todo(maximsmol): remote -> local + # todo(maximsmol): remote -> remote + sync( + srcs, + dst, + delete=delete, + ignore_unsyncable=ignore_unsyncable, + ) diff --git a/latch_cli/menus.py b/latch_cli/menus.py index d91b35bc..70cebed9 100644 --- a/latch_cli/menus.py +++ b/latch_cli/menus.py @@ -295,8 +295,7 @@ def render( start_index=start_index, max_per_page=max_per_page, ) - except KeyboardInterrupt: - ... + except KeyboardInterrupt: ... finally: clear(num_lines_rendered) reveal_cursor() diff --git a/latch_cli/services/cp/exceptions.py b/latch_cli/services/cp/exceptions.py index 0e4645d4..a57e099f 100644 --- a/latch_cli/services/cp/exceptions.py +++ b/latch_cli/services/cp/exceptions.py @@ -1,2 +1 @@ -class PathResolutionError(ValueError): - ... +class PathResolutionError(ValueError): ... 
diff --git a/latch_cli/services/cp/upload.py b/latch_cli/services/cp/upload.py index a642660d..0bdaf16f 100644 --- a/latch_cli/services/cp/upload.py +++ b/latch_cli/services/cp/upload.py @@ -380,8 +380,8 @@ def upload_file_chunk( url: str, part_index: int, part_size: int, - progress_bars: ProgressBars, - pbar_index: Optional[int], + progress_bars: Optional[ProgressBars] = None, + pbar_index: Optional[int] = None, parts_by_source: Optional["PartsBySrcType"] = None, upload_id: Optional[str] = None, dest: Optional[str] = None, @@ -405,20 +405,25 @@ def upload_file_chunk( if parts_by_source is not None: parts_by_source[src].append(ret) - progress_bars.update(pbar_index, len(data)) - pending_parts = progress_bars.dec_usage(str(src)) - - if pending_parts == 0: - progress_bars.return_task_bar(pbar_index) - progress_bars.update_total_progress(1) - progress_bars.write(f"Copied {src}") - - if dest is not None and parts_by_source is not None and upload_id is not None: - end_upload( - dest=dest, - upload_id=upload_id, - parts=list(parts_by_source[src]), - ) + if progress_bars is not None: + progress_bars.update(pbar_index, len(data)) + pending_parts = progress_bars.dec_usage(str(src)) + + if pending_parts == 0: + progress_bars.return_task_bar(pbar_index) + progress_bars.update_total_progress(1) + progress_bars.write(f"Copied {src}") + + if ( + dest is not None + and parts_by_source is not None + and upload_id is not None + ): + end_upload( + dest=dest, + upload_id=upload_id, + parts=list(parts_by_source[src]), + ) return ret diff --git a/latch_cli/services/get_executions.py b/latch_cli/services/get_executions.py index 8d3fa401..5232eb0f 100644 --- a/latch_cli/services/get_executions.py +++ b/latch_cli/services/get_executions.py @@ -223,8 +223,7 @@ def render( prev = (curr_selected, hor_index, term_width, term_height) menus.clear_screen() max_row_len = render(curr_selected, hor_index, term_width, term_height) - except KeyboardInterrupt: - ... + except KeyboardInterrupt: ... 
finally: menus.clear_screen() menus.reveal_cursor() @@ -317,8 +316,7 @@ def render(curr_selected: int, term_width: int, term_height: int): menus.clear_screen() prev = (curr_selected, term_width, term_height) render(curr_selected, term_width, term_height) - except KeyboardInterrupt: - ... + except KeyboardInterrupt: ... finally: menus.clear_screen() menus.move_cursor((0, 0)) @@ -454,8 +452,7 @@ def render(vert_index, hor_index, term_width, term_height): menus.clear_screen() prev_term_dims = (vert_index, hor_index, term_width, term_height) render(vert_index, hor_index, term_width, term_height) - except KeyboardInterrupt: - ... + except KeyboardInterrupt: ... finally: log_sched.shutdown() log_file.unlink(missing_ok=True) @@ -516,8 +513,7 @@ def render(term_width: int, term_height: int): if prev_term_dims != (term_width, term_height): prev_term_dims = (term_width, term_height) render(term_width, term_height) - except KeyboardInterrupt: - ... + except KeyboardInterrupt: ... finally: menus.clear_screen() menus.move_cursor((0, 0)) diff --git a/latch_cli/services/get_params.py b/latch_cli/services/get_params.py index 2aaf5320..f7e450a6 100644 --- a/latch_cli/services/get_params.py +++ b/latch_cli/services/get_params.py @@ -21,8 +21,7 @@ from latch_cli.utils import retrieve_or_login -class _Unsupported: - ... +class _Unsupported: ... _simple_table = { @@ -326,8 +325,7 @@ def _guess_python_type(literal: LiteralType, param_name: str): # we can parse the variants and define the object in the param map # code. - class _VariantCarrier(enum.Enum): - ... + class _VariantCarrier(enum.Enum): ... 
_VariantCarrier._variants = literal.enum_type.values # Use param name to uniquely identify each enum diff --git a/latch_cli/services/init/example_snakemake/scripts/plot-quals.py b/latch_cli/services/init/example_snakemake/scripts/plot-quals.py index 345189e0..fe896946 100644 --- a/latch_cli/services/init/example_snakemake/scripts/plot-quals.py +++ b/latch_cli/services/init/example_snakemake/scripts/plot-quals.py @@ -1,4 +1,5 @@ import matplotlib + matplotlib.use("Agg") import matplotlib.pyplot as plt from pysam import VariantFile diff --git a/latch_cli/services/local_dev_old.py b/latch_cli/services/local_dev_old.py index 27a53bbe..13b33263 100644 --- a/latch_cli/services/local_dev_old.py +++ b/latch_cli/services/local_dev_old.py @@ -311,8 +311,7 @@ async def output_task(): try: io_task = asyncio.gather(input_task(), output_task(), resize_task()) await io_task - except asyncio.CancelledError: - ... + except asyncio.CancelledError: ... finally: termios.tcsetattr(sys.stdin.fileno(), termios.TCSANOW, old_settings_stdin) signal.signal(signal.SIGWINCH, old_sigwinch_handler) diff --git a/latch_cli/services/preview.py b/latch_cli/services/preview.py index 8fc42f29..db6a54d3 100644 --- a/latch_cli/services/preview.py +++ b/latch_cli/services/preview.py @@ -176,8 +176,7 @@ def render( start_index=start_index, max_per_page=max_per_page, ) - except KeyboardInterrupt: - ... + except KeyboardInterrupt: ... 
finally: menus.clear(num_lines_rendered) menus.reveal_cursor() diff --git a/latch_cli/services/sync.py b/latch_cli/services/sync.py new file mode 100644 index 00000000..08281a94 --- /dev/null +++ b/latch_cli/services/sync.py @@ -0,0 +1,334 @@ +import os +import stat +import sys +from datetime import datetime +from pathlib import Path +from typing import Dict, List, Optional, Tuple + +import click +import dateutil.parser as dp +import gql +from gql.transport.exceptions import TransportQueryError +from latch_sdk_gql.execute import JsonValue, execute + +import latch_cli.services.cp.upload as upl + + +def upload_file(src: Path, dest: str): + start = upl.start_upload(src, dest) + if start is None: + return + + parts: List[upl.CompletedPart] = [] + for idx, url in enumerate(start.urls): + parts.append( + upl.upload_file_chunk( + src, + url, + idx, + start.part_size, + ) + ) + + upl.end_upload(dest, start.upload_id, parts) + + +def check_src(p: Path, *, indent: str = "") -> Optional[Tuple[Path, os.stat_result]]: + try: + p_stat = os.stat(p) + except FileNotFoundError: + click.secho(indent + f"`{p}`: no such file or directory", fg="red", bold=True) + return + + if not stat.S_ISREG(p_stat.st_mode) and not stat.S_ISDIR(p_stat.st_mode): + click.secho(indent + f"`{p}`: not a regular file", fg="red", bold=True) + return + + return (p, p_stat) + + +def sync_rec( + srcs: Dict[str, Tuple[Path, os.stat_result]], + dest: str, + *, + delete: bool, + level: int = 0, +): + # rsync never deletes from the top level destination + delete_effective = delete and level > 0 + indent = " " * level + + try: + query = """ + query LatchCLISync($argPath: String! 
${name_filter_arg}) { + ldataResolvePathData(argPath: $argPath) { + finalLinkTarget { + type + childLdataTreeEdges( + filter: { + child: { + removed: {equalTo: false}, + pending: {equalTo: false}, + copiedFrom: {isNull: true} + ${name_filter} + } + } + ) { + nodes { + child { + id + name + finalLinkTarget { + type + ldataNodeEvents( + condition: {type: INGRESS}, + orderBy: TIME_DESC, + first: 1 + ) { + nodes { + time + } + } + } + } + } + } + } + } + } + """ + + args: JsonValue = {"argPath": dest, "nameFilter": []} + if not delete_effective: + query = query.replace("${name_filter_arg}", ", $nameFilter: [String!]") + query = query.replace("${name_filter}", ", name: {in: $nameFilter}") + args["nameFilter"] = list(srcs.keys()) + else: + query = query.replace("${name_filter_arg}", "") + query = query.replace("${name_filter}", "") + + resolve_data = execute( + gql.gql(query), + args, + )["ldataResolvePathData"] + + dest_data = None + if resolve_data is not None: + dest_data = resolve_data["finalLinkTarget"] + except TransportQueryError as e: + if e.errors is None or len(e.errors) == 0: + raise + + msg: str = e.errors[0]["message"] + + raise + + if len(srcs) == 0: + if dest_data is not None: + if dest_data["type"] != "DIR": + click.secho( + indent + f"`{dest}` is in the way of a directory", + fg="red", + ) + return + + click.secho(indent + "Empty directory", dim=True) + return + + if not dest[-1] == "/": + dest += "/" + + click.secho(indent + "Creating empty directory", fg="bright_blue") + execute( + gql.gql(""" + mutation LatchCLISyncMkdir($argPath: String!) 
{ + ldataMkdirp(input: {argPath: $argPath}) { + clientMutationId + } + } + """), + {"argPath": dest}, + ) + return + + if ( + (len(srcs) > 1 or stat.S_ISDIR(list(srcs.values())[0][1].st_mode)) + and dest_data is not None + and dest_data["type"] not in {"DIR", "ACCOUNT_ROOT"} + ): + click.secho(f"`{dest}` is not a directory", fg="red", bold=True) + click.secho("\nOnly a single file can be synced with a file", fg="red") + sys.exit(1) + + if dest_data is not None and dest_data["type"] not in {"DIR", "ACCOUNT_ROOT"}: + # todo(maximsmol): implement + click.secho( + "Syncing single files is currently not supported", bold=True, fg="red" + ) + sys.exit(1) + + dest_children_by_name = ( + { + x["name"]: x + for x in (raw["child"] for raw in dest_data["childLdataTreeEdges"]["nodes"]) + } + if dest_data is not None + else {} + ) + + for name, (p, p_stat) in srcs.items(): + is_dir = stat.S_ISDIR(p_stat.st_mode) + + child = dest_children_by_name.get(name) + child_dest = f"{dest}/{name}" + + skip = False + verb = "Uploading" + reason = "new" + if child is not None: + flt = child["finalLinkTarget"] + if flt["type"] == "DIR" and not is_dir: + # todo(maximsmol): confirm? pre-check? + click.secho( + indent + f"`{dest}` is in the way of a file", + fg="red", + ) + continue + + if flt["type"] != "DIR" and is_dir: + # todo(maximsmol): confirm? pre-check? 
+ click.secho( + indent + f"`{dest}` is in the way of a directory", + fg="red", + ) + continue + + if flt["type"] == "OBJ": + remote_mtime = dp.isoparse(flt["ldataNodeEvents"]["nodes"][0]["time"]) + + local_mtime = datetime.fromtimestamp(p_stat.st_mtime).astimezone() + if remote_mtime == local_mtime: + verb = "Skipping" + reason = "unmodified" + skip = True + elif remote_mtime > local_mtime: + verb = "Skipping" + reason = "older" + skip = True + else: + verb = "Uploading" + reason = "updated" + else: + reason = "existing" + + if verb == "Uploading" and is_dir: + verb = "Syncing" + + fg = "bright_blue" + dim = None + if verb == "Skipping": + fg = None + dim = True + + click.echo( + click.style( + indent + verb + " ", + fg=fg, + dim=dim, + ) + + click.style( + reason, + underline=True, + fg=fg, + dim=dim, + ) + + click.style( + ": ", + fg=fg, + dim=dim, + ) + + click.style( + str(p) + + ("" if not is_dir else "/") + + ("" if skip else click.style(" -> ", dim=True) + child_dest), + dim=dim, + ) + ) + if skip: + continue + + if is_dir: + sub_srcs: Dict[str, Tuple[Path, os.stat_result]] = {} + for x in p.iterdir(): + res = check_src(x, indent=indent + " ") + if res is None: + # todo(maximsmol): pre-check or confirm? + continue + + sub_srcs[x.name] = res + sync_rec(sub_srcs, child_dest, delete=delete, level=level + 1) + continue + + # todo(maximsmol): upload in parallel? + upload_file(p, child_dest) + + if delete_effective: + for name, child in dest_children_by_name.items(): + child_dest = f"{dest}/{name}" + if name in srcs: + continue + + click.echo( + indent + click.style("Removing extraneous: ", fg="yellow") + child_dest + ) + execute( + gql.gql(""" + mutation LatchCLISyncRemove($argNodeId: BigInt!) 
{ + ldataRmr(input: {argNodeId: $argNodeId}) { + clientMutationId + } + } + """), + {"argNodeId": child["id"]}, + ) + + +def sync( + srcs_raw: List[str], + dest: str, + *, + delete: bool, + ignore_unsyncable: bool, +): + srcs: Dict[str, Tuple[Path, os.stat_result]] = {} + have_errors = False + for x in srcs_raw: + p = Path(x) + res = check_src(p) + if res is None: + have_errors = True + continue + + srcs[p.name] = res + + if len(srcs) == 0: + click.secho( + "\nAll source paths were skipped due to errors", fg="red", bold=True + ) + sys.exit(1) + + if have_errors: + # todo(maximsmol): do we want to precheck recursively? + click.secho("\nSome source paths will be skipped due to errors", fg="red") + + if not ignore_unsyncable: + if not click.confirm(click.style(f"Proceed?", fg="red")): + sys.exit(1) + else: + click.secho( + "Proceeding due to " + click.style("`--ignore-unsyncable`", bold=True), + fg="yellow", + ) + click.echo() + + sync_rec(srcs, dest, delete=delete) diff --git a/latch_cli/snakemake/single_task_snakemake.py b/latch_cli/snakemake/single_task_snakemake.py index 6331e059..e2785401 100644 --- a/latch_cli/snakemake/single_task_snakemake.py +++ b/latch_cli/snakemake/single_task_snakemake.py @@ -121,7 +121,7 @@ def render_annotated_str(x) -> str: elif "report" in flags: report_vals = flags.get("report", False) res = ( - f"report({res}, caption={report_vals['caption']}," + f"report({res}, caption={repr(report_vals['caption'])}," f" category={report_vals['category']})" ) diff --git a/latch_cli/snakemake/workflow.py b/latch_cli/snakemake/workflow.py index 300419a3..102878a1 100644 --- a/latch_cli/snakemake/workflow.py +++ b/latch_cli/snakemake/workflow.py @@ -95,8 +95,7 @@ class JobOutputInfo: type_: Union[LatchFile, LatchDir] -def task_fn_placeholder(): - ... +def task_fn_placeholder(): ... 
def variable_name_for_file(file: snakemake.io.AnnotatedString): @@ -583,6 +582,7 @@ class _WorkflowInfoNode(TypedDict): _interface_request = { "workflow_id": wf_id, "params": params, + "snakemake_jit": True, } response = requests.post(urljoin(config.nucleus_url, "/api/create-execution"), headers=headers, json=_interface_request) diff --git a/latch_cli/utils/__init__.py b/latch_cli/utils/__init__.py index 7ce0b718..884cdc77 100644 --- a/latch_cli/utils/__init__.py +++ b/latch_cli/utils/__init__.py @@ -58,8 +58,7 @@ def urljoins(*args: str, dir: bool = False) -> str: return res -class AuthenticationError(RuntimeError): - ... +class AuthenticationError(RuntimeError): ... def get_auth_header() -> str: @@ -199,8 +198,7 @@ def hash_directory(dir_path: Path) -> str: continue exclude.append(l) - except FileNotFoundError: - ... + except FileNotFoundError: ... from docker.utils import exclude_paths diff --git a/pyproject.toml b/pyproject.toml index b9fec2c4..76a342e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry.dev-dependencies] -black = "^23.3.0" +black = "^23.9.1" isort = "^5.12.0" ruff = "^0.0.261" @@ -37,3 +37,6 @@ extend-ignore = [ "UP006", "UP035", ] + +[tool.pytest.ini_options] +testpaths = ["tests"] diff --git a/tests/test_cli.py b/tests/test_cli.py deleted file mode 100644 index 56c77b78..00000000 --- a/tests/test_cli.py +++ /dev/null @@ -1,190 +0,0 @@ -import os -import secrets -import string -import subprocess -import textwrap -from pathlib import Path -from typing import List - -import pytest -import requests - -from latch_cli.config.latch import config -from tests.fixtures import test_account_jwt - - -def _random_name(length: int): - return "".join(secrets.choice(string.ascii_letters) for _ in range(length)) - - -def _normalize_remote_path(path: str): - if path.startswith("latch://"): - path = path[len("latch://") :] - path = path.strip("/") - return path - - -def _run_and_verify(cmd: List[str], does_exist: str): - output = 
subprocess.run(cmd, capture_output=True, check=True)
-    stdout = output.stdout.decode("utf-8")
-    assert does_exist in stdout
-
-
-def _file_exists(token, remote_dir: str, filename: str) -> bool:
-    filename = _normalize_remote_path(filename)
-    remote_dir = _normalize_remote_path(remote_dir)
-
-    if not remote_dir:
-        remote_path = filename
-    else:
-        remote_path = f"{remote_dir}/{filename}"
-
-    headers = {"Authorization": f"Bearer {token}"}
-    data = {"filename": remote_path}
-    response = requests.post(url=config.api.data.verify, headers=headers, json=data)
-    try:
-        assert response.status_code == 200
-    except:
-        raise ValueError(f"{response.content}")
-    return response.json()["exists"]
-
-
-def _run_mkdir_touch_recursive(token, curr_dir: str, branching_factor: int, depth: int):
-    if depth > 2:
-        return
-    curr_dir = _normalize_remote_path(curr_dir)
-    for _ in range(branching_factor):
-        name = _random_name(10)
-        if not curr_dir:
-            remote_path = name
-        else:
-            remote_path = f"{curr_dir}/{name}"
-        operation = secrets.choice(["mkdir", "touch"])
-        _cmd = ["latch", operation, remote_path]
-        _run_and_verify(_cmd, "Success")
-        assert _file_exists(token, curr_dir, name)
-        if operation == "mkdir":
-            _run_mkdir_touch_recursive(token, remote_path, branching_factor, depth + 1)
-        _cmd = ["latch", "rm", remote_path]
-        _run_and_verify(_cmd, "Success")
-
-
-def _run_nested_cp(token, curr_dir: str, filename: str, depth: int):
-    if depth > 5:
-        return
-    filename = _normalize_remote_path(filename)
-    curr_dir = _normalize_remote_path(curr_dir)
-    _cmd = ["latch", "mkdir", curr_dir]
-    _run_and_verify(_cmd, f"Successfully created directory {curr_dir}.")
-    _run_cp_and_clean_up(token, curr_dir, filename)
-    nested_dir_name = _random_name(10)
-    nested_filename = _random_name(10)
-    _run_nested_cp(token, f"{curr_dir}/{nested_dir_name}", nested_filename, depth + 1)
-    _cmd = ["latch", "rm", curr_dir]
-    _run_and_verify(_cmd, f"Successfully deleted {curr_dir}.")
-
-
-def _run_cp_and_clean_up(token, 
remote_dir: str, filename: str):
-    """
-    Checks that
-    (1) the file was actually copied to latch, and
-    (2) the file contents do not change from local -> latch -> local
-    """
-    filename = _normalize_remote_path(filename)
-    remote_dir = _normalize_remote_path(remote_dir)
-
-    initial = Path(f"initial_{filename}").resolve()
-    final = Path(f"final_{filename}").resolve()
-    try:
-        if not remote_dir:
-            remote_path = f"latch:///{filename}"
-        else:
-            remote_path = f"latch:///{remote_dir}/{filename}"
-        initial_text = _random_name(100)
-        with open(initial, "w") as f:
-            f.write(initial_text)
-        cmd = ["latch", "cp", initial, remote_path]
-        _run_and_verify(cmd, f"Successfully copied {initial} to {remote_path}")
-        assert _file_exists(token, remote_dir, filename)
-        cmd = ["latch", "cp", remote_path, final]
-        _run_and_verify(cmd, f"Successfully copied {remote_path} to {final}")
-        with open(final, "r") as f:
-            final_text = f.read()
-        assert initial_text == final_text
-        cmd = ["latch", "rm", remote_path]
-        _run_and_verify(cmd, f"Successfully deleted {remote_path}")
-        assert not _file_exists(token, remote_dir, filename)
-    finally:
-        if os.path.isfile(initial):
-            os.remove(initial)
-        if os.path.isfile(final):
-            os.remove(final)
-
-
-def test_cp_home_robustness(test_account_jwt):
-    for _ in range(5):
-        filename = _random_name(10)
-        filename = f"{filename}.txt"
-        _run_cp_and_clean_up(test_account_jwt, "", filename)
-
-
-def test_cp_nested(test_account_jwt):
-    initial_dir_name = _random_name(10)
-    initial_filename = _random_name(10)
-    _run_nested_cp(test_account_jwt, initial_dir_name, initial_filename, 0)
-
-
-def test_touch_mkdir_higher_branching_factor(test_account_jwt):
-    # don't do any more than 3 for the branching_factor
-    _run_mkdir_touch_recursive(test_account_jwt, "/", branching_factor=3, depth=2)
-
-
-@pytest.mark.xfail(strict=True)
-def test_bad_input_cp_1():
-    name1 = _random_name(10)
-    name2 = _random_name(10)
-    _cmd = ["latch", "cp", name1, name2]
-    _run_and_verify(_cmd, 
"Success") - - -@pytest.mark.xfail(strict=True) -def test_bad_input_cp_2(): - name1 = _random_name(10) - name2 = _random_name(10) - _cmd = ["latch", "cp", f"latch:///{name1}", f"latch:///{name2}"] - _run_and_verify(_cmd, "Success") - - -def test_ls(test_account_jwt): - for _ in range(5): - name = _random_name(10) - _cmd = ["latch", "mkdir", name] - _run_and_verify(_cmd, "Success") - _cmd = ["latch", "ls"] - _run_and_verify(_cmd, name) - _cmd = ["latch", "rm", name] - _run_and_verify(_cmd, "Success") - - -# def test_launch(test_account_jwt): -# with open("foo.py", "w") as f: -# f.write( -# textwrap.dedent( -# """ -# from latch.types import LatchFile -# -# params = { -# "_name": "wf.__init__.assemble_and_sort", -# "read1": LatchFile("latch:///read1"), -# "read2": LatchFile("latch:///read2"), -# } -# """ -# ) -# ) -# -# _cmd = ["latch", "launch", "foo.py"] -# _run_and_verify( -# _cmd, -# "Successfully launched workflow named wf.__init__.assemble_and_sort with" -# " version latest.", -# ) diff --git a/tests/test_launch.py b/tests/test_launch.py deleted file mode 100644 index a6bd20fa..00000000 --- a/tests/test_launch.py +++ /dev/null @@ -1,102 +0,0 @@ -""" -test.test_launch -~~~ - - - -""" - -from tempfile import NamedTemporaryFile - -from latch_cli.services.launch import launch - -simple_plan = """from latch.types import LatchFile - -params = { - "_name": "wf.__init__.assemble_and_sort", - "read1": LatchFile("latch:///read1"), - "read2": LatchFile("latch:///read2"), -}""" - -crispresso_plan = """from latch.types import LatchFile, LatchDir - - -params = { - "_name": "wf.__init__.crispresso2_wf", - "output_folder": LatchDir("latch:///CRISPResso2_output/"), - "fastq_r1": LatchFile("s3://latch-public/welcome/CRISPResso2/nhej.r1.fastq.gz"), - "fastq_r2": LatchFile("s3://latch-public/welcome/CRISPResso2/nhej.r2.fastq.gz"), - "amplicon_seq": [ - 
"AATGTCCCCCAATGGGAAGTTCATCTGGCACTGCCCACAGGTGAGGAGGTCATGATCCCCTTCTGGAGCTCCCAACGGGCCGTGGTCTGGTTCATCATCTGTAAGAATGGCTTCAAGAGGCTCGGCTGTGGTT" - ], - "name": "nhej", -}""" - -rnaseq_plan = """from latch.types import LatchFile, LatchDir -from enum import Enum - -class Strandedness(Enum): - reverse = "reverse" - forward = "forward" - -params = { - "_name": "wf.__init__.nf_rnaseq_wf", - "sample_ids": [ - "WT_REP1", - "RAP1_UNINDUCED_REP1", - "RAP1_IAA_30M_REP1", - ], - "samples": [ - [ - LatchFile("s3://latch-public/welcome/nf_rnaseq/SRR6357070_1.fastq.gz"), - LatchFile("s3://latch-public/welcome/nf_rnaseq/SRR6357070_2.fastq.gz"), - ], - [ - LatchFile("s3://latch-public/welcome/nf_rnaseq/SRR6357073_1.fastq.gz"), - ], - [ - LatchFile("s3://latch-public/welcome/nf_rnaseq/SRR6357076_1.fastq.gz"), - LatchFile("s3://latch-public/welcome/nf_rnaseq/SRR6357076_2.fastq.gz"), - ], - ], - "strandedness": [ - Strandedness.reverse, - Strandedness.reverse, - Strandedness.reverse, - ], - "fasta": LatchFile("s3://latch-public/welcome/nf_rnaseq/genome.fa.gz"), - "gtf": LatchFile("s3://latch-public/welcome/nf_rnaseq/genes.gtf.gz"), - "gene_bed": LatchFile("s3://latch-public/welcome/nf_rnaseq/genes.bed"), - "output_dir": LatchDir("latch://nf_rnaseq_results/"), -}""" - -# NOTE (kenny) ~ This is a poor test for the moment , but without mocking out -# the connection to Latch nucleus, we can rely on the boolean response as -# success. 
- - -# def test_execute_previous_versions(): -# with NamedTemporaryFile("w+") as tf: -# tf.write(simple_plan) -# tf.seek(0) - -# assert launch(tf.name) == "wf.__init__.assemble_and_sort" -# assert launch(tf.name, "barrackobama") == "wf.__init__.assemble_and_sort" - - -# def test_execute_rnaseq(): -# with NamedTemporaryFile("w+") as tf: -# tf.write(rnaseq_plan) -# tf.seek(0) - -# assert launch(tf.name) == "wf.__init__.nf_rnaseq_wf" - - -# TODO(ayush, kenny): fix this test - -# def test_execute_crispresso(): - -# with NamedTemporaryFile("w+") as tf: -# tf.write(crispresso_plan) -# tf.seek(0) - -# assert launch(tf.name) == "wf.__init__.crispresso2_wf" diff --git a/tests/test_login.py b/tests/test_login.py deleted file mode 100644 index 6268f2c4..00000000 --- a/tests/test_login.py +++ /dev/null @@ -1,7 +0,0 @@ -""" -test.test_login -~~~ -User can retrieve a authorization token for the sdk using 0Auth browser flow. -""" - -# We will need selenium for this diff --git a/tests/test_ls.py b/tests/test_ls.py new file mode 100644 index 00000000..6defd6f7 --- /dev/null +++ b/tests/test_ls.py @@ -0,0 +1,7 @@ +import subprocess + +from .fixtures import test_account_jwt + + +def test_ls(test_account_jwt): + subprocess.run(["latch", "ls"], check=True) diff --git a/tests/test_types.py b/tests/test_types.py deleted file mode 100644 index 6b23bf0b..00000000 --- a/tests/test_types.py +++ /dev/null @@ -1,20 +0,0 @@ -import pytest - -from latch.types.utils import _is_valid_url - - -def test_validate_latch_url(): - valid_urls = ( - "latch:///foo.txt", - "latch:///foo/bar.txt", - "latch:///foo/bar/", - "latch:///foo/bar", - "s3:///foo/bar", - ) - invalid_urls = ("latch://foo.txt", "lach:///foo.txt", "gcp:///foo.txt") - - for url in valid_urls: - assert _is_valid_url(url) is True - - for url in invalid_urls: - assert _is_valid_url(url) is False