Test your production-ready Nextflow pipelines in an efficient and automated way. 🚀

- A DSL similar to Nextflow
- Describes expected behavior using `when` and `then` blocks
- An abundance of functions for writing elegant and readable assertions
- Utilizes snapshots to write tests for complex data structures
- Provides commands for generating boilerplate code
- Includes a test runner that executes these scripts
- Easy installation on CI systems
"},{"location":"#unit-testing","title":"Unit testing","text":"nf-test enables you to test all components of your data science pipeline: from end-to-end testing of the entire pipeline to specific tests of processes or even custom functions. This ensures that all testing is conducted consistently across your project.
Pipeline Process Functionsnextflow_pipeline {\n\nname \"Test Hello World\"\nscript \"nextflow-io/hello\"\n\ntest(\"hello world example should start 4 processes\") {\nexpect {\nwith(workflow) {\nassert success\nassert trace.tasks().size() == 4\nassert \"Ciao world!\" in stdout\nassert \"Bonjour world!\" in stdout\nassert \"Hello world!\" in stdout\nassert \"Hola world!\" in stdout\n}\n}\n}\n\n}\n
nextflow_process {\n\nname \"Test Process SALMON_INDEX\"\nscript \"modules/local/salmon_index.nf\"\nprocess \"SALMON_INDEX\"\n\ntest(\"Should create channel index files\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = file(\"test_data/transcriptome.fa\")\n \"\"\"\n}\n}\n\nthen {\n//check if test case succeeded\nassert process.success\n//analyze trace file\nassert process.trace.tasks().size() == 1\nwith(process.out) {\n// check if emitted output has been created\nassert index.size() == 1\n// count amount of created files\nassert path(index.get(0)).list().size() == 16\n// parse info.json file\ndef info = path(index.get(0)+'/info.json').json\nassert info.num_kmers == 375730\nassert info.seq_length == 443050\n//verify md5 checksum\nassert path(index.get(0)+'/info.json').md5 == \"80831602e2ac825e3e63ba9df5d23505\"\n}\n}\n\n}\n\n}\n
nextflow_function {\n\nname \"Test functions\"\nscript \"functions.nf\"\n\ntest(\"Test function1\") {\nfunction \"function1\"\n...\n}\n\ntest(\"Test function2\") {\nfunction \"function2\"\n...\n}\n}\n
Learn more about pipeline tests, workflow tests, process tests and function tests in the documentation.
"},{"location":"#snapshot-testing","title":"Snapshot testing","text":"nf-test supports snapshot testing and automatically generates a baseline set of unit tests to safeguard against regressions caused by changes.nf-test captures a snapshot of output channels or any other objects and subsequently compares them to reference snapshot files stored alongside the tests. If the two snapshots do not match, the test will fail
Learn more
"},{"location":"#highly-extendable","title":"Highly extendable","text":"nf-test supports the inclusion of third-party libraries (e.g., jar files) or functions from Groovy files. This can be done to either extend its functionality or to prevent code duplication, thus maintaining simplicity in the logic of test cases. Given that many assertions are specific to use cases, nf-test incorporates a plugin system that allows for the extension of existing classes with custom methods. For example FASTA file support.
Learn more
"},{"location":"#support-us","title":"Support us","text":"We love stars as much as we love rockets! So make sure you star us on GitHub.
Star
Show the world your Nextflow pipeline is using nf-test and add the following badge to your README.md
:
[![nf-test](https://img.shields.io/badge/tested_with-nf--test-337ab7.svg)](https://code.askimed.com/nf-test)\n
"},{"location":"#about","title":"About","text":"nf-test has been created by Lukas Forer and Sebastian Sch\u00f6nherr and is MIT Licensed.
Thanks to all the contributors to help us maintaining and improving nf-test!
"},{"location":"about/","title":"About","text":"nf-test has been created by Lukas Forer and Sebastian Sch\u00f6nherr and is MIT Licensed.
"},{"location":"about/#contributors","title":"Contributors","text":""},{"location":"about/#statistics","title":"Statistics","text":"
GitHub:
Bioconda:
"},{"location":"installation/","title":"Installation","text":"nf-test has the same requirements as Nextflow and can be used on POSIX compatible systems like Linux or OS X. You can install nf-test using the following command:
curl -fsSL https://code.askimed.com/install/nf-test | bash\n
If you don't have curl installed, you could use wget:
wget -qO- https://code.askimed.com/install/nf-test | bash\n
This will create the `nf-test` executable file in the current directory. Optionally, move the `nf-test` file to a directory accessible by your `$PATH` variable.
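For example (the target directory is just an illustration; any directory on your `$PATH` works):

```bash
# move the executable onto the PATH
mv nf-test /usr/local/bin/
```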
Test the installation with the following command:

```bash
nf-test version
```

You should see something like this:

```
🚀 nf-test 0.5.0
https://code.askimed.com/nf-test
(c) 2021 - 2022 Lukas Forer and Sebastian Schoenherr

Nextflow Runtime:

 N E X T F L O W
      version 21.10.6 build 5660
      created 21-12-2021 16:55 UTC (17:55 CEST)
      cite doi:10.1038/nbt.3820
      http://nextflow.io
```

Now you are ready to write your first test case.
"},{"location":"installation/#install-a-specific-version","title":"Install a specific version","text":"If you want to install a specific version pass it to the install script as so
curl -fsSL https://code.askimed.com/install/nf-test | bash -s 0.7.0\n
"},{"location":"installation/#manual-installation","title":"Manual installation","text":"All releases are also available on Github.
"},{"location":"installation/#nextflow-binary-not-found","title":"Nextflow Binary not found?","text":"If you get an error message like this, then nf-test was not able to detect your Nextflow installation.
\ud83d\ude80 nf-test 0.5.0\nhttps://code.askimed.com/nf-test\n(c) 2021 -2022 Lukas Forer and Sebastian Schoenherr\n\nNextflow Runtime:\nError: Nextflow Binary not found. Please check if Nextflow is in a directory accessible by your $PATH variable or set $NEXTFLOW_HOME.\n
To solve this issue, you have two possibilities:

1. Check that Nextflow is in a directory accessible by your `$PATH` variable.
2. Set the environment variable `NEXTFLOW_HOME` to the directory that contains the Nextflow binary.
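For example, for the second option (the path below is purely illustrative):

```bash
# point nf-test at the directory containing the nextflow binary
export NEXTFLOW_HOME=/opt/nextflow
```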
## Updating nf-test

To update an existing nf-test installation to the latest version, run the following command:

```bash
nf-test update
```
"},{"location":"installation/#compiling-from-source","title":"Compiling from source","text":"To compile nf-test from source you shall have maven installed. This will produce a nf-test/target/nf-test.jar
file.
git clone git@github.com:askimed/nf-test.git\ncd nf-test\nmvn install\n
To use the newly compiled `nf-test.jar`, update the `nf-test` bash script that is on your `PATH` to point to the new `.jar` file. First locate it with `which nf-test`, and then modify the `APP_HOME` and `APP_JAR` variables at the top:

```bash
#!/bin/bash
APP_HOME="/PATH/TO/nf-test/target/"
APP_JAR="nf-test.jar"
APP_UPDATE_URL="https://code.askimed.com/install/nf-test"
...
```
"},{"location":"resources/","title":"Resources","text":"This page collects videos and blog posts about nf-test created by the community. Have you authored a blog post or given a talk about nf-test? Feel free to contact us, and we will be delighted to feature it here.
"},{"location":"resources/#nf-test-a-simple-test-framework-specifically-tailored-for-nextflow-pipelines","title":"nf-test, a simple test framework specifically tailored for Nextflow pipelines","text":"Sateesh Peri does a hands-on exploration of nf-test, a simple test framework specifically tailored for Nextflow pipelines.
The presentation was recorded as part of the Workflows Community Meetup - All Things Groovy at the Wellcome Genome Campus.
"},{"location":"resources/#nf-corebytesize-nf-test","title":"nf-core/bytesize: nf-test","text":"Edmund Miller shares with us his impressions about nf-test from a user perspective. nf-test is a simple test framework for Nextflow pipelines.
The presentation was recored as part of the nf-core/bytesize
"},{"location":"resources/#episode-8-nf-test-mentorships-and-debugging-resume","title":"Episode 8: nf-test, mentorships and debugging resume","text":"Phil Ewels, Chris Hakkaart and Marcel Ribeiro-Dantas chat about the nf-test framework for testing Nextflow pipelines.
The presentation was part of the \"News & Views\" episode of Channels (Nextflow Podcast).
"},{"location":"resources/#blog-post-a-simple-test-framework-for-nextflow-pipelines","title":"Blog post: A simple test framework for Nextflow pipelines","text":"Discover how nf-test originated from the need to efficiently and automatically test production-ready Nextflow pipelines.
Read blog post
"},{"location":"docs/configuration/","title":"Configuration","text":""},{"location":"docs/configuration/#setup-test-profile","title":"Setup test profile","text":"To run your test using a specific Nextflow profile, you can use the --profile
argument on the command line or define a default profile in nf-test.config
.
nf-test.config
","text":"This config file contains settings for nf-test.
config {\n// location for all nf-tests\ntestsDir \"tests\"\n// nf-test directory including temporary files for each test\nworkDir \".nf-test\"\n// location of an optional nextflow.config file specific for executing tests\nconfigFile \"tests/nextflow.config\"\n// location of library folder that is added automatically to the classpath\nlibDir \"tests/lib\" // run all test with the defined docker profile from the main nextflow.config\nprofile \"docker\"\n// disable tracing options in case container does not include `procps` Linux tool.\nwithTrace false\n//disable sorted channels\nautoSort false\n// add Nextflow options\noptions \"-dump-channels -stub-run\"\n}\n
"},{"location":"docs/configuration/#testsnextflowconfig","title":"tests/nextflow.config
","text":"This optional nextflow.config
file is used to execute tests. This is a good place to set default params
for all your tests. Example number of threads:
params {\n// run all tests with 1 threads\nthreads = 1\n}\n
"},{"location":"docs/configuration/#configuration-for-tests","title":"Configuration for tests","text":"nf-test allows to set an additional configuration for a testsuite:
nextflow_process {\n\n name \"Test Process...\"\n script \"main.nf\"\n process \"my_process\"\n config \"path/to/test/nextflow.config\"\n autoSort false\n options \"-dump-channels\"\n ...\n\n}\n
It is also possible to overwrite the config
, autoSort
or Nextflow properties (e.g. options \"-dump-channels\"
) for a specific test. Depending on the used Nextflow option, also add the --debug
nf-test option on the command-line to see the addtional output.
nextflow_process {\n\n test(\"my test\") {\n\n config \"path/to/test/nextflow.config\"\n autoSort false\n options \"-dump-channels\"\n ...\n\n }\n\n}\n
"},{"location":"docs/getting-started/","title":"Getting started","text":"This guide helps you to understand the concepts of nf-test and to write your first test cases. Before you start, please check if you have installed nf-test properly on your computer. Also, this guide assumes that you have a basic knowledge of Groovy and unit testing. The Groovy documentation is the best place to learn its syntax.
"},{"location":"docs/getting-started/#lets-get-started","title":"Let's get started","text":"To show the power of nf-test, we adapted a recently published proof of concept Nextflow pipeline. We adapted the pipeline to the new DSL2 syntax using modules. First, open the terminal and clone our test pipeline:
# clone nextflow pipeline\ngit clone https://github.com/askimed/nf-test-examples\n\n# enter project directory\ncd nf-test-examples\n
The pipeline consists of three modules (salmon.index.nf
, salmon_align_quant.nf
,fastqc.nf
). Here, we use the salmon.index.nf
process to create a test case from scratch. This process takes a reference as an input and creates an index using salmon.
Before creating test cases, we use the init
command to setup nf-test.
//Init command has already been executed for our repository\nnf-test init\n
The init
command creates the following files: nf-test.config
and the .nf-test/tests
folder.
In the configuration section you can learn more about these files and how to customize the directory layout.
"},{"location":"docs/getting-started/#create-your-first-test","title":"Create your first test","text":"The generate
command helps you to create a skeleton test code for a Nextflow process or the complete pipeline/workflow.
Here we generate a test case for the process salmon.index.nf
:
# delete already existing test case\nrm tests/modules/local/salmon_index.nf.test\nnf-test generate process modules/local/salmon_index.nf\n
This command creates a new file tests/modules/local/salmon_index.nf
with the following content:
nextflow_process {\n\nname \"Test Process SALMON_INDEX\"\nscript \"modules/local/salmon_index.nf\"\nprocess \"SALMON_INDEX\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nparams {\n// define parameters here. Example:\n// outdir = \"tests/results\"\n}\nprocess {\n\"\"\"\n // define inputs of the process here. Example:\n // input[0] = file(\"test-file.txt\")\n \"\"\"\n}\n}\n\nthen {\nassert process.success\nwith(process.out) {\n// Make assertions about the content and elements of output channels here. Example:\n// assert out_channel != null\n}\n}\n\n}\n\n}\n
The generate
command filled automatically the name, script and process of our test case as well as created a skeleton for your first test
method. Typically you create one file per process and use different test
methods to describe the expected behaviour of the process.
This test
has a name, a when
and a then
closure (when/then closures are required here, since inputs need to be defined). The when
block describes the input parameters of the workflow or the process. nf-test executes the process with exactly these parameters and parses the content of the output channels. Then, it evaluates the assertions defined in the then
block to check if content of the output channels matches your expectations.
when
block","text":"The when
block describes the input of the process and/or the Nextflow params
.
The params
block is optional and is a simple map that can be used to override Nextflow's input params
.
The process
block is a multi-line string. The input
array can be used to set the different inputs arguments of the process. In our example, we only have one input that expects a file. Let us update the process
block by setting the first element of the input
array to the path of our reference file:
when {\nparams {\noutdir = \"output\"\n}\nprocess {\n\"\"\"\n // Use transcriptome.fa as a first input paramter for our process\n input[0] = file(\"test_data/transcriptome.fa\")\n \"\"\"\n}\n}\n
Everything which is defined in the process block is later executed in a Nextflow script (created automatically to test your process). Therefore, you can use every Nextflow specific function or command to define the values of the input array (e.g. Channels, files, paths, etc.).
"},{"location":"docs/getting-started/#the-then-block","title":"Thethen
block","text":"The then
block describes the expected output channels of the process when we execute it with the input parameters defined in the when
block.
The then
block typically contains mainly assertions to check assumptions (e.g. the size and the content of an output channel). However, this block accepts every Groovy script. This means you can also import third party libraries to define very specific assertions.
nf-test automatically loads all output channels of the process and all their items into a map named process.out
. You can then use this map to formulate your assertions.
For example, in the salmon_index
process we expect to get one process executed and 16 files created. But we also want to check the md5 sum and want to look into the actual JSON file. Let us update the then
section with some assertions that describe our expectations:
then {\n//check if test case succeeded\nassert process.success\n//analyze trace file\nassert process.trace.tasks().size() == 1\nwith(process.out) {\n// check if emitted output has been created\nassert index.size() == 1\n// count amount of created files\nassert path(index.get(0)).list().size() == 16\n// parse info.json file using a json parser provided by nf-test\ndef info = path(index.get(0)+'/info.json').json\nassert info.num_kmers == 375730\nassert info.seq_length == 443050\nassert path(index.get(0)+'/info.json').md5 == \"80831602e2ac825e3e63ba9df5d23505\"\n}\n}\n
The items of a channel are always sorted by nf-test. This provides a deterministic order inside the channel and enables you to write reproducible tests.
"},{"location":"docs/getting-started/#your-first-test-specification","title":"Your first test specification","text":"You can update the name of the test method to something that gives us later a good description of our specification. When we put everything together, we get the following full working test specification:
nextflow_process {\n\nname \"Test Process SALMON_INDEX\"\nscript \"modules/local/salmon_index.nf\"\nprocess \"SALMON_INDEX\"\n\ntest(\"Should create channel index files\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = file(\"test_data/transcriptome.fa\")\n \"\"\"\n}\n}\n\nthen {\n//check if test case succeeded\nassert process.success\n//analyze trace file\nassert process.trace.tasks().size() == 1\nwith(process.out) {\n// check if emitted output has been created\nassert index.size() == 1\n// count amount of created files\nassert path(index.get(0)).list().size() == 16\n// parse info.json file\ndef info = path(index.get(0)+'/info.json').json\nassert info.num_kmers == 375730\nassert info.seq_length == 443050\nassert path(index.get(0)+'/info.json').md5 == \"80831602e2ac825e3e63ba9df5d23505\"\n}\n}\n}\n}\n
"},{"location":"docs/getting-started/#run-your-first-test","title":"Run your first test","text":"Now, the test
command can be used to run your test:
nf-test test tests/modules/local/salmon_index.nf.test --profile docker\n
"},{"location":"docs/getting-started/#specifying-profiles","title":"Specifying profiles","text":"In this case, the docker
profile defined in the Nextflow pipeline is used to execute the test. The profile is set using the --profile
parameter, but you can also define a default profile in the configuration file.
Congratulations! You created you first nf-test specification.
"},{"location":"docs/getting-started/#nextflow-options","title":"Nextflow options","text":"nf-test also allows to specify Nextflow options (e.g. -dump-channels
, -stub-run
) globally in the nf-test.config file or by adding an option to the test suite or the actual test. Read more about this in the configuration documentation.
nextflow_process {\n\n options \"-dump-channels\"\n\n}\n
"},{"location":"docs/getting-started/#whats-next","title":"What's next?","text":"All test cases described in this documentation can be found in the nf-test-examples repository.
"},{"location":"docs/nftest_pipelines/#gwas-regenie-pipeline","title":"GWAS-Regenie Pipeline","text":"To show the power of nf-test, we applied nf-test to a Nextflow pipeline that performs whole genome regression modelling using regenie. Please click here to learn more about this pipeline and checkout different kind of test cases.
"},{"location":"docs/running-tests/","title":"Running tests","text":""},{"location":"docs/running-tests/#basic-usage","title":"Basic usage","text":"The easiest way to use nf-test is to run the following command. This command will run all tests under the tests
directory. The testDir
can be changed in the nf-test.config
.
nf-test test\n
"},{"location":"docs/running-tests/#execute-specific-tests","title":"Execute specific tests","text":"You can also specify a list of tests, which should be executed.
nf-test test tests/modules/local/salmon_index.nf.test tests/modules/bwa_index.nf.test\n\nnf-test test tests/modules tests/modules/bwa_index.nf.test\n
"},{"location":"docs/running-tests/#tag-tests","title":"Tag tests","text":"nf-test provides a simple tagging mechanism that allows to execute tests by name or by tag.
Tags can be defined for each testsuite or for each testcase using the new tag
directive:
nextflow_process {\n\n name \"suite 1\"\n tag \"tag1\"\n\n test(\"test 1\") {\n tag \"tag2\"\n tag \"tag3\" \n ...\n }\n\n test(\"test 2\") {\n\n tag \"tag4\"\n tag \"tag5\" \n ...\n\n }\n}\n
For example, to execute all tests with tag2
use the following command.
nf-test test --tag tag2 # collects test1\n
Names are automatically added to tags. This enables to execute suits or tests directly.
nf-test test --tag \"suite 1\" # collects test1 and test2\n
When more tags are provided,\u00a0all tests that match at least one tag will be executed. Tags are also not case-sensitive, both lines will result the same tests.
nf-test test --tag tag3,tag4 # collects test1 and test2\nnf-test test --tag TAG3,TAG4 # collects test1 and test2\n
"},{"location":"docs/running-tests/#create-a-tap-output","title":"Create a TAP output","text":"To run all tests and create a report.tap
file, use the following command.
nf-test test --tap report.tap\n
"},{"location":"docs/running-tests/#run-test-by-its-hash-value","title":"Run test by its hash value","text":"To run a specific test using its hash, the following command can be used. The hash value is generated during its first execution.
nf-test test tests/main.nf.test@d41119e4\n
"},{"location":"docs/assertions/assertions/","title":"Assertions","text":"Writing test cases means formulating assumptions by using assertions. Groovy\u2019s power assert provides a detailed output when the boolean expression validates to false. nf-test provides several extensions and commands to simplify the work with Nextflow channels. Here we summarise how nextflow and nf-test handles channels and provide examples for the tools that nf-test
provides:
with
: assert the contents of an item in a channel by indexcontains
: assert the contents of an item in the channel is present anywhere in the channelassertContainsInAnyOrder
: order-agnostic assertion of the contents of a channelNextflow channels emit (in a random order) a single value or a tuple of values.
Channels that emit a single item produce an unordered list of objects, List<Object>
, for example:
process.out.outputCh = ['Hola', 'Hello', 'Bonjour']\n
Channels that contain Nextflow file
values have a unique path each run. For Example:
process.out.outputCh = ['/.nf-test/tests/c563c/work/65/85d0/Hola.json', '/.nf-test/tests/c563c/work/65/fa20/Hello.json', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json']\n
Channels that emit tuples produce an unordered list of ordered objects, List<List<Object>>
:
process.out.outputCh = [\n['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json'], ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'], ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json']\n]\n
Assertions by channel index are made possible through sorting of the nextflow channel. The sorting is performed automatically by nf-test
prior to launch of the then
closure via integer, string and path comparisons. For example, the above would be sorted by nf-test
:
process.out.outputCh = [\n['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n
"},{"location":"docs/assertions/assertions/#using-with","title":"Using with
","text":"This assertions...
assert process.out.imputed_plink2\nassert process.out.imputed_plink2.size() == 1\nassert process.out.imputed_plink2.get(0).get(0) == \"example.vcf\"\nassert process.out.imputed_plink2.get(0).get(1) ==~ \".*/example.vcf.pgen\"\nassert process.out.imputed_plink2.get(0).get(2) ==~ \".*/example.vcf.psam\"\nassert process.out.imputed_plink2.get(0).get(3) ==~ \".*/example.vcf.pvar\"\n
... can be written by using with(){}
to improve readability:
assert process.out.imputed_plink2\nwith(process.out.imputed_plink2) {\nassert size() == 1\nwith(get(0)) {\nassert get(0) == \"example.vcf\"\nassert get(1) ==~ \".*/example.vcf.pgen\"\nassert get(2) ==~ \".*/example.vcf.psam\"\nassert get(3) ==~ \".*/example.vcf.pvar\"\n}\n}\n
"},{"location":"docs/assertions/assertions/#using-contains-to-assert-an-item-in-the-channel-is-present","title":"Using contains
to assert an item in the channel is present","text":"Groovy's contains and collect methods can be used to flexibly assert an item exists in the channel output.
For example, the below represents a channel that emits a two-element tuple, a string and a json file:
/*\ndef process.out.outputCh = [\n ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n ['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n*/\n
To assert the channel contains one of the tuples, parse the json and assert:
testData = process.out.outputCh.collect { greeting, jsonPath -> [greeting, path(jsonPath).json] } assert testData.contains(['Hello', path('./myTestData/Hello.json').json])\n
To assert a subset of the tuple data, filter the channel using collect. For example, to assert the greeting only:
testData = process.out.outputCh.collect { greeting, jsonPath -> greeting } assert testData.contains('Hello')\n
See the files page for more information on parsing and asserting various file types.
"},{"location":"docs/assertions/assertions/#using-assertcontainsinanyorder-for-order-agnostic-assertion-of-the-contents-of-a-channel","title":"UsingassertContainsInAnyOrder
for order-agnostic assertion of the contents of a channel","text":"assertContainsInAnyOrder(List<object> list1, List<object> list2)
performs an order agnostic assertion on channels contents and is available in every nf-test
closure. It is a binding for Hamcrest's assertContainsInAnyOrder.
Some example use-cases are provided below.
"},{"location":"docs/assertions/assertions/#channel-that-emits-strings","title":"Channel that emits strings","text":"// process.out.outputCh = ['Bonjour', 'Hello', 'Hola'] \n\ndef expected = ['Hola', 'Hello', 'Bonjour']\nassertContainsInAnyOrder(process.out.outputCh, expected)\n
"},{"location":"docs/assertions/assertions/#channel-that-emits-a-single-maps-eg-valmymap","title":"Channel that emits a single maps, e.g. val(myMap)","text":"/*\nprocess.out.outputCh = [\n [\n 'D': [10,11,12],\n 'C': [7,8,9]\n ],\n [\n 'B': [4,5,6],\n 'A': [1,2,3]\n ]\n]\n*/\n\ndef expected = [\n[\n'A': [1,2,3],\n'B': [4,5,6]\n],\n[\n'C': [7,8,9],\n'D': [10,11,12]\n]\n]\n\nassertContainsInAnyOrder(process.out.outputCh, expected)\n
"},{"location":"docs/assertions/assertions/#channel-that-emits-json-files","title":"Channel that emits json files","text":"See the files page for more information on parsing and asserting various file types.
Since the outputCh filepaths are different between consecutive runs, the files need to be read/parsed prior to comparison
/*\nprocess.out.outputCh = [\n '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json',\n '/.nf-test/tests/c563c/work/65/fa20/Hello.json',\n '/.nf-test/tests/c563c/work/65/85d0/Hola.json'\n]\n*/\n\ndef actual = process.out.outputCh.collect { filepath -> path(filepath).json }\ndef expected = [\npath('./myTestData/Hello.json').json,\npath('./myTestData/Hola.json').json,\npath('./myTestData/Bonjour.json').json,\n]\n\nassertContainsInAnyOrder(actual, expected)\n
"},{"location":"docs/assertions/assertions/#channel-that-emits-a-tuple-of-strings-and-json-files","title":"Channel that emits a tuple of strings and json files","text":"See the files page for more information on parsing and asserting various file types.
Since the ordering of items within the tuples are consistent, we can assert this case:
/*\nprocess.out.outputCh = [\n ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n ['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n*/\n\ndef actual = process.out.outputCh.collect { greeting, filepath -> [greeting, path(filepath).json] }\ndef expected = [\n['Hola', path('./myTestData/Hola.json').json], ['Hello', path('./myTestData/Hello.json').json],\n['Bonjour', path('./myTestData/Bonjour.json').json],\n]\n\nassertContainsInAnyOrder(actual, expected)\n
To assert the json only and ignore the strings:
/*\nprocess.out.outputCh = [\n ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n ['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n*/\n\ndef actual = process.out.outputCh.collect { greeting, filepath -> path(filepath).json }\ndef expected = [\npath('./myTestData/Hello.json').json, path('./myTestData/Hola.json').json,\npath('./myTestData/Bonjour.json').json\n]\n\nassertContainsInAnyOrder(actual, expected)\n
To assert the strings only and not the json files:
/*\nprocess.out.outputCh = [\n ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n ['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n*/\n\ndef actual = process.out.outputCh.collect { greeting, filepath -> greeting }\ndef expected = ['Hello', 'Hola', 'Bonjour]\n\nassertContainsInAnyOrder(actual, expected)\n
"},{"location":"docs/assertions/assertions/#using-assertall","title":"Using assertAll
","text":"assertAll(Closure... closures)
ensures that all supplied closures do no throw exceptions. The number of failed closures is reported in the Exception message. This useful for efficient debugging of a set of test assertions from a single test run.
def a = 2\n\nassertAll(\n{ assert a==1 },\n{ a = 1/0 },\n{ assert a==2 },\n{ assert a==3 }\n)\n
The output will look like this: assert a==1\n||\n|false\n2\n\njava.lang.ArithmeticException: Division by zero\nAssertion failed:\n\nassert a==3\n||\n|false\n2\n\nFAILED (7.106s)\n\njava.lang.Exception: 3 of 4 assertions failed\n
"},{"location":"docs/assertions/fasta/","title":"FASTA Files","text":"0.7.0
The nft-fasta plugin extends path
by a fasta
property that can be used to read FASTA files into maps. nft-fasta supports also gzipped FASTA files.
To use the fasta
property you need to activate the nft-fasta
plugin in your nf-test.config
file:
config {\n plugins {\n load \"nft-fasta@1.0.0\"\n }\n}\n
More about plugins can be fond here.
"},{"location":"docs/assertions/fasta/#comparing-files","title":"Comparing files","text":"assert path('path/to/fasta1.fasta').fasta == path(\"path/to/fasta2.fasta'\").fasta\n
"},{"location":"docs/assertions/fasta/#work-with-individual-samples","title":"Work with individual samples","text":"def sequences = path('path/to/fasta1.fasta.gz').fasta\nassert \"seq1\" in sequences\nassert !(\"seq8\" in sequences)\nassert sequences.seq1 == \"AGTACGTAGTAGCTGCTGCTACGTGCGCTAGCTAGTACGTCACGACGTAGATGCTAGCTGACTCGATGC\"\n
"},{"location":"docs/assertions/files/","title":"Files","text":""},{"location":"docs/assertions/files/#md5-checksum","title":"md5 Checksum","text":"nf-test extends path
by a md5
property that can be used to compare the file content with an expected checksum:
assert path(process.out.out_ch.get(0)).md5 == \"64debea5017a035ddc67c0b51fa84b16\"\n
"},{"location":"docs/assertions/files/#json-files","title":"JSON Files","text":"nf-test supports comparison of JSON files and keys within JSON files. To assert that two JSON files contain the same keys and values:
assert path(process.out.out_ch.get(0)).json == path('./some.json').json\n
Individual keys can also be asserted: assert path(process.out.out_ch.get(0)).json.key == \"value\"\n
"},{"location":"docs/assertions/files/#yaml-files","title":"YAML Files","text":"nf-test supports comparison of YAML files and keys within YAML files. To assert that two YAML files contain the same keys and values:
assert path(process.out.out_ch.get(0)).yaml == path('./some.yaml').yaml\n
Individual keys can also be asserted: assert path(process.out.out_ch.get(0)).yaml.key == \"value\"\n
"},{"location":"docs/assertions/files/#gzip-files","title":"GZip Files","text":"nf-test extends path
by a linesGzip
property that can be used to read gzip compressed files.
assert path(process.out.out_ch.get(0)).linesGzip.size() == 5\nassert path(process.out.out_ch.get(0)).linesGzip.contains(\"Line Content\")\n
"},{"location":"docs/assertions/files/#filter-lines","title":"Filter lines","text":"The returned array can also be filtered by lines.
def lines = path(process.out.gzip.get(0)).linesGzip[0..5]\nassert lines.size() == 6\ndef lines = path(process.out.gzip.get(0)).linesGzip[0]\nassert lines.equals(\"MY_HEADER\")\n
"},{"location":"docs/assertions/files/#grep-lines","title":"Grep lines","text":"nf-test also provides the possibility to grep only specific lines with the advantage that only a subset of lines need to be read (especially helpful for larger files).
def lines = path(process.out.gzip.get(0)).grepLinesGzip(0,5)\nassert lines.size() == 6\ndef lines = path(process.out.gzip.get(0)).grepLineGzip(0)\nassert lines.equals(\"MY_HEADER\")\n
"},{"location":"docs/assertions/files/#snapshot-support","title":"Snapshot Support","text":"The possibility of filter lines from a *.gz file can also be combined with the snapshot functionality.
assert snapshot(\npath(process.out.gzip.get(0)).linesGzip[0]\n).match()\n
"},{"location":"docs/assertions/libraries/","title":"Using Third-Party Libraries","text":"nf-test supports including third party libraries (e.g. jar files ) or functions from groovy files to either extend it functionality or to avoid duplicate code and to keep the logic in test cases simple.
"},{"location":"docs/assertions/libraries/#using-local-groovy-files","title":"Using Local Groovy Files","text":"0.7.0 \u00b7
If nf-test detects a lib
folder in the directory of a tescase, then it adds it automatically to the classpath.
We have a Groovy script MyWordUtils.groovy
that contains the following class:
class MyWordUtils {\n\ndef static capitalize(String word){\nreturn word.toUpperCase();\n}\n\n}\n
We can put this file in a subfolder called lib
:
testcase_1\n\u251c\u2500\u2500 capitalizer.nf\n\u251c\u2500\u2500 capitalizer.test\n\u2514\u2500\u2500 lib\n \u2514\u2500\u2500 MyWordUtils.groovy\n
The file capitalizer.nf
contains the CAPITALIZER
process:
#!/usr/bin/env nextflow\nnextflow.enable.dsl=2\n\nprocess CAPITALIZER {\ninput:\nval cheers\noutput:\nstdout emit: output\nscript:\nprintln \"$cheers\".toUpperCase()\n\"\"\"\n \"\"\"\n\n}\n
Next, we can use this class in the capitalizer.nf.test
like every other class that is provided by nf-test or Groovy itself:
nextflow_process {\n\nname \"Test Process CAPITALIZER\"\nscript \"capitalizer.nf\"\nprocess \"CAPITALIZER\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = \"world\"\n \"\"\"\n}\n}\n\nthen {\nassert process.success\nassert process.stdout.contains(MyWordUtils.capitalize('world'))\n}\n\n}\n\n}\n
If we have a project and we want to reuse libraries in multiple test cases, then we can store the class in the shared lib folder. Both test cases are now able to use MyWordUtils
:
tests\n\u251c\u2500\u2500 testcase_1\n \u251c\u2500\u2500 hello_1.nf\n \u251c\u2500\u2500 hello_1.nf.test\n\u251c\u2500\u2500 testcase_2\n \u251c\u2500\u2500 hello_2.nf\n \u251c\u2500\u2500 hello_2.nf.test\n\u2514\u2500\u2500 lib\n \u2514\u2500\u2500 MyWordUtils.groovy\n
The default location is tests/lib
. This folder location can be changed in nf-test config file.
It is also possible to use the --lib
parameter to add an additional folder to the classpath:
nf-test test tests/testcase_1/hello_1.nf.test --lib tests/mylibs\n
If multiple folders are used, the they need to be separate with a colon (like in Java or Groovy).
"},{"location":"docs/assertions/libraries/#using-local-jar-files","title":"Using Local Jar Files","text":"To integrate local jar files, you can either specify the path to the jar within the nf-test --lib
option
nf-test test test.nf.test --lib tests/lib/groovy-ngs-utils/groovy-ngs-utils.jar\n
or add it as follows to the nf-test.config
file:
libDir \"tests/lib:tests/lib/groovy-ngs-utils/groovy-ngs-utils.jar\"\n
You could then import the class and use it in the then
statement:
import gngs.VCF;\n\nnextflow_process {\n\nname \"Test Process VARIANT_CALLER\"\nscript \"variant_caller.nf\"\nprocess \"VARIANT_CALLER\"\n\ntest(\"Should run without failures\") {\n\nwhen {\n...\n}\n\nthen {\nassert process.success def vcf = VCF.parse(\"$baseDir/tests/test_data/NA12879.vcf.gz\")\nassert vcf.samples.size() == 10\nassert vcf.variants.size() == 20\n}\n\n}\n\n}\n
"},{"location":"docs/assertions/libraries/#using-maven-artifcats-with-grab","title":"Using Maven Artifcats with @Grab
","text":"nf-test supports the @Grab
annotation to include third-party libraries that are available in a maven repository. As the dependency is defined as a maven artifact, there is no local copy of the jar file needed and maven enables to include an exact version as well as provides an easy update process.
The following example uses the WordUtil
class from commons-lang
:
@Grab(group='commons-lang', module='commons-lang', version='2.4')\nimport org.apache.commons.lang.WordUtils\n\nnextflow_process {\n\nname \"Test Process CAPITALIZER\"\nscript \"capitalizer.nf\"\nprocess \"CAPITALIZER\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = \"world\"\n \"\"\"\n}\n}\n\nthen {\nassert process.success\nassert process.stdout.contains(WordUtils.capitalize('world'))\n}\n\n}\n\n}\n
"},{"location":"docs/assertions/regular-expressions/","title":"Regular Expressions","text":""},{"location":"docs/assertions/regular-expressions/#using-operator","title":"Using ==~
operator","text":"The operator ==~
can be used to check if a string matches a regular expression:
assert \"/my/full/path/to/process/dir/example.vcf.pgen\" ==~ \".*/example.vcf.pgen\"\n
"},{"location":"docs/assertions/snapshots/","title":"Snapshots","text":"0.7.0
Snapshots are a very useful tool whenever you want to make sure your output channels or output files not change unexpectedly. This feature is highly inspired by Jest.
A typical snapshot test case takes a snapshot of the output channels or any other object, then compares it to a reference snapshot file stored alongside the test (*.nf.test.snap
). The test will fail, if the two snapshots do not match: either the change is unexpected, or the reference snapshot needs to be updated to the new output of a process, workflow, pipeline or function.
The snapshot
keyword creates a snapshot of the object and its match
method can then be used to check if its contains the expected data from the snap file. The following example shows how to create a snapshot of a workflow channel:
assert snapshot(workflow.out.channel1).match()\n
You can also create a snapshot of all output channels of a process:
assert snapshot(process.out).match()\n
Even the result of a function can be used:
assert snapshot(function.result).match()\n
The first time this test runs, nf-test creates a snapshot file. This is a json file that contains a serialized version of the provided object.
The snapshot file should be committed alongside code changes, and reviewed as part of your code review process. nf-test uses pretty-format to make snapshots human-readable during code review. On subsequent test runs, nf-test will compare the data with the previous snapshot. If they match, the test will pass. If they don't match, either the test runner found a bug in your code that should be fixed, or the implementation has changed and the snapshot needs to be updated.
"},{"location":"docs/assertions/snapshots/#updating-snapshots","title":"Updating Snapshots","text":"When a snapshot test is failing due to an intentional implementation change, you can use the --update-snapshot
flag to re-generate snapshots for all failed tests.
nf-test test tests/main.nf.test --update-snapshot\n
"},{"location":"docs/assertions/snapshots/#cleaning-obsolete-snapshots","title":"Cleaning Obsolete Snapshots","text":"0.8.0
Over time, snapshots can become outdated, leading to inconsistencies in your testing process. To help you manage obsolete snapshots, nf-test generates a list of these obsolete keys. This list provides transparency into which snapshots are no longer needed and can be safely removed.
Running your tests with the --clean-snapshot
or --wipe-snapshot
option removes the obsolete snapshots from the snapshot file. This option is useful when you want to maintain the structure of your snapshot file but remove unused entries. It ensures that your snapshot file only contains the snapshots required for your current tests, reducing file bloat and improving test performance.
nf-test test tests/main.nf.test --clean-snapshot\n
Obsolete snapshots can only be detected when running all tests in a test file simultaneously, and when all tests pass. If you run a single test or if tests are skipped, nf-test cannot detect obsolete snapshots.
"},{"location":"docs/assertions/snapshots/#constructing-complex-snapshots","title":"Constructing Complex Snapshots","text":"It is also possible to include multiple objects into one snapshot:
assert snapshot(workflow.out.channel1, workflow.out.channel2).match()\n
Every object that is serializable can be included into snapshots. Therefore you can even make a snapshot of the complete workflow or process object. This includes stdout, stderr, exist status, trace etc. and is the easiest way to create a test that checks for all of this properties:
assert snapshot(workflow).match()\n
You can also include output files to a snapshot (e.g. useful in pipeline tests where no channels are available):
assert snapshot(\nworkflow,\npath(\"${params.outdir}/file1.txt\"),\npath(\"${params.outdir}/file2.txt\"),\npath(\"${params.outdir}/file3.txt\")\n).match()\n
By default the snapshot has the same name as the test. You can also store a snapshot under a user defined name. This enables you to use multiple snapshots in one single test and to separate them in a logical way. In the following example a workflow snapshot is created, stored under the name \"workflow\".
assert snapshot(workflow).match(\"workflow\")\n
The next example creates a snapshot of two files and saves it under \"files\".
assert snapshot(path(\"${params.outdir}/file1.txt\"), path(\"${params.outdir}/file2.txt\")).match(\"files\")\n
You can also use helper methods to add objects to snapshots. For example, you can use the list()
method to add all files of a folder to a snapshot:
assert snapshot(workflow, path(params.outdir).list()).match()\n
"},{"location":"docs/assertions/snapshots/#file-paths","title":"File Paths","text":"If nf-test detects a path in the snapshot it automatically replace it by a unique fingerprint of the file that ensures the file content is the same. The fingerprint is default the md5 sum.
"},{"location":"docs/assertions/snapshots/#snapshot-differences","title":"Snapshot Differences","text":"0.8.0
By default, nf-test uses the diff
tool for comparing snapshots. It employs the following default arguments:
-y
: Enables side-by-side comparison mode.-W 200
: Sets the maximum width for displaying the differences to 200 characters.These default arguments are applied when no custom settings are specified.
If diff
is not installed on the system, nf-test will print exepcted and found snapshots without highlighting differences.
Users have the flexibility to customize the arguments passed to the diff tool using an environment variable called NFT_DIFF_ARGS
. This environment variable allows you to modify the way the diff tool behaves when comparing snapshots.
To customize the arguments, follow these steps:
Set the NFT_DIFF_ARGS
environment variable with your desired arguments.
export NFT_DIFF_ARGS=\"<your_custom_arguments>\"\n
Run nf-test
to perform snapshot comparison, and it will utilize the custom arguments specified in NFT_DIFF_ARGS
.
nf-test
not only allows you to customize the arguments but also provides the flexibility to change the diff tool itself. This can be achieved by using the environment variable NFT_DIFF
.
As an example, you can change the diff tool to icdiff
, which supports features like colors. To switch to icdiff
, follow these steps:
Install icdiff
Set the NFT_DIFF
environment variable to icdiff
to specify the new diff tool.
export NFT_DIFF=\"icdiff\"\n
If needed, customize the arguments for icdiff
using NFT_DIFF_ARGS
as explained in the previous section
export NFT_DIFF_ARGS=\"-N --cols 200 -L expected -L observed -t\"\n
Run nf-test
, and it will use icdiff
as the diff tool for comparing snapshots.
clean
command","text":""},{"location":"docs/cli/clean/#usage","title":"Usage","text":"nf-test clean\n
The clean
command removes the .nf-test
directory.
generate
command","text":""},{"location":"docs/cli/generate/#usage","title":"Usage","text":"nf-test generate <TEST_CASE_TYPE> <NEXTFLOW_FILES>\n
"},{"location":"docs/cli/generate/#supported-types","title":"Supported Types","text":""},{"location":"docs/cli/generate/#process","title":"process
","text":""},{"location":"docs/cli/generate/#workflow","title":"workflow
","text":""},{"location":"docs/cli/generate/#pipeline","title":"pipeline
","text":""},{"location":"docs/cli/generate/#function","title":"function
","text":""},{"location":"docs/cli/generate/#examples","title":"Examples","text":"Create a test case for a process:
nf-test generate process modules/local/salmon_index.nf\n
Create a test cases for all processes in folder modules
:
nf-test generate process modules/**/*.nf\n
Create a test case for a sub workflow:
nf-test generate workflow workflows/some_workflow.nf\n
Create a test case for the whole pipeline:
nf-test generate pipeline main.nf\n
Create a test case for each functio in file functions.nf
:
nf-test generate function functions.nf\n
"},{"location":"docs/cli/init/","title":"init
command","text":""},{"location":"docs/cli/init/#usage","title":"Usage","text":"nf-test init\n
The init
command set ups nf-test in the current directory.
The init
command creates the following files: nf-test.config
and tests/nextflow.config
. It also creates a folder tests
which is the home directory of your test code.
In the configuration section you can learn more about these files and how to customize the directory layout.
"},{"location":"docs/cli/list/","title":"list
command","text":""},{"location":"docs/cli/list/#usage","title":"Usage","text":"list
command provides a convenient way to list all available test cases.
nf-test list [<NEXTFLOW_FILES>|<SCRIPT_FOLDERS>]\n
"},{"location":"docs/cli/list/#optional-arguments","title":"Optional Arguments","text":""},{"location":"docs/cli/list/#-tags","title":"--tags
","text":"Print a list of all used tags.
"},{"location":"docs/cli/list/#-format-json","title":"--format json
","text":"Print the list of tests or tags as json object.
"},{"location":"docs/cli/list/#-format-raw","title":"--format raw
","text":"Print the list of tests or tags as simple list without formatting.
"},{"location":"docs/cli/list/#-silent","title":"--silent
","text":"Hide program version and header infos.
"},{"location":"docs/cli/list/#-debug","title":"--debug
","text":"Show debugging infos.
"},{"location":"docs/cli/list/#examples","title":"Examples","text":"List test cases that can be found in the testDir
defined in the nf-test.config
file in the current working directory:
nf-test list\n
List test cases in specified test scripts and search specified directories for additional test scripts:
nf-test list tests/modules/local/salmon_index.nf.test tests/modules/bwa_index.nf.test\n\nnf-test list tests/modules tests/modules/bwa_index.nf.test\n
List of all testcases as json:
nf-test list --format json --silent\n[\"/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@69b98c67\",\"/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@fdb6c1cc\",\"/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@d1c219eb\",\"/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@3c54e3cb\",...]\n
nf-test list --format raw --silent\n/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@69b98c67\n/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@fdb6c1cc\n/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@d1c219eb\n/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@3c54e3cb\n...\n
nf-test list --tags --format json --silent\n[\"fastqc\",\"snakemake\"]\n
nf-test list --tags --format raw --silent\nfastqc\nsnakemake\n
"},{"location":"docs/cli/test/","title":"test
command","text":""},{"location":"docs/cli/test/#usage","title":"Usage","text":"nf-test test [<NEXTFLOW_FILES>|<SCRIPT_FOLDERS>]\n
"},{"location":"docs/cli/test/#optional-arguments","title":"Optional Arguments","text":""},{"location":"docs/cli/test/#-profile-nextflow_profile","title":"--profile <NEXTFLOW_PROFILE>
","text":""},{"location":"docs/cli/test/#-debug","title":"--debug
","text":"The debug parameter prints out all available output channels which can be accessed in the then
clause.
--without-trace
","text":"The Linux tool procps
is required to run Nextflow tracing. In case your container does not support this tool, you can also run nf-test without tracing. Please note that the workflow.trace
are not available when running it with this flag.
--tag <tag>
","text":"Execute only tests with the provided tag. Multiple tags can be used and have to be separated by commas (e.g. tag1,tag2
).
--tap <filename>
","text":"Writes test results in TAP format to file.
"},{"location":"docs/cli/test/#-junitxml-filename","title":"--junitxml <filename>
","text":"Writes test results in JUnit XML format to file, which conforms to the standard schema.
"},{"location":"docs/cli/test/#examples","title":"Examples","text":"Run all test scripts that can be found in the testDir
defined in the nf-test.config
file in the current working directory:
nf-test test\n
Run all specified test scripts and search specified directories for additional test scripts:
nf-test test tests/modules/local/salmon_index.nf.test tests/modules/bwa_index.nf.test\n\nnf-test test tests/modules tests/modules/bwa_index.nf.test\n
Run a specific test using its hash:
nf-test test tests/main.nf.test@d41119e4\n
Run all tests and write results to report.tap
:
nf-test test --tap report.tap\n
0.7.0
The following plugin can be used as a boilerplate: https://github.com/askimed/nft-fasta
"},{"location":"docs/plugins/developing-plugins/#developing-plugins","title":"Developing Plugins","text":"A plugin has the possibility:
fasta
to class Path
). It uses Groovy's ExtensionModule concept. Important: the method has to be static. One class can provide multiple methods.// com.askimed.nf.test.fasta.PathExtension\npublic class PathExtension {\n//can be used as: path(filename).fasta\npublic static Object getFasta(Path self) {\nreturn FastaUtil.readAsMap(self);\n}\n\n}\n
// com.askimed.nf.test.fasta.Methods\npublic class Methods {\n\n//can be used as: helloFasta()\npublic static void helloFasta() {\nSystem.out.println(\"Hello FASTA\");\n}\n\n}\n
"},{"location":"docs/plugins/developing-plugins/#manifest-file","title":"Manifest file","text":"You need to create a file META-INF/nf-test-plugin
(in your resources). This file contains metadata about the plugin and both classes can now be registered by using the extensionClasses
and extensionMethods
properties.
moduleName=nft-my-plugin\nmoduleVersion=1.0.0\nmoduleAuthors=Lukas Forer\nextensionClasses=com.askimed.nf.test.fasta.PathExtension\nextensionMethods=com.askimed.nf.test.fasta.Methods\n
"},{"location":"docs/plugins/developing-plugins/#building-a-jar-file","title":"Building a jar file","text":"The plugin itself is a jar file that contains all classes and the META-INF/nf-test-plugin
file. If you have dependencies, then you have to create an uber-jar that includes all libraries, because nf-test doesn't support the classpath set in META-INF/MANIFEST.MF.
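One possible way to build such an uber-jar is sketched below, assuming a Gradle build and the third-party Shadow plugin (the plugin version and the dependency coordinates are illustrative):
// build.gradle (sketch)\nplugins {\nid 'groovy'\nid 'com.github.johnrengelman.shadow' version '7.1.2'\n}\n\nrepositories {\nmavenCentral()\n}\n\ndependencies {\n// replace with the real dependencies of your plugin\nimplementation 'commons-lang:commons-lang:2.4'\n}\n\n// running 'gradle shadowJar' bundles all classes and dependencies into a single jar\n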
Available plugins are managed in this default repository: https://github.com/askimed/nf-test-plugins/blob/main/plugins.json
Add your plugin or a new release to the plugins.json
file and create a pull request to publish your plugin in the default repository. Or host your own repository:
[{\n \"id\": \"nft-fasta\",\n \"releases\": [{\n \"version\": \"1.0.0\",\n \"url\": \"https://github.com/askimed/nft-fasta/releases/download/v1.0.0/nft-fasta-1.0.0.jar\"\n },{\n \"version\": \"2.0.0\",\n \"url\": \"https://github.com/askimed/nft-fasta/releases/download/v2.0.0/nft-fasta-2.0.0.jar\"\n }]\n},{\n \"id\": \"nft-my-plugin\",\n \"releases\": [{\n \"version\": \"1.0.0\",\n \"url\": \"https://github.com/lukfor/nft-my-plugin2/releases/download/v1.0.0/nft-my-plugin-1.0.0.jar\"\n }]\n}]\n
"},{"location":"docs/plugins/using-plugins/","title":"Plugins","text":"0.7.0
Most assertions are use-case specific. Therefore, separating this functionality and helper classes from the nf-test codebase has several advantages:
For this purpose, we integrated a plugin system that provides (1) the possibility to extend existing classes with custom methods (e.g. path(filename).fasta
) and (2) the possibility to extend nf-test with new functions.
Available plugins are listed here.
A plugin can be activated via the nf-test.config
by adding the plugins
section and by using the load
method to specify the plugin and its version:
config {\n\n plugins {\n\n load \"nft-fasta@1.0.0\"\n\n }\n\n}\n
It is also possible to add one or more additional repositories (e.g. a repository with development/snapshot versions, or an in-house repository):
config {\n\n plugins {\n\n repository \"https://github.com/askimed/nf-test-plugins/blob/main/plugins-snapshots.json\"\n repository \"https://github.com/seppinho/nf-test-plugin2/blob/main/plugins.json\"\n\n load \"nft-fasta@1.1.0-snapshot\"\n load \"nft-plugin2@1.1.0\"\n\n // you can also load jar files directly without any repository\n // loadFromFile \"path/to/my/nft-plugin.jar\"\n }\n\n}\n
All plugins are downloaded and cached in .nf-test/plugins
. This installation mechanism is not yet safe for parallel execution when multiple nf-test instances resolve the same plugin. However, you can use nf-test update-plugins
to download all plugins before you run your tests in parallel.
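For example, to pre-download all plugins before a parallel run:
nf-test update-plugins\n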
To clear the cache and force re-downloading of plugins and repositories, you can execute the nf-test clean
command.
One or multiple plugins can also be activated via the --plugins
parameter:
nf-test test my-test.nf.test --plugins nft-fasta@1.0.0,plugin2@1.0.0\n
or
nf-test test my-test.nf.test --plugins path/to/my/nft-plugin.jar\n
"},{"location":"docs/testcases/","title":"Documentation","text":""},{"location":"docs/testcases/global_variables/","title":"Global Variables","text":"The following variables are available and can be used in setup
, when
, then
and cleanup
closures.
baseDir or projectDir
: the directory where the nf-test.config
script is located (e.g. mypipeline).
moduleDir
: the directory where the module script is located (e.g. mypipeline/modules/mymodule).
moduleTestDir
: the directory where the test script is located (e.g. mypipeline/tests/modules/mymodule).
launchDir
: the directory where the test is run (e.g. mypipeline/.nf-test/tests/<test_hash>).
metaDir
: the directory where all meta files (e.g. mock.nf
) are located (e.g. mypipeline/.nf-test/tests/<test_hash>/meta).
workDir
: the directory where temporary task files are created (e.g. mypipeline/.nf-test/tests/<test_hash>/work).
outputDir
: an output directory inside $launchDir
that can be used to store output files; the variable contains the absolute path. If you need a relative output directory, see the launchDir
example (e.g. mypipeline/.nf-test/tests/<test_hash>/output).
params
: dictionary-like object holding all parameters."},{"location":"docs/testcases/global_variables/#examples","title":"Examples","text":""},{"location":"docs/testcases/global_variables/#outputdir","title":"outputDir
","text":"This variable points to the directory within the temporary test directory (.nf-test/tests/<test-dir>/output/
). The variable can be set under params:
params {\noutdir = \"$outputDir\"\n}\n
"},{"location":"docs/testcases/global_variables/#basedir","title":"baseDir
","text":"This variable points to the directory to locate the base directory of the main nf-test config. The variable can be used e.g. in the process definition to build absolute paths for input files:
process {\n\"\"\"\n file1 = file(\"$baseDir/tests/input/file123.gz\")\n \"\"\"\n}\n
"},{"location":"docs/testcases/global_variables/#launchdir","title":"launchDir
","text":"This variable points to the directory where the test is executed. This can be used get access to results that are created in an relative output directory:
when {\nparams {\noutdir = \"results\"\n}\n}\n
then {\nassert path(\"$launchDir/results\").exists()\n}\n
"},{"location":"docs/testcases/nextflow_function/","title":"Function Testing","text":"nf-test allows testing of functions that are defined in a Nextflow file or defined in lib
. Please check out the CLI to generate a function test.
nextflow_function {\n\nname \"<NAME>\"\nscript \"<PATH/TO/NEXTFLOW_SCRIPT.nf>\"\nfunction \"<FUNCTION_NAME>\"\n\ntest(\"<TEST_NAME>\") {\n\n}\n}\n
Script paths that start with ./
or ../
are considered relative paths. These paths are resolved based on the location of the test script. Relative paths are beneficial when you want to reference files or directories located within the same directory as your test script or in a parent directory. These paths provide a convenient way to access files without specifying the entire path.
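For example (the path is illustrative), a test script two directories below the module could reference it like this:
script \"../../modules/local/my_module.nf\"\n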
If a Nextflow script contains multiple functions and you want to test them all in the same testsuite, you can override the function
property in each test. For example:
functions.nf
","text":"def function1() {\n...\n}\n\ndef function2() {\n...\n}\n
"},{"location":"docs/testcases/nextflow_function/#functionsnftest","title":"functions.nf.test
","text":"nextflow_function {\n\nname \"Test functions\"\nscript \"functions.nf\"\n\ntest(\"Test function1\") {\nfunction \"function1\"\n...\n}\n\ntest(\"Test function2\") {\nfunction \"function2\"\n...\n}\n}\n
"},{"location":"docs/testcases/nextflow_function/#functions-in-lib-folder","title":"Functions in lib
folder","text":"If you want to test a function that is inside a groovy file in your lib
folder, you can ignore the script
property, because Nextflow adds them automatically to the classpath. For example:
lib\\Utils.groovy
","text":"class Utils {\n\n public static void sayHello(name) {\n if (name == null) {\n error('Cannot greet a null person')\n }\n\n def greeting = \"Hello ${name}\"\n\n println(greeting)\n }\n\n}\n
"},{"location":"docs/testcases/nextflow_function/#testslibutilsgroovytest","title":"tests\\lib\\Utils.groovy.test
","text":"nextflow_function {\n\nname \"Test Utils.groovy\"\n\ntest(\"Test function1\") {\nfunction \"Utils.sayHello\"\n...\n}\n}\n
Note: the generate function
command works only with Nextflow functions.
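For example (the file name is illustrative):
nf-test generate function functions.nf\n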
The function
object can be used in asserts to check its status, result value or error messages.
// function status\nassert function.success\nassert function.failed\n\n// return value\nassert function.result == 27\n\n// function.stdout returns a list containing all lines from stdout\nassert function.stdout.contains(\"Hello World\")\n
"},{"location":"docs/testcases/nextflow_function/#example","title":"Example","text":""},{"location":"docs/testcases/nextflow_function/#nextflow-script","title":"Nextflow script","text":"Create a new file and name it functions.nf
.
def say_hello(name) {\nif (name == null) {\nerror('Cannot greet a null person')\n}\n\ndef greeting = \"Hello ${name}\"\n\nprintln(greeting)\nreturn greeting\n}\n
"},{"location":"docs/testcases/nextflow_function/#nf-test-script","title":"nf-test script","text":"Create a new file and name it functions.nf.test
.
nextflow_function {\n\nname \"Test Function Say Hello\"\n\nscript \"functions.nf\"\nfunction \"say_hello\"\n\ntest(\"Passing case\") {\n\nwhen {\nfunction {\n\"\"\"\n input[0] = \"aaron\"\n \"\"\"\n}\n}\n\nthen {\nassert function.success\nassert function.result == \"Hello aaron\"\nassert function.stdout.contains(\"Hello aaron\")\nassert function.stderr.isEmpty()\n}\n\n}\n\ntest(\"Failure Case\") {\n\nwhen {\nfunction {\n\"\"\"\n input[0] = null\n \"\"\"\n}\n}\n\nthen {\nassert function.failed\n// note: error(..) writes the message to stdout\nassert function.stdout.contains(\"Cannot greet a null person\")\n}\n}\n}\n
"},{"location":"docs/testcases/nextflow_function/#execute-test","title":"Execute test","text":"nf-test test functions.nf.test\n
"},{"location":"docs/testcases/nextflow_pipeline/","title":"Pipeline Testing","text":"nf-test also allows to test the complete pipeline end-to-end. Please checkout the CLI to generate a pipeline test.
"},{"location":"docs/testcases/nextflow_pipeline/#syntax","title":"Syntax","text":"nextflow_pipeline {\n\nname \"<NAME>\"\nscript \"<PATH/TO/NEXTFLOW_SCRIPT.nf>\"\n\ntest(\"<TEST_NAME>\") {\n\n}\n}\n
"},{"location":"docs/testcases/nextflow_pipeline/#assertions","title":"Assertions","text":"The workflow
object can be used in asserts to check its status, error messages or traces.
// workflow status\nassert workflow.success\nassert workflow.failed\nassert workflow.exitStatus == 0\n\n// workflow error message\nassert workflow.errorReport.contains(\"....\")\n\n// trace\n//returns a list containing succeeded tasks\nassert workflow.trace.succeeded().size() == 3\n\n//returns a list containing failed tasks\nassert workflow.trace.failed().size() == 0\n\n//returns a list containing all tasks\nassert workflow.trace.tasks().size() == 3\n
"},{"location":"docs/testcases/nextflow_pipeline/#example","title":"Example","text":""},{"location":"docs/testcases/nextflow_pipeline/#nextflow-script","title":"Nextflow script","text":"Create a new file and name it pipeline.nf
.
#!/usr/bin/env nextflow\nnextflow.enable.dsl=2\n\nprocess SAY_HELLO {\ninput:\nval cheers\n\noutput:\nstdout emit: verbiage_ch\npath '*.txt', emit: verbiage_ch2\n\nscript:\n\"\"\"\n echo -n $cheers\n echo -n $cheers > ${cheers}.txt\n \"\"\"\n}\n\nworkflow {\ninput = params.input_text.trim().split(',')\nChannel.from(input) | SAY_HELLO\n}\n
"},{"location":"docs/testcases/nextflow_pipeline/#nf-test-script","title":"nf-test script","text":"Create a new file and name it pipeline.nf.test
.
nextflow_pipeline {\n\nname \"Test Pipeline with 1 process\"\nscript \"pipeline.nf\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nparams {\ninput_text = \"hello,nf-test\"\n}\n}\n\nthen {\nassert workflow.success\nassert workflow.trace.tasks().size() == 2\n}\n\n}\n\n}\n
"},{"location":"docs/testcases/nextflow_pipeline/#execute-test","title":"Execute test","text":"nf-test init\nnf-test test pipeline.nf.test\n
"},{"location":"docs/testcases/nextflow_process/","title":"Process Testing","text":"nf-test allows to test each process defined in a module file. Please checkout the CLI to generate a process test.
"},{"location":"docs/testcases/nextflow_process/#syntax","title":"Syntax","text":"nextflow_process {\n\nname \"<NAME>\"\nscript \"<PATH/TO/NEXTFLOW_SCRIPT.nf>\"\nprocess \"<PROCESS_NAME>\"\n\ntest(\"<TEST_NAME>\") {\n\n}\n}\n
Script paths that start with ./
or ../
are considered relative paths. These paths are resolved based on the location of the test script. Relative paths are beneficial when you want to reference files or directories located within the same directory as your test script or in a parent directory. These paths provide a convenient way to access files without specifying the entire path.
The process
object can be used in asserts to check its status or error messages.
// process status\nassert process.success\nassert process.failed\nassert process.exitStatus == 0\n\n// Analyze Nextflow trace file\nassert process.trace.tasks().size() == 1\n\n// process error message\nassert process.errorReport.contains(\"....\")\n\n// process.stdout returns a list containing all lines from stdout\nassert process.stdout.contains(\"Hello World\")\n
"},{"location":"docs/testcases/nextflow_process/#output-channels","title":"Output Channels","text":"The process.out
object provides access to the content of all named output Channels (see Nextflow emit
):
// channel exists\nassert process.out.my_channel != null\n\n// channel contains 3 elements\nassert process.out.my_channel.size() == 3\n\n// first element is \"hello\"\nassert process.out.my_channel.get(0) == \"hello\"\n
Channels that lack explicit names can be addressed using square brackets and the corresponding index. This indexing method provides a straightforward way to interact with channels without the need for predefined names. To access the first output channel, you can use the index [0] as demonstrated below:
// channel exists\nassert process.out[0] != null\n\n// channel contains 3 elements\nassert process.out[0].size() == 3\n\n// first element is \"hello\"\nassert process.out[0].get(0) == \"hello\"\n
"},{"location":"docs/testcases/nextflow_process/#example","title":"Example","text":""},{"location":"docs/testcases/nextflow_process/#nextflow-script","title":"Nextflow script","text":"Create a new file and name it say_hello.nf
.
#!/usr/bin/env nextflow\nnextflow.enable.dsl=2\n\nprocess SAY_HELLO {\ninput:\nval cheers\n\noutput:\nstdout emit: verbiage_ch\npath '*.txt', emit: verbiage_ch2\n\nscript:\n\"\"\"\n echo -n $cheers\n echo -n $cheers > ${cheers}.txt\n \"\"\"\n}\n
"},{"location":"docs/testcases/nextflow_process/#nf-test-script","title":"nf-test script","text":"Create a new file and name it say_hello.nf.test
.
nextflow_process {\n\nname \"Test Process SAY_HELLO\"\nscript \"say_hello.nf\"\nprocess \"SAY_HELLO\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = Channel.from('hello','nf-test')\n \"\"\"\n}\n}\n\nthen {\n\nassert process.success\nassert process.trace.tasks().size() == 2\n\nwith(process.out.verbiage_ch2) {\nassert size() == 2\nassert path(get(0)).readLines().size() == 1\nassert path(get(1)).readLines().size() == 1\nassert path(get(1)).md5 == \"4a17df7a54b41a84df492da3f1bab1e3\"\n}\n\n}\n\n}\n}\n
"},{"location":"docs/testcases/nextflow_process/#execute-test","title":"Execute test","text":"nf-test init\nnf-test test say_hello.nf.test\n
"},{"location":"docs/testcases/nextflow_workflow/","title":"Workflow Testing","text":"nf-test also allows to test a specific workflow. Please checkout the CLI to generate a workflow test.
"},{"location":"docs/testcases/nextflow_workflow/#syntax","title":"Syntax","text":"nextflow_workflow {\n\nname \"<NAME>\"\nscript \"<PATH/TO/NEXTFLOW_SCRIPT.nf>\"\nworkflow \"<WORKFLOW_NAME>\"\n\ntest(\"<TEST_NAME>\") {\n\n}\n}\n
Script paths that start with ./
or ../
are considered relative paths. These paths are resolved based on the location of the test script. Relative paths are beneficial when you want to reference files or directories located within the same directory as your test script or in a parent directory. These paths provide a convenient way to access files without specifying the entire path.
The workflow
object can be used in asserts to check its status, error messages or traces.
// workflow status\nassert workflow.success\nassert workflow.failed\nassert workflow.exitStatus == 0\n\n// workflow error message\nassert workflow.errorReport.contains(\"....\")\n\n// trace\n//returns a list containing succeeded tasks\nassert workflow.trace.succeeded().size() == 3\n\n//returns a list containing failed tasks\nassert workflow.trace.failed().size() == 0\n\n//returns a list containing all tasks\nassert workflow.trace.tasks().size() == 3\n\n// workflow.stdout returns a list containing all lines from stdout\nassert workflow.stdout.contains(\"Hello World\")\n
"},{"location":"docs/testcases/nextflow_workflow/#output-channels","title":"Output Channels","text":"The workflow.out
object provides access to the content of all named output Channels (see Nextflow emit
):
// channel exists\nassert workflow.out.my_channel != null\n\n// channel contains 3 elements\nassert workflow.out.my_channel.size() == 3\n\n// first element is \"hello\"\nassert workflow.out.my_channel.get(0) == \"hello\"\n
"},{"location":"docs/testcases/nextflow_workflow/#example","title":"Example","text":""},{"location":"docs/testcases/nextflow_workflow/#nextflow-script","title":"Nextflow script","text":"Create a new file and name it trial.nf
.
#!/usr/bin/env nextflow\nnextflow.enable.dsl=2\n\nprocess sayHello {\ninput:\nval cheers\n\noutput:\nstdout emit: verbiage_ch\npath '*.txt', emit: verbiage_ch2\n\nscript:\n\"\"\"\n echo -n $cheers\n echo -n $cheers > ${cheers}.txt\n \"\"\"\n}\n\nworkflow trial {\ntake: things\nmain:\nsayHello(things)\nsayHello.out.verbiage_ch.view()\nemit:\ntrial_out_ch = sayHello.out.verbiage_ch2\n}\n\nworkflow {\nChannel.from('hello','nf-test') | trial\n}\n
"},{"location":"docs/testcases/nextflow_workflow/#nf-test-script","title":"nf-test script","text":"Create a new file and name it trial.nf.test
.
nextflow_workflow {\n\nname \"Test Workflow Trial\"\nscript \"trial.nf\"\nworkflow \"trial\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nworkflow {\n\"\"\"\n input[0] = Channel.from('hello','nf-test')\n \"\"\"\n}\n}\n\nthen {\n\nassert workflow.success\n\nwith(workflow.out.trial_out_ch) {\nassert size() == 2\nassert path(get(0)).readLines().size() == 1\nassert path(get(1)).readLines().size() == 1\nassert path(get(1)).md5 == \"4a17df7a54b41a84df492da3f1bab1e3\"\n}\n\n}\n\n}\n\n}\n
"},{"location":"docs/testcases/nextflow_workflow/#execute-test","title":"Execute test","text":"nf-test init\nnf-test test trial.nf.test\n
"},{"location":"docs/testcases/params/","title":"Params Dictionary","text":"The params
block is optional and is a simple map that can be used to overwrite Nextflow's input params
. The params
block is located in the when
block of a testcase. You can set params manually:
when {\nparams {\noutdir = \"output\"\n}\n}\n
It is also possible to set nested params using the same syntax as in your Nextflow script:
when {\nparams {\noutput {\ndir = \"output\"\n}\n}\n}\n
The params
map can also be used in the then
block:
then {\nassert params.output == \"output\"\n}\n
"},{"location":"docs/testcases/params/#load-params-from-files","title":"Load params from files","text":"In addition, you can load the params
from a JSON file:
when {\nparams {\nload(\"$baseDir/tests/params.json\")\n}\n}\n
or from a YAML file:
when {\nparams {\nload(\"$baseDir/tests/params.yaml\")\n}\n}\n
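For illustration, a minimal params.json matching the examples above might look like this (the key is hypothetical):
{\n \"outdir\": \"results\"\n}\n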
nf-test allows you to combine both techniques, so it is possible to overwrite one or more params
from the JSON file:
when {\nparams {\nload(\"$baseDir/tests/params.json\")\noutputDir = \"new/output/path\"\n}\n}\n
"}]}
\ No newline at end of file
+{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":""},{"location":"#nf-test-a-simple-testing-framework-for-nextflow-pipelines","title":"nf-test: A simple testing framework for Nextflow pipelines","text":"Test your production ready\u00a0Nextflow\u00a0pipelines in an efficient and automated way. \ud83d\ude80
Getting Started Installation Source
A DSL similar to Nextflow; describes expected behavior using 'when' and 'then' blocks; an abundance of functions for writing elegant and readable assertions; snapshots for writing tests for complex data structures; commands for generating boilerplate code; a test runner that executes the test scripts; easy installation on CI systems.
"},{"location":"#unit-testing","title":"Unit testing","text":"nf-test enables you to test all components of your data science pipeline: from end-to-end testing of the entire pipeline to specific tests of processes or even custom functions. This ensures that all testing is conducted consistently across your project.
Pipeline Process Functionsnextflow_pipeline {\n\nname \"Test Hello World\"\nscript \"nextflow-io/hello\"\n\ntest(\"hello world example should start 4 processes\") {\nexpect {\nwith(workflow) {\nassert success\nassert trace.tasks().size() == 4\nassert \"Ciao world!\" in stdout\nassert \"Bonjour world!\" in stdout\nassert \"Hello world!\" in stdout\nassert \"Hola world!\" in stdout\n}\n}\n}\n\n}\n
nextflow_process {\n\nname \"Test Process SALMON_INDEX\"\nscript \"modules/local/salmon_index.nf\"\nprocess \"SALMON_INDEX\"\n\ntest(\"Should create channel index files\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = file(\"test_data/transcriptome.fa\")\n \"\"\"\n}\n}\n\nthen {\n//check if test case succeeded\nassert process.success\n//analyze trace file\nassert process.trace.tasks().size() == 1\nwith(process.out) {\n// check if emitted output has been created\nassert index.size() == 1\n// count amount of created files\nassert path(index.get(0)).list().size() == 16\n// parse info.json file\ndef info = path(index.get(0)+'/info.json').json\nassert info.num_kmers == 375730\nassert info.seq_length == 443050\n//verify md5 checksum\nassert path(index.get(0)+'/info.json').md5 == \"80831602e2ac825e3e63ba9df5d23505\"\n}\n}\n\n}\n\n}\n
nextflow_function {\n\nname \"Test functions\"\nscript \"functions.nf\"\n\ntest(\"Test function1\") {\nfunction \"function1\"\n...\n}\n\ntest(\"Test function2\") {\nfunction \"function2\"\n...\n}\n}\n
Learn more about pipeline tests, workflow tests, process tests and function tests in the documentation.
"},{"location":"#snapshot-testing","title":"Snapshot testing","text":"nf-test supports snapshot testing and automatically generates a baseline set of unit tests to safeguard against regressions caused by changes.nf-test captures a snapshot of output channels or any other objects and subsequently compares them to reference snapshot files stored alongside the tests. If the two snapshots do not match, the test will fail
Learn more
"},{"location":"#highly-extendable","title":"Highly extendable","text":"nf-test supports the inclusion of third-party libraries (e.g., jar files) or functions from Groovy files. This can be done to either extend its functionality or to prevent code duplication, thus maintaining simplicity in the logic of test cases. Given that many assertions are specific to use cases, nf-test incorporates a plugin system that allows for the extension of existing classes with custom methods. For example FASTA file support.
Learn more
"},{"location":"#support-us","title":"Support us","text":"We love stars as much as we love rockets! So make sure you star us on GitHub.
Star
Show the world your Nextflow pipeline is using nf-test and add the following badge to your README.md
:
[![nf-test](https://img.shields.io/badge/tested_with-nf--test-337ab7.svg)](https://code.askimed.com/nf-test)\n
"},{"location":"#about","title":"About","text":"nf-test has been created by Lukas Forer and Sebastian Sch\u00f6nherr and is MIT Licensed.
Thanks to all the contributors to help us maintaining and improving nf-test!
"},{"location":"about/","title":"About","text":"nf-test has been created by Lukas Forer and Sebastian Sch\u00f6nherr and is MIT Licensed.
"},{"location":"about/#contributors","title":"Contributors","text":""},{"location":"about/#statistics","title":"Statistics","text":"
GitHub:
Bioconda:
"},{"location":"installation/","title":"Installation","text":"nf-test has the same requirements as Nextflow and can be used on POSIX compatible systems like Linux or OS X. You can install nf-test using the following command:
curl -fsSL https://code.askimed.com/install/nf-test | bash\n
If you don't have curl installed, you could use wget:
wget -qO- https://code.askimed.com/install/nf-test | bash\n
It will create the nf-test
executable file in the current directory. Optionally, move the nf-test
file to a directory accessible by your $PATH
variable.
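For example (the target directory is illustrative):
mv nf-test /usr/local/bin/\n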
Test the installation with the following command:
nf-test version\n
You should see something like this:
\ud83d\ude80 nf-test 0.5.0\nhttps://code.askimed.com/nf-test\n(c) 2021 -2022 Lukas Forer and Sebastian Schoenherr\n\nNextflow Runtime:\n\n N E X T F L O W\n version 21.10.6 build 5660\n created 21-12-2021 16:55 UTC (17:55 CEST)\n cite doi:10.1038/nbt.3820\n http://nextflow.io\n
Now you are ready to write your first testcase.
"},{"location":"installation/#install-a-specific-version","title":"Install a specific version","text":"If you want to install a specific version pass it to the install script as so
curl -fsSL https://code.askimed.com/install/nf-test | bash -s 0.7.0\n
"},{"location":"installation/#manual-installation","title":"Manual installation","text":"All releases are also available on Github.
"},{"location":"installation/#nextflow-binary-not-found","title":"Nextflow Binary not found?","text":"If you get an error message like this, then nf-test was not able to detect your Nextflow installation.
\ud83d\ude80 nf-test 0.5.0\nhttps://code.askimed.com/nf-test\n(c) 2021 -2022 Lukas Forer and Sebastian Schoenherr\n\nNextflow Runtime:\nError: Nextflow Binary not found. Please check if Nextflow is in a directory accessible by your $PATH variable or set $NEXTFLOW_HOME.\n
To solve this issue you have two possibilities: (1) make sure the Nextflow binary is in a directory accessible by your $PATH
variable, or (2) set the environment variable NEXTFLOW_HOME
to the directory that contains the Nextflow binary. To update an existing nf-test installation to the latest version, run the following command:
nf-test update\n
"},{"location":"installation/#compiling-from-source","title":"Compiling from source","text":"To compile nf-test from source you shall have maven installed. This will produce a nf-test/target/nf-test.jar
file.
git clone git@github.com:askimed/nf-test.git\ncd nf-test\nmvn install\n
To use the newly compiled nf-test.jar
, update the nf-test
bash script that is on your PATH to point to the new .jar
file. First locate it with which nf-test
, and then modify APP_HOME
and APP_JAR
vars at the top: #!/bin/bash\nAPP_HOME=\"/PATH/TO/nf-test/target/\"\nAPP_JAR=\"nf-test.jar\"\nAPP_UPDATE_URL=\"https://code.askimed.com/install/nf-test\"\n...\n
"},{"location":"resources/","title":"Resources","text":"This page collects videos and blog posts about nf-test created by the community. Have you authored a blog post or given a talk about nf-test? Feel free to contact us, and we will be delighted to feature it here.
"},{"location":"resources/#nf-test-a-simple-test-framework-specifically-tailored-for-nextflow-pipelines","title":"nf-test, a simple test framework specifically tailored for Nextflow pipelines","text":"Sateesh Peri does a hands-on exploration of nf-test, a simple test framework specifically tailored for Nextflow pipelines.
Slides to follow along can be found here.
The presentation was recorded as part of the Workflows Community Meetup - All Things Groovy at the Wellcome Genome Campus.
"},{"location":"resources/#nf-corebytesize-nf-test","title":"nf-core/bytesize: nf-test","text":"Edmund Miller shares with us his impressions about nf-test from a user perspective. nf-test is a simple test framework for Nextflow pipelines.
The presentation was recorded as part of nf-core/bytesize.
"},{"location":"resources/#episode-8-nf-test-mentorships-and-debugging-resume","title":"Episode 8: nf-test, mentorships and debugging resume","text":"Phil Ewels, Chris Hakkaart and Marcel Ribeiro-Dantas chat about the nf-test framework for testing Nextflow pipelines.
The presentation was part of the \"News & Views\" episode of Channels (Nextflow Podcast).
"},{"location":"resources/#blog-post-a-simple-test-framework-for-nextflow-pipelines","title":"Blog post: A simple test framework for Nextflow pipelines","text":"Discover how nf-test originated from the need to efficiently and automatically test production-ready Nextflow pipelines.
Read blog post
"},{"location":"docs/configuration/","title":"Configuration","text":""},{"location":"docs/configuration/#setup-test-profile","title":"Setup test profile","text":"To run your test using a specific Nextflow profile, you can use the --profile
argument on the command line or define a default profile in nf-test.config
.
nf-test.config
","text":"This config file contains settings for nf-test.
config {\n// location for all nf-tests\ntestsDir \"tests\"\n// nf-test directory including temporary files for each test\nworkDir \".nf-test\"\n// location of an optional nextflow.config file specific for executing tests\nconfigFile \"tests/nextflow.config\"\n// location of library folder that is added automatically to the classpath\nlibDir \"tests/lib\" // run all test with the defined docker profile from the main nextflow.config\nprofile \"docker\"\n// disable tracing options in case container does not include `procps` Linux tool.\nwithTrace false\n//disable sorted channels\nautoSort false\n// add Nextflow options\noptions \"-dump-channels -stub-run\"\n}\n
"},{"location":"docs/configuration/#testsnextflowconfig","title":"tests/nextflow.config
","text":"This optional nextflow.config
file is used to execute tests. This is a good place to set default params
for all your tests. Example number of threads:
params {\n// run all tests with 1 threads\nthreads = 1\n}\n
"},{"location":"docs/configuration/#configuration-for-tests","title":"Configuration for tests","text":"nf-test allows to set an additional configuration for a testsuite:
nextflow_process {\n\n name \"Test Process...\"\n script \"main.nf\"\n process \"my_process\"\n config \"path/to/test/nextflow.config\"\n autoSort false\n options \"-dump-channels\"\n ...\n\n}\n
It is also possible to overwrite the config
, autoSort
or Nextflow properties (e.g. options \"-dump-channels\"
) for a specific test. Depending on the used Nextflow option, also add the --debug
nf-test option on the command-line to see the addtional output.
nextflow_process {\n\n test(\"my test\") {\n\n config \"path/to/test/nextflow.config\"\n autoSort false\n options \"-dump-channels\"\n ...\n\n }\n\n}\n
"},{"location":"docs/getting-started/","title":"Getting started","text":"This guide helps you to understand the concepts of nf-test and to write your first test cases. Before you start, please check if you have installed nf-test properly on your computer. Also, this guide assumes that you have a basic knowledge of Groovy and unit testing. The Groovy documentation is the best place to learn its syntax.
"},{"location":"docs/getting-started/#lets-get-started","title":"Let's get started","text":"To show the power of nf-test, we adapted a recently published proof of concept Nextflow pipeline. We adapted the pipeline to the new DSL2 syntax using modules. First, open the terminal and clone our test pipeline:
# clone nextflow pipeline\ngit clone https://github.com/askimed/nf-test-examples\n\n# enter project directory\ncd nf-test-examples\n
The pipeline consists of three modules (salmon.index.nf
, salmon_align_quant.nf
,fastqc.nf
). Here, we use the salmon.index.nf
process to create a test case from scratch. This process takes a reference as an input and creates an index using salmon.
Before creating test cases, we use the init
command to setup nf-test.
//Init command has already been executed for our repository\nnf-test init\n
The init
command creates the following files: nf-test.config
and the .nf-test/tests
folder.
In the configuration section you can learn more about these files and how to customize the directory layout.
"},{"location":"docs/getting-started/#create-your-first-test","title":"Create your first test","text":"The generate
command helps you to create a skeleton test code for a Nextflow process or the complete pipeline/workflow.
Here we generate a test case for the process salmon.index.nf
:
# delete already existing test case\nrm tests/modules/local/salmon_index.nf.test\nnf-test generate process modules/local/salmon_index.nf\n
This command creates a new file tests/modules/local/salmon_index.nf
with the following content:
nextflow_process {\n\nname \"Test Process SALMON_INDEX\"\nscript \"modules/local/salmon_index.nf\"\nprocess \"SALMON_INDEX\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nparams {\n// define parameters here. Example:\n// outdir = \"tests/results\"\n}\nprocess {\n\"\"\"\n // define inputs of the process here. Example:\n // input[0] = file(\"test-file.txt\")\n \"\"\"\n}\n}\n\nthen {\nassert process.success\nwith(process.out) {\n// Make assertions about the content and elements of output channels here. Example:\n// assert out_channel != null\n}\n}\n\n}\n\n}\n
The generate
command filled automatically the name, script and process of our test case as well as created a skeleton for your first test
method. Typically you create one file per process and use different test
methods to describe the expected behaviour of the process.
This test
has a name, a when
and a then
closure (when/then closures are required here, since inputs need to be defined). The when
block describes the input parameters of the workflow or the process. nf-test executes the process with exactly these parameters and parses the content of the output channels. Then, it evaluates the assertions defined in the then
block to check if content of the output channels matches your expectations.
when
block","text":"The when
block describes the input of the process and/or the Nextflow params
.
The params
block is optional and is a simple map that can be used to override Nextflow's input params
.
The process
block is a multi-line string. The input
array can be used to set the different inputs arguments of the process. In our example, we only have one input that expects a file. Let us update the process
block by setting the first element of the input
array to the path of our reference file:
when {\nparams {\noutdir = \"output\"\n}\nprocess {\n\"\"\"\n // Use transcriptome.fa as a first input paramter for our process\n input[0] = file(\"test_data/transcriptome.fa\")\n \"\"\"\n}\n}\n
Everything which is defined in the process block is later executed in a Nextflow script (created automatically to test your process). Therefore, you can use every Nextflow specific function or command to define the values of the input array (e.g. Channels, files, paths, etc.).
"},{"location":"docs/getting-started/#the-then-block","title":"Thethen
block","text":"The then
block describes the expected output channels of the process when we execute it with the input parameters defined in the when
block.
The then
block typically contains mainly assertions to check assumptions (e.g. the size and the content of an output channel). However, this block accepts every Groovy script. This means you can also import third party libraries to define very specific assertions.
nf-test automatically loads all output channels of the process and all their items into a map named process.out
. You can then use this map to formulate your assertions.
For example, in the salmon_index
process we expect to get one process executed and 16 files created. But we also want to check the md5 sum and want to look into the actual JSON file. Let us update the then
section with some assertions that describe our expectations:
then {\n//check if test case succeeded\nassert process.success\n//analyze trace file\nassert process.trace.tasks().size() == 1\nwith(process.out) {\n// check if emitted output has been created\nassert index.size() == 1\n// count amount of created files\nassert path(index.get(0)).list().size() == 16\n// parse info.json file using a json parser provided by nf-test\ndef info = path(index.get(0)+'/info.json').json\nassert info.num_kmers == 375730\nassert info.seq_length == 443050\nassert path(index.get(0)+'/info.json').md5 == \"80831602e2ac825e3e63ba9df5d23505\"\n}\n}\n
The items of a channel are always sorted by nf-test. This provides a deterministic order inside the channel and enables you to write reproducible tests.
"},{"location":"docs/getting-started/#your-first-test-specification","title":"Your first test specification","text":"You can update the name of the test method to something that gives us later a good description of our specification. When we put everything together, we get the following full working test specification:
nextflow_process {\n\nname \"Test Process SALMON_INDEX\"\nscript \"modules/local/salmon_index.nf\"\nprocess \"SALMON_INDEX\"\n\ntest(\"Should create channel index files\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = file(\"test_data/transcriptome.fa\")\n \"\"\"\n}\n}\n\nthen {\n//check if test case succeeded\nassert process.success\n//analyze trace file\nassert process.trace.tasks().size() == 1\nwith(process.out) {\n// check if emitted output has been created\nassert index.size() == 1\n// count amount of created files\nassert path(index.get(0)).list().size() == 16\n// parse info.json file\ndef info = path(index.get(0)+'/info.json').json\nassert info.num_kmers == 375730\nassert info.seq_length == 443050\nassert path(index.get(0)+'/info.json').md5 == \"80831602e2ac825e3e63ba9df5d23505\"\n}\n}\n}\n}\n
"},{"location":"docs/getting-started/#run-your-first-test","title":"Run your first test","text":"Now, the test
command can be used to run your test:
nf-test test tests/modules/local/salmon_index.nf.test --profile docker\n
"},{"location":"docs/getting-started/#specifying-profiles","title":"Specifying profiles","text":"In this case, the docker
profile defined in the Nextflow pipeline is used to execute the test. The profile is set using the --profile
parameter, but you can also define a default profile in the configuration file.
Congratulations! You created you first nf-test specification.
"},{"location":"docs/getting-started/#nextflow-options","title":"Nextflow options","text":"nf-test also allows to specify Nextflow options (e.g. -dump-channels
, -stub-run
) globally in the nf-test.config file or by adding an option to the test suite or the actual test. Read more about this in the configuration documentation.
nextflow_process {\n\n options \"-dump-channels\"\n\n}\n
"},{"location":"docs/getting-started/#whats-next","title":"What's next?","text":"All test cases described in this documentation can be found in the nf-test-examples repository.
"},{"location":"docs/nftest_pipelines/#gwas-regenie-pipeline","title":"GWAS-Regenie Pipeline","text":"To show the power of nf-test, we applied nf-test to a Nextflow pipeline that performs whole genome regression modelling using regenie. Please click here to learn more about this pipeline and checkout different kind of test cases.
"},{"location":"docs/running-tests/","title":"Running tests","text":""},{"location":"docs/running-tests/#basic-usage","title":"Basic usage","text":"The easiest way to use nf-test is to run the following command. This command will run all tests under the tests
directory. The testDir
can be changed in the nf-test.config
.
nf-test test\n
"},{"location":"docs/running-tests/#execute-specific-tests","title":"Execute specific tests","text":"You can also specify a list of tests, which should be executed.
nf-test test tests/modules/local/salmon_index.nf.test tests/modules/bwa_index.nf.test\n\nnf-test test tests/modules tests/modules/bwa_index.nf.test\n
"},{"location":"docs/running-tests/#tag-tests","title":"Tag tests","text":"nf-test provides a simple tagging mechanism that allows to execute tests by name or by tag.
Tags can be defined for each testsuite or for each testcase using the new tag
directive:
nextflow_process {\n\n name \"suite 1\"\n tag \"tag1\"\n\n test(\"test 1\") {\n tag \"tag2\"\n tag \"tag3\" \n ...\n }\n\n test(\"test 2\") {\n\n tag \"tag4\"\n tag \"tag5\" \n ...\n\n }\n}\n
For example, to execute all tests with tag2
use the following command.
nf-test test --tag tag2 # collects test1\n
Names are automatically added to tags. This enables to execute suits or tests directly.
nf-test test --tag \"suite 1\" # collects test1 and test2\n
When more tags are provided,\u00a0all tests that match at least one tag will be executed. Tags are also not case-sensitive, both lines will result the same tests.
nf-test test --tag tag3,tag4 # collects test1 and test2\nnf-test test --tag TAG3,TAG4 # collects test1 and test2\n
"},{"location":"docs/running-tests/#create-a-tap-output","title":"Create a TAP output","text":"To run all tests and create a report.tap
file, use the following command.
nf-test test --tap report.tap\n
"},{"location":"docs/running-tests/#run-test-by-its-hash-value","title":"Run test by its hash value","text":"To run a specific test using its hash, the following command can be used. The hash value is generated during its first execution.
nf-test test tests/main.nf.test@d41119e4\n
"},{"location":"docs/assertions/assertions/","title":"Assertions","text":"Writing test cases means formulating assumptions by using assertions. Groovy\u2019s power assert provides a detailed output when the boolean expression validates to false. nf-test provides several extensions and commands to simplify the work with Nextflow channels. Here we summarise how nextflow and nf-test handles channels and provide examples for the tools that nf-test
provides:
with
: assert the contents of an item in a channel by indexcontains
: assert the contents of an item in the channel is present anywhere in the channelassertContainsInAnyOrder
: order-agnostic assertion of the contents of a channelNextflow channels emit (in a random order) a single value or a tuple of values.
Channels that emit a single item produce an unordered list of objects, List<Object>
, for example:
process.out.outputCh = ['Hola', 'Hello', 'Bonjour']\n
Channels that contain Nextflow file
values have a unique path each run. For Example:
process.out.outputCh = ['/.nf-test/tests/c563c/work/65/85d0/Hola.json', '/.nf-test/tests/c563c/work/65/fa20/Hello.json', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json']\n
Channels that emit tuples produce an unordered list of ordered objects, List<List<Object>>
:
process.out.outputCh = [\n['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json'], ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'], ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json']\n]\n
Assertions by channel index are made possible through sorting of the nextflow channel. The sorting is performed automatically by nf-test
prior to launch of the then
closure via integer, string and path comparisons. For example, the above would be sorted by nf-test
:
process.out.outputCh = [\n['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n
"},{"location":"docs/assertions/assertions/#using-with","title":"Using with
","text":"This assertions...
assert process.out.imputed_plink2\nassert process.out.imputed_plink2.size() == 1\nassert process.out.imputed_plink2.get(0).get(0) == \"example.vcf\"\nassert process.out.imputed_plink2.get(0).get(1) ==~ \".*/example.vcf.pgen\"\nassert process.out.imputed_plink2.get(0).get(2) ==~ \".*/example.vcf.psam\"\nassert process.out.imputed_plink2.get(0).get(3) ==~ \".*/example.vcf.pvar\"\n
... can be written by using with(){}
to improve readability:
assert process.out.imputed_plink2\nwith(process.out.imputed_plink2) {\nassert size() == 1\nwith(get(0)) {\nassert get(0) == \"example.vcf\"\nassert get(1) ==~ \".*/example.vcf.pgen\"\nassert get(2) ==~ \".*/example.vcf.psam\"\nassert get(3) ==~ \".*/example.vcf.pvar\"\n}\n}\n
"},{"location":"docs/assertions/assertions/#using-contains-to-assert-an-item-in-the-channel-is-present","title":"Using contains
to assert an item in the channel is present","text":"Groovy's contains and collect methods can be used to flexibly assert an item exists in the channel output.
For example, the below represents a channel that emits a two-element tuple, a string and a json file:
/*\ndef process.out.outputCh = [\n ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n ['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n*/\n
To assert the channel contains one of the tuples, parse the json and assert:
testData = process.out.outputCh.collect { greeting, jsonPath -> [greeting, path(jsonPath).json] } assert testData.contains(['Hello', path('./myTestData/Hello.json').json])\n
To assert a subset of the tuple data, filter the channel using collect. For example, to assert the greeting only:
testData = process.out.outputCh.collect { greeting, jsonPath -> greeting } assert testData.contains('Hello')\n
See the files page for more information on parsing and asserting various file types.
"},{"location":"docs/assertions/assertions/#using-assertcontainsinanyorder-for-order-agnostic-assertion-of-the-contents-of-a-channel","title":"UsingassertContainsInAnyOrder
for order-agnostic assertion of the contents of a channel","text":"assertContainsInAnyOrder(List<object> list1, List<object> list2)
performs an order agnostic assertion on channels contents and is available in every nf-test
closure. It is a binding for Hamcrest's assertContainsInAnyOrder.
Some example use-cases are provided below.
"},{"location":"docs/assertions/assertions/#channel-that-emits-strings","title":"Channel that emits strings","text":"// process.out.outputCh = ['Bonjour', 'Hello', 'Hola'] \n\ndef expected = ['Hola', 'Hello', 'Bonjour']\nassertContainsInAnyOrder(process.out.outputCh, expected)\n
"},{"location":"docs/assertions/assertions/#channel-that-emits-a-single-maps-eg-valmymap","title":"Channel that emits a single maps, e.g. val(myMap)","text":"/*\nprocess.out.outputCh = [\n [\n 'D': [10,11,12],\n 'C': [7,8,9]\n ],\n [\n 'B': [4,5,6],\n 'A': [1,2,3]\n ]\n]\n*/\n\ndef expected = [\n[\n'A': [1,2,3],\n'B': [4,5,6]\n],\n[\n'C': [7,8,9],\n'D': [10,11,12]\n]\n]\n\nassertContainsInAnyOrder(process.out.outputCh, expected)\n
"},{"location":"docs/assertions/assertions/#channel-that-emits-json-files","title":"Channel that emits json files","text":"See the files page for more information on parsing and asserting various file types.
Since the outputCh filepaths are different between consecutive runs, the files need to be read/parsed prior to comparison
/*\nprocess.out.outputCh = [\n '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json',\n '/.nf-test/tests/c563c/work/65/fa20/Hello.json',\n '/.nf-test/tests/c563c/work/65/85d0/Hola.json'\n]\n*/\n\ndef actual = process.out.outputCh.collect { filepath -> path(filepath).json }\ndef expected = [\npath('./myTestData/Hello.json').json,\npath('./myTestData/Hola.json').json,\npath('./myTestData/Bonjour.json').json,\n]\n\nassertContainsInAnyOrder(actual, expected)\n
"},{"location":"docs/assertions/assertions/#channel-that-emits-a-tuple-of-strings-and-json-files","title":"Channel that emits a tuple of strings and json files","text":"See the files page for more information on parsing and asserting various file types.
Since the ordering of items within the tuples are consistent, we can assert this case:
/*\nprocess.out.outputCh = [\n ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n ['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n*/\n\ndef actual = process.out.outputCh.collect { greeting, filepath -> [greeting, path(filepath).json] }\ndef expected = [\n['Hola', path('./myTestData/Hola.json').json], ['Hello', path('./myTestData/Hello.json').json],\n['Bonjour', path('./myTestData/Bonjour.json').json],\n]\n\nassertContainsInAnyOrder(actual, expected)\n
To assert the json only and ignore the strings:
/*\nprocess.out.outputCh = [\n ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n ['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n*/\n\ndef actual = process.out.outputCh.collect { greeting, filepath -> path(filepath).json }\ndef expected = [\npath('./myTestData/Hello.json').json, path('./myTestData/Hola.json').json,\npath('./myTestData/Bonjour.json').json\n]\n\nassertContainsInAnyOrder(actual, expected)\n
To assert the strings only and not the json files:
/*\nprocess.out.outputCh = [\n ['Bonjour', '/.nf-test/tests/c563c/work/65/b62f/Bonjour.json'],\n ['Hello', '/.nf-test/tests/c563c/work/65/fa20/Hello.json'],\n ['Hola', '/.nf-test/tests/c563c/work/65/85d0/Hola.json']\n]\n*/\n\ndef actual = process.out.outputCh.collect { greeting, filepath -> greeting }\ndef expected = ['Hello', 'Hola', 'Bonjour]\n\nassertContainsInAnyOrder(actual, expected)\n
"},{"location":"docs/assertions/assertions/#using-assertall","title":"Using assertAll
","text":"assertAll(Closure... closures)
ensures that all supplied closures do no throw exceptions. The number of failed closures is reported in the Exception message. This useful for efficient debugging of a set of test assertions from a single test run.
def a = 2\n\nassertAll(\n{ assert a==1 },\n{ a = 1/0 },\n{ assert a==2 },\n{ assert a==3 }\n)\n
The output will look like this: assert a==1\n||\n|false\n2\n\njava.lang.ArithmeticException: Division by zero\nAssertion failed:\n\nassert a==3\n||\n|false\n2\n\nFAILED (7.106s)\n\njava.lang.Exception: 3 of 4 assertions failed\n
"},{"location":"docs/assertions/fasta/","title":"FASTA Files","text":"0.7.0
The nft-fasta plugin extends path
by a fasta
property that can be used to read FASTA files into maps. nft-fasta supports also gzipped FASTA files.
To use the fasta
property you need to activate the nft-fasta
plugin in your nf-test.config
file:
config {\n plugins {\n load \"nft-fasta@1.0.0\"\n }\n}\n
More about plugins can be fond here.
"},{"location":"docs/assertions/fasta/#comparing-files","title":"Comparing files","text":"assert path('path/to/fasta1.fasta').fasta == path(\"path/to/fasta2.fasta'\").fasta\n
"},{"location":"docs/assertions/fasta/#work-with-individual-samples","title":"Work with individual samples","text":"def sequences = path('path/to/fasta1.fasta.gz').fasta\nassert \"seq1\" in sequences\nassert !(\"seq8\" in sequences)\nassert sequences.seq1 == \"AGTACGTAGTAGCTGCTGCTACGTGCGCTAGCTAGTACGTCACGACGTAGATGCTAGCTGACTCGATGC\"\n
"},{"location":"docs/assertions/files/","title":"Files","text":""},{"location":"docs/assertions/files/#md5-checksum","title":"md5 Checksum","text":"nf-test extends path
by a md5
property that can be used to compare the file content with an expected checksum:
assert path(process.out.out_ch.get(0)).md5 == \"64debea5017a035ddc67c0b51fa84b16\"\n
"},{"location":"docs/assertions/files/#json-files","title":"JSON Files","text":"nf-test supports comparison of JSON files and keys within JSON files. To assert that two JSON files contain the same keys and values:
assert path(process.out.out_ch.get(0)).json == path('./some.json').json\n
Individual keys can also be asserted: assert path(process.out.out_ch.get(0)).json.key == \"value\"\n
"},{"location":"docs/assertions/files/#yaml-files","title":"YAML Files","text":"nf-test supports comparison of YAML files and keys within YAML files. To assert that two YAML files contain the same keys and values:
assert path(process.out.out_ch.get(0)).yaml == path('./some.yaml').yaml\n
Individual keys can also be asserted: assert path(process.out.out_ch.get(0)).yaml.key == \"value\"\n
"},{"location":"docs/assertions/files/#gzip-files","title":"GZip Files","text":"nf-test extends path
by a linesGzip
property that can be used to read gzip compressed files.
assert path(process.out.out_ch.get(0)).linesGzip.size() == 5\nassert path(process.out.out_ch.get(0)).linesGzip.contains(\"Line Content\")\n
"},{"location":"docs/assertions/files/#filter-lines","title":"Filter lines","text":"The returned array can also be filtered by lines.
def lines = path(process.out.gzip.get(0)).linesGzip[0..5]\nassert lines.size() == 6\ndef lines = path(process.out.gzip.get(0)).linesGzip[0]\nassert lines.equals(\"MY_HEADER\")\n
"},{"location":"docs/assertions/files/#grep-lines","title":"Grep lines","text":"nf-test also provides the possibility to grep only specific lines with the advantage that only a subset of lines need to be read (especially helpful for larger files).
def lines = path(process.out.gzip.get(0)).grepLinesGzip(0,5)\nassert lines.size() == 6\ndef lines = path(process.out.gzip.get(0)).grepLineGzip(0)\nassert lines.equals(\"MY_HEADER\")\n
"},{"location":"docs/assertions/files/#snapshot-support","title":"Snapshot Support","text":"The possibility of filter lines from a *.gz file can also be combined with the snapshot functionality.
assert snapshot(\npath(process.out.gzip.get(0)).linesGzip[0]\n).match()\n
"},{"location":"docs/assertions/libraries/","title":"Using Third-Party Libraries","text":"nf-test supports including third party libraries (e.g. jar files ) or functions from groovy files to either extend it functionality or to avoid duplicate code and to keep the logic in test cases simple.
"},{"location":"docs/assertions/libraries/#using-local-groovy-files","title":"Using Local Groovy Files","text":"0.7.0 \u00b7
If nf-test detects a lib
folder in the directory of a tescase, then it adds it automatically to the classpath.
We have a Groovy script MyWordUtils.groovy
that contains the following class:
class MyWordUtils {\n\ndef static capitalize(String word){\nreturn word.toUpperCase();\n}\n\n}\n
We can put this file in a subfolder called lib
:
testcase_1\n\u251c\u2500\u2500 capitalizer.nf\n\u251c\u2500\u2500 capitalizer.test\n\u2514\u2500\u2500 lib\n \u2514\u2500\u2500 MyWordUtils.groovy\n
The file capitalizer.nf
contains the CAPITALIZER
process:
#!/usr/bin/env nextflow\nnextflow.enable.dsl=2\n\nprocess CAPITALIZER {\ninput:\nval cheers\noutput:\nstdout emit: output\nscript:\nprintln \"$cheers\".toUpperCase()\n\"\"\"\n \"\"\"\n\n}\n
Next, we can use this class in the capitalizer.nf.test
like every other class that is provided by nf-test or Groovy itself:
nextflow_process {\n\nname \"Test Process CAPITALIZER\"\nscript \"capitalizer.nf\"\nprocess \"CAPITALIZER\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = \"world\"\n \"\"\"\n}\n}\n\nthen {\nassert process.success\nassert process.stdout.contains(MyWordUtils.capitalize('world'))\n}\n\n}\n\n}\n
If we have a project and we want to reuse libraries in multiple test cases, then we can store the class in the shared lib folder. Both test cases are now able to use MyWordUtils
:
tests\n\u251c\u2500\u2500 testcase_1\n \u251c\u2500\u2500 hello_1.nf\n \u251c\u2500\u2500 hello_1.nf.test\n\u251c\u2500\u2500 testcase_2\n \u251c\u2500\u2500 hello_2.nf\n \u251c\u2500\u2500 hello_2.nf.test\n\u2514\u2500\u2500 lib\n \u2514\u2500\u2500 MyWordUtils.groovy\n
The default location is tests/lib
. This folder location can be changed in nf-test config file.
It is also possible to use the --lib
parameter to add an additional folder to the classpath:
nf-test test tests/testcase_1/hello_1.nf.test --lib tests/mylibs\n
If multiple folders are used, the they need to be separate with a colon (like in Java or Groovy).
"},{"location":"docs/assertions/libraries/#using-local-jar-files","title":"Using Local Jar Files","text":"To integrate local jar files, you can either specify the path to the jar within the nf-test --lib
option
nf-test test test.nf.test --lib tests/lib/groovy-ngs-utils/groovy-ngs-utils.jar\n
or add it as follows to the nf-test.config
file:
libDir \"tests/lib:tests/lib/groovy-ngs-utils/groovy-ngs-utils.jar\"\n
You could then import the class and use it in the then
statement:
import gngs.VCF;\n\nnextflow_process {\n\nname \"Test Process VARIANT_CALLER\"\nscript \"variant_caller.nf\"\nprocess \"VARIANT_CALLER\"\n\ntest(\"Should run without failures\") {\n\nwhen {\n...\n}\n\nthen {\nassert process.success def vcf = VCF.parse(\"$baseDir/tests/test_data/NA12879.vcf.gz\")\nassert vcf.samples.size() == 10\nassert vcf.variants.size() == 20\n}\n\n}\n\n}\n
"},{"location":"docs/assertions/libraries/#using-maven-artifcats-with-grab","title":"Using Maven Artifcats with @Grab
","text":"nf-test supports the @Grab
annotation to include third-party libraries that are available in a maven repository. As the dependency is defined as a maven artifact, there is no local copy of the jar file needed and maven enables to include an exact version as well as provides an easy update process.
The following example uses the WordUtils
class from commons-lang
:
@Grab(group='commons-lang', module='commons-lang', version='2.4')\nimport org.apache.commons.lang.WordUtils\n\nnextflow_process {\n\nname \"Test Process CAPITALIZER\"\nscript \"capitalizer.nf\"\nprocess \"CAPITALIZER\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = \"world\"\n \"\"\"\n}\n}\n\nthen {\nassert process.success\nassert process.stdout.contains(WordUtils.capitalize('world'))\n}\n\n}\n\n}\n
"},{"location":"docs/assertions/regular-expressions/","title":"Regular Expressions","text":""},{"location":"docs/assertions/regular-expressions/#using-operator","title":"Using ==~
operator","text":"The operator ==~
can be used to check if a string matches a regular expression:
assert \"/my/full/path/to/process/dir/example.vcf.pgen\" ==~ \".*/example.vcf.pgen\"\n
"},{"location":"docs/assertions/snapshots/","title":"Snapshots","text":"0.7.0
Snapshots are a very useful tool whenever you want to make sure your output channels or output files do not change unexpectedly. This feature is heavily inspired by Jest.
A typical snapshot test case takes a snapshot of the output channels or any other object, then compares it to a reference snapshot file stored alongside the test (*.nf.test.snap
). The test will fail if the two snapshots do not match: either the change is unexpected, or the reference snapshot needs to be updated to the new output of a process, workflow, pipeline or function.
The snapshot
keyword creates a snapshot of the object and its match
method can then be used to check if it contains the expected data from the snapshot file. The following example shows how to create a snapshot of a workflow channel:
assert snapshot(workflow.out.channel1).match()\n
You can also create a snapshot of all output channels of a process:
assert snapshot(process.out).match()\n
Even the result of a function can be used:
assert snapshot(function.result).match()\n
The first time this test runs, nf-test creates a snapshot file. This is a JSON file that contains a serialized version of the provided object.
The snapshot file should be committed alongside code changes, and reviewed as part of your code review process. nf-test uses pretty-format to make snapshots human-readable during code review. On subsequent test runs, nf-test will compare the data with the previous snapshot. If they match, the test will pass. If they don't match, either the test runner found a bug in your code that should be fixed, or the implementation has changed and the snapshot needs to be updated.
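For illustration, a snapshot file for a single test might look roughly like this; the exact layout and any additional metadata fields depend on the nf-test version:
{\n    \"Should run without failures\": {\n        \"content\": [\n            {\n                \"0\": [\n                    \"hello\"\n                ]\n            }\n        ],\n        \"timestamp\": \"2023-10-10T12:00:00+0000\"\n    }\n}\n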
"},{"location":"docs/assertions/snapshots/#updating-snapshots","title":"Updating Snapshots","text":"When a snapshot test is failing due to an intentional implementation change, you can use the --update-snapshot
flag to re-generate snapshots for all failed tests.
nf-test test tests/main.nf.test --update-snapshot\n
"},{"location":"docs/assertions/snapshots/#cleaning-obsolete-snapshots","title":"Cleaning Obsolete Snapshots","text":"0.8.0
Over time, snapshots can become outdated, leading to inconsistencies in your testing process. To help you manage obsolete snapshots, nf-test generates a list of these obsolete keys. This list provides transparency into which snapshots are no longer needed and can be safely removed.
Running your tests with the --clean-snapshot
or --wipe-snapshot
option removes the obsolete snapshots from the snapshot file. This option is useful when you want to maintain the structure of your snapshot file but remove unused entries. It ensures that your snapshot file only contains the snapshots required for your current tests, reducing file bloat and improving test performance.
nf-test test tests/main.nf.test --clean-snapshot\n
Obsolete snapshots can only be detected when running all tests in a test file simultaneously, and when all tests pass. If you run a single test or if tests are skipped, nf-test cannot detect obsolete snapshots.
"},{"location":"docs/assertions/snapshots/#constructing-complex-snapshots","title":"Constructing Complex Snapshots","text":"It is also possible to include multiple objects into one snapshot:
assert snapshot(workflow.out.channel1, workflow.out.channel2).match()\n
Every object that is serializable can be included in snapshots. Therefore, you can even take a snapshot of the complete workflow or process object. This includes stdout, stderr, exit status, trace etc. and is the easiest way to create a test that checks all of these properties:
assert snapshot(workflow).match()\n
You can also include output files in a snapshot (e.g. useful in pipeline tests where no channels are available):
assert snapshot(\nworkflow,\npath(\"${params.outdir}/file1.txt\"),\npath(\"${params.outdir}/file2.txt\"),\npath(\"${params.outdir}/file3.txt\")\n).match()\n
By default, the snapshot has the same name as the test. You can also store a snapshot under a user-defined name. This enables you to use multiple snapshots in a single test and to separate them in a logical way. In the following example, a workflow snapshot is created and stored under the name \"workflow\".
assert snapshot(workflow).match(\"workflow\")\n
The next example creates a snapshot of two files and saves it under \"files\".
assert snapshot(path(\"${params.outdir}/file1.txt\"), path(\"${params.outdir}/file2.txt\")).match(\"files\")\n
You can also use helper methods to add objects to snapshots. For example, you can use the list()
method to add all files of a folder to a snapshot:
assert snapshot(workflow, path(params.outdir).list()).match()\n
"},{"location":"docs/assertions/snapshots/#file-paths","title":"File Paths","text":"If nf-test detects a path in the snapshot it automatically replace it by a unique fingerprint of the file that ensures the file content is the same. The fingerprint is default the md5 sum.
"},{"location":"docs/assertions/snapshots/#snapshot-differences","title":"Snapshot Differences","text":"0.8.0
By default, nf-test uses the diff
tool for comparing snapshots. It employs the following default arguments:
-y
: Enables side-by-side comparison mode.
-W 200
: Sets the maximum width for displaying the differences to 200 characters.
These default arguments are applied when no custom settings are specified.
If diff
is not installed on the system, nf-test will print the expected and found snapshots without highlighting differences.
Users have the flexibility to customize the arguments passed to the diff tool using an environment variable called NFT_DIFF_ARGS
. This environment variable allows you to modify the way the diff tool behaves when comparing snapshots.
To customize the arguments, follow these steps:
Set the NFT_DIFF_ARGS
environment variable with your desired arguments.
export NFT_DIFF_ARGS=\"<your_custom_arguments>\"\n
Run nf-test
to perform snapshot comparison, and it will utilize the custom arguments specified in NFT_DIFF_ARGS
.
nf-test
not only allows you to customize the arguments but also provides the flexibility to change the diff tool itself. This can be achieved by using the environment variable NFT_DIFF
.
As an example, you can change the diff tool to icdiff
, which supports features like colors. To switch to icdiff
, follow these steps:
Install icdiff
Set the NFT_DIFF
environment variable to icdiff
to specify the new diff tool.
export NFT_DIFF=\"icdiff\"\n
If needed, customize the arguments for icdiff
using NFT_DIFF_ARGS
as explained in the previous section:
export NFT_DIFF_ARGS=\"-N --cols 200 -L expected -L observed -t\"\n
Run nf-test
, and it will use icdiff
as the diff tool for comparing snapshots.
clean
command","text":""},{"location":"docs/cli/clean/#usage","title":"Usage","text":"nf-test clean\n
The clean
command removes the .nf-test
directory.
generate
command","text":""},{"location":"docs/cli/generate/#usage","title":"Usage","text":"nf-test generate <TEST_CASE_TYPE> <NEXTFLOW_FILES>\n
"},{"location":"docs/cli/generate/#supported-types","title":"Supported Types","text":""},{"location":"docs/cli/generate/#process","title":"process
","text":""},{"location":"docs/cli/generate/#workflow","title":"workflow
","text":""},{"location":"docs/cli/generate/#pipeline","title":"pipeline
","text":""},{"location":"docs/cli/generate/#function","title":"function
","text":""},{"location":"docs/cli/generate/#examples","title":"Examples","text":"Create a test case for a process:
nf-test generate process modules/local/salmon_index.nf\n
Create test cases for all processes in the folder modules
:
nf-test generate process modules/**/*.nf\n
Create a test case for a sub workflow:
nf-test generate workflow workflows/some_workflow.nf\n
Create a test case for the whole pipeline:
nf-test generate pipeline main.nf\n
Create a test case for each function in the file functions.nf
:
nf-test generate function functions.nf\n
"},{"location":"docs/cli/init/","title":"init
command","text":""},{"location":"docs/cli/init/#usage","title":"Usage","text":"nf-test init\n
The init
command sets up nf-test in the current directory.
The init
command creates the following files: nf-test.config
and tests/nextflow.config
. It also creates a folder tests
which is the home directory of your test code.
In the configuration section you can learn more about these files and how to customize the directory layout.
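For reference, a freshly generated nf-test.config looks similar to this sketch; see the configuration section for the full list of options:
config {\n\n    testsDir \"tests\"\n    workDir \".nf-test\"\n    configFile \"tests/nextflow.config\"\n    profile \"\"\n\n}\n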
"},{"location":"docs/cli/list/","title":"list
command","text":""},{"location":"docs/cli/list/#usage","title":"Usage","text":"list
command provides a convenient way to list all available test cases.
nf-test list [<NEXTFLOW_FILES>|<SCRIPT_FOLDERS>]\n
"},{"location":"docs/cli/list/#optional-arguments","title":"Optional Arguments","text":""},{"location":"docs/cli/list/#-tags","title":"--tags
","text":"Print a list of all used tags.
"},{"location":"docs/cli/list/#-format-json","title":"--format json
","text":"Print the list of tests or tags as json object.
"},{"location":"docs/cli/list/#-format-raw","title":"--format raw
","text":"Print the list of tests or tags as simple list without formatting.
"},{"location":"docs/cli/list/#-silent","title":"--silent
","text":"Hide program version and header infos.
"},{"location":"docs/cli/list/#-debug","title":"--debug
","text":"Show debugging infos.
"},{"location":"docs/cli/list/#examples","title":"Examples","text":"List test cases that can be found in the testDir
defined in the nf-test.config
file in the current working directory:
nf-test list\n
List test cases in specified test scripts and search specified directories for additional test scripts:
nf-test list tests/modules/local/salmon_index.nf.test tests/modules/bwa_index.nf.test\n\nnf-test list tests/modules tests/modules/bwa_index.nf.test\n
List all test cases as JSON:
nf-test list --format json --silent\n[\"/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@69b98c67\",\"/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@fdb6c1cc\",\"/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@d1c219eb\",\"/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@3c54e3cb\",...]\n
nf-test list --format raw --silent\n/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@69b98c67\n/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@fdb6c1cc\n/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@d1c219eb\n/Users/lukfor/Development/git/nf-gwas/tests/main.nf.test@3c54e3cb\n...\n
nf-test list --tags --format json --silent\n[\"fastqc\",\"snakemake\"]\n
nf-test list --tags --format raw --silent\nfastqc\nsnakemake\n
"},{"location":"docs/cli/test/","title":"test
command","text":""},{"location":"docs/cli/test/#usage","title":"Usage","text":"nf-test test [<NEXTFLOW_FILES>|<SCRIPT_FOLDERS>]\n
"},{"location":"docs/cli/test/#optional-arguments","title":"Optional Arguments","text":""},{"location":"docs/cli/test/#-profile-nextflow_profile","title":"--profile <NEXTFLOW_PROFILE>
","text":""},{"location":"docs/cli/test/#-debug","title":"--debug
","text":"The debug parameter prints out all available output channels which can be accessed in the then
clause.
--without-trace
","text":"The Linux tool procps
is required to run Nextflow tracing. In case your container does not provide this tool, you can also run nf-test without tracing. Please note that workflow.trace
is not available when running with this flag.
--tag <tag>
","text":"Execute only tests with the provided tag. Multiple tags can be used and have to be separated by commas (e.g. tag1,tag2
).
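For example, to run only tests tagged fastqc or snakemake (tag names are illustrative):
nf-test test --tag fastqc,snakemake\n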
--tap <filename>
","text":"Writes test results in TAP format to file.
"},{"location":"docs/cli/test/#-junitxml-filename","title":"--junitxml <filename>
","text":"Writes test results in JUnit XML format to file, which conforms to the standard schema.
"},{"location":"docs/cli/test/#examples","title":"Examples","text":"Run all test scripts that can be found in the testDir
defined in the nf-test.config
file in the current working directory:
nf-test test\n
Run all specified test scripts and search specified directories for additional test scripts:
nf-test test tests/modules/local/salmon_index.nf.test tests/modules/bwa_index.nf.test\n\nnf-test test tests/modules tests/modules/bwa_index.nf.test\n
Run a specific test using its hash:
nf-test test tests/main.nf.test@d41119e4\n
Run all tests and write results to report.tap
:
nf-test test --tap report.tap\n
0.7.0
The following plugin can be used as a boilerplate: https://github.com/askimed/nft-fasta
"},{"location":"docs/plugins/developing-plugins/#developing-plugins","title":"Developing Plugins","text":"A plugin has the possibility:
fasta
to class Path
). It uses Groovy's ExtensionModule concept. Important: the method has to be static. One class can provide multiple methods.// com.askimed.nf.test.fasta.PathExtension\npublic class PathExtension {\n//can be used as: path(filename).fasta\npublic static Object getFasta(Path self) {\nreturn FastaUtil.readAsMap(self);\n}\n\n}\n
A plugin can also provide new top-level methods (e.g. helloFasta()):// com.askimed.nf.test.fasta.Methods\npublic class Methods {\n\n//can be used as: helloFasta()\npublic static void helloFasta() {\nSystem.out.println(\"Hello FASTA\");\n}\n\n}\n
"},{"location":"docs/plugins/developing-plugins/#manifest-file","title":"Manifest file","text":"You need to create a file META-INF/nf-test-plugin
(in your resources). This file contains metadata about the plugin and both classes can now be registered by using the extensionClasses
and extensionMethods
properties.
moduleName=nft-my-plugin\nmoduleVersion=1.0.0\nmoduleAuthors=Lukas Forer\nextensionClasses=com.askimed.nf.test.fasta.PathExtension\nextensionMethods=com.askimed.nf.test.fasta.Methods\n
"},{"location":"docs/plugins/developing-plugins/#building-a-jar-file","title":"Building a jar file","text":"The plugin itself is a jar file that contains all classes and the META-INF/nf-test-plugin
file. If you have dependencies, then you have to create an uber-jar that includes all libraries, because nf-test doesn't support the classpath set in META-INF\\MANIFEST
.
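With Gradle, such an uber-jar can be built, for example, with the Shadow plugin; a minimal build.gradle sketch (the plugin version is an assumption):
// the shadowJar task bundles the plugin classes and all\n// runtime dependencies into a single jar\nplugins {\n    id 'groovy'\n    id 'com.github.johnrengelman.shadow' version '8.1.1'\n}\n
Running gradle shadowJar then produces a single jar file that can be referenced from nf-test.config or via the --plugins parameter.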
Available plugins are managed in this default repository: https://github.com/askimed/nf-test-plugins/blob/main/plugins.json
Add your plugin or a new release to the plugins.json
file and create a pull request to publish your plugin in the default repository. Or host your own repository:
[{\n \"id\": \"nft-fasta\",\n \"releases\": [{\n \"version\": \"1.0.0\",\n \"url\": \"https://github.com/askimed/nft-fasta/releases/download/v1.0.0/nft-fasta-1.0.0.jar\"\n },{\n \"version\": \"2.0.0\",\n \"url\": \"https://github.com/askimed/nft-fasta/releases/download/v2.0.0/nft-fasta-2.0.0.jar\"\n }]\n},{\n \"id\": \"nft-my-plugin\",\n \"releases\": [{\n \"version\": \"1.0.0\",\n \"url\": \"https://github.com/lukfor/nft-my-plugin2/releases/download/v1.0.0/nft-my-plugin-1.0.0.jar\"\n }]\n}]\n
"},{"location":"docs/plugins/using-plugins/","title":"Plugins","text":"0.7.0
Most assertions are use-case specific. Therefore, separating this functionality and helper classes from the nf-test codebase has several advantages.
For this purpose, we integrated a plugin system that provides (1) the possibility to extend existing classes with custom methods (e.g. path(filename).fasta
) and (2) the possibility to extend nf-test with new methods.
Available plugins are listed here.
A plugin can be activated via the nf-test.config
by adding the plugins
section and by using the load
method to specify the plugin and its version:
config {\n\n plugins {\n\n load \"nft-fasta@1.0.0\"\n\n }\n\n}\n
It is also possible to add one or more additional repositories (e.g. a repository with development/snapshot versions or an in-house repository):
config {\n\n plugins {\n\n repository \"https://github.com/askimed/nf-test-plugins/blob/main/plugins-snapshots.json\"\n repository \"https://github.com/seppinho/nf-test-plugin2/blob/main/plugins.json\"\n\n load \"nft-fasta@1.1.0-snapshot\"\n load \"nft-plugin2@1.1.0\"\n\n // you can also load jar files directly without any repository\n // loadFromFile \"path/to/my/nft-plugin.jar\"\n }\n\n}\n
All plugins are downloaded and cached in .nf-test\\plugins
. This installation mechanism is not yet safe for parallel execution when multiple nf-test instances resolve the same plugin. However, you can use nf-test update-plugins
to download all plugins before you run your tests in parallel.
To clear the cache and force plugins and repositories to be downloaded again, you can execute the nf-test clean
command.
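For example:
# pre-fetch all configured plugins before running tests in parallel\nnf-test update-plugins\n\n# clear the cache and force plugins and repositories to be downloaded again\nnf-test clean\n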
One or multiple plugins can be activated also via the --plugins
parameter:
nf-test test my-test.nf.test --plugins nft-fasta@1.0.0,plugin2@1.0.0\n
or
nf-test test my-test.nf.test --plugins path/to/my/nft-plugin.jar\n
"},{"location":"docs/testcases/","title":"Documentation","text":""},{"location":"docs/testcases/global_variables/","title":"Global Variables","text":"The following variables are available and can be used in setup
, when
, then
and cleanup
closures.
baseDir or projectDir : The directory where the nf-test.config script is located. Example: mypipeline
moduleDir : The directory where the module script is located. Example: mypipeline/modules/mymodule
moduleTestDir : The directory where the test script is located. Example: mypipeline/tests/modules/mymodule
launchDir : The directory where the test is run. Example: mypipeline/.nf-test/tests/<test_hash>
metaDir : The directory where all meta files are located (e.g. mock.nf). Example: mypipeline/.nf-test/tests/<test_hash>/meta
workDir : The directory where task temporary files are created. Example: mypipeline/.nf-test/tests/<test_hash>/work
outputDir : An output directory in the $launchDir that can be used to store output files. The variable contains the absolute path. If you need a relative output directory, see the launchDir example. Example: mypipeline/.nf-test/tests/<test_hash>/output
params : Dictionary-like object holding all parameters."},{"location":"docs/testcases/global_variables/#examples","title":"Examples","text":""},{"location":"docs/testcases/global_variables/#outputdir","title":"outputDir
","text":"This variable points to the directory within the temporary test directory (.nf-test/tests/<test-dir>/output/
). The variable can be set under params:
params {\noutdir = \"$outputDir\"\n}\n
"},{"location":"docs/testcases/global_variables/#basedir","title":"baseDir
","text":"This variable points to the directory to locate the base directory of the main nf-test config. The variable can be used e.g. in the process definition to build absolute paths for input files:
process {\n\"\"\"\n file1 = file(\"$baseDir/tests/input/file123.gz\")\n \"\"\"\n}\n
"},{"location":"docs/testcases/global_variables/#launchdir","title":"launchDir
","text":"This variable points to the directory where the test is executed. This can be used get access to results that are created in an relative output directory:
when {\nparams {\noutdir = \"results\"\n}\n}\n
then {\nassert path(\"$launchDir/results\").exists()\n}\n
"},{"location":"docs/testcases/nextflow_function/","title":"Function Testing","text":"nf-test allows testing of functions that are defined in a Nextflow file or defined in lib
. Please check out the CLI to generate a function test.
nextflow_function {\n\nname \"<NAME>\"\nscript \"<PATH/TO/NEXTFLOW_SCRIPT.nf>\"\nfunction \"<FUNCTION_NAME>\"\n\ntest(\"<TEST_NAME>\") {\n\n}\n}\n
Script paths that start with ./
or ../
are considered relative paths. These paths are resolved based on the location of the test script. Relative paths are beneficial when you want to reference files or directories located within the same directory as your test script or in a parent directory. These paths provide a convenient way to access files without specifying the entire path.
If a Nextflow script contains multiple functions and you want to test them all in the same test suite, you can override the function
property in each test. For example:
functions.nf
","text":"def function1() {\n...\n}\n\ndef function2() {\n...\n}\n
"},{"location":"docs/testcases/nextflow_function/#functionsnftest","title":"functions.nf.test
","text":"nextflow_function {\n\nname \"Test functions\"\nscript \"functions.nf\"\n\ntest(\"Test function1\") {\nfunction \"function1\"\n...\n}\n\ntest(\"Test function2\") {\nfunction \"function2\"\n...\n}\n}\n
"},{"location":"docs/testcases/nextflow_function/#functions-in-lib-folder","title":"Functions in lib
folder","text":"If you want to test a function that is inside a groovy file in your lib
folder, you can ignore the script
property, because Nextflow adds them automatically to the classpath. For example:
lib\\Utils.groovy
","text":"class Utils {\n\n public static void sayHello(name) {\n if (name == null) {\n error('Cannot greet a null person')\n }\n\n def greeting = \"Hello ${name}\"\n\n println(greeting)\n }\n\n}\n
"},{"location":"docs/testcases/nextflow_function/#testslibutilsgroovytest","title":"tests\\lib\\Utils.groovy.test
","text":"nextflow_function {\n\nname \"Test Utils.groovy\"\n\ntest(\"Test function1\") {\nfunction \"Utils.sayHello\"\n...\n}\n}\n
Note: the generate function
command works only with Nextflow functions.
The function
object can be used in asserts to check its status, result value or error messages.
// function status\nassert function.success\nassert function.failed\n\n// return value\nassert function.result == 27\n\n//returns a list containing all lines from stdout\nassert function.stdout.contains(\"Hello World\")\n
"},{"location":"docs/testcases/nextflow_function/#example","title":"Example","text":""},{"location":"docs/testcases/nextflow_function/#nextflow-script","title":"Nextflow script","text":"Create a new file and name it functions.nf
.
def say_hello(name) {\nif (name == null) {\nerror('Cannot greet a null person')\n}\n\ndef greeting = \"Hello ${name}\"\n\nprintln(greeting)\nreturn greeting\n}\n
"},{"location":"docs/testcases/nextflow_function/#nf-test-script","title":"nf-test script","text":"Create a new file and name it functions.nf.test
.
nextflow_function {\n\nname \"Test Function Say Hello\"\n\nscript \"functions.nf\"\nfunction \"say_hello\"\n\ntest(\"Passing case\") {\n\nwhen {\nfunction {\n\"\"\"\n input[0] = \"aaron\"\n \"\"\"\n}\n}\n\nthen {\nassert function.success\nassert function.result == \"Hello aaron\"\nassert function.stdout.contains(\"Hello aaron\")\nassert function.stderr.isEmpty()\n}\n\n}\n\ntest(\"Failure Case\") {\n\nwhen {\nfunction {\n\"\"\"\n input[0] = null\n \"\"\"\n}\n}\n\nthen {\nassert function.failed\n//It seems to me that error(..) writes message to stdout\nassert function.stdout.contains(\"Cannot greet a null person\")\n}\n}\n}\n
"},{"location":"docs/testcases/nextflow_function/#execute-test","title":"Execute test","text":"nf-test test functions.nf.test\n
"},{"location":"docs/testcases/nextflow_pipeline/","title":"Pipeline Testing","text":"nf-test also allows to test the complete pipeline end-to-end. Please checkout the CLI to generate a pipeline test.
"},{"location":"docs/testcases/nextflow_pipeline/#syntax","title":"Syntax","text":"nextflow_pipeline {\n\nname \"<NAME>\"\nscript \"<PATH/TO/NEXTFLOW_SCRIPT.nf>\"\n\ntest(\"<TEST_NAME>\") {\n\n}\n}\n
"},{"location":"docs/testcases/nextflow_pipeline/#assertions","title":"Assertions","text":"The workflow
object can be used in asserts to check its status, error messages or traces.
// workflow status\nassert workflow.success\nassert workflow.failed\nassert workflow.exitStatus == 0\n\n// workflow error message\nassert workflow.errorReport.contains(\"....\")\n\n// trace\n//returns a list containing succeeded tasks\nassert workflow.trace.succeeded().size() == 3\n\n//returns a list containing failed tasks\nassert workflow.trace.failed().size() == 0\n\n//returns a list containing all tasks\nassert workflow.trace.tasks().size() == 3\n
"},{"location":"docs/testcases/nextflow_pipeline/#example","title":"Example","text":""},{"location":"docs/testcases/nextflow_pipeline/#nextflow-script","title":"Nextflow script","text":"Create a new file and name it pipeline.nf
.
#!/usr/bin/env nextflow\nnextflow.enable.dsl=2\n\nprocess SAY_HELLO {\ninput:\nval cheers\n\noutput:\nstdout emit: verbiage_ch\npath '*.txt', emit: verbiage_ch2\n\nscript:\n\"\"\"\n echo -n $cheers\n echo -n $cheers > ${cheers}.txt\n \"\"\"\n}\n\nworkflow {\ninput = params.input_text.trim().split(',')\nChannel.from(input) | SAY_HELLO\n}\n
"},{"location":"docs/testcases/nextflow_pipeline/#nf-test-script","title":"nf-test script","text":"Create a new file and name it pipeline.nf.test
.
nextflow_pipeline {\n\nname \"Test Pipeline with 1 process\"\nscript \"pipeline.nf\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nparams {\ninput_text = \"hello,nf-test\"\n}\n}\n\nthen {\nassert workflow.success\nassert workflow.trace.tasks().size() == 2\n}\n\n}\n\n}\n
"},{"location":"docs/testcases/nextflow_pipeline/#execute-test","title":"Execute test","text":"nf-test init\nnf-test test pipeline.nf.test\n
"},{"location":"docs/testcases/nextflow_process/","title":"Process Testing","text":"nf-test allows to test each process defined in a module file. Please checkout the CLI to generate a process test.
"},{"location":"docs/testcases/nextflow_process/#syntax","title":"Syntax","text":"nextflow_process {\n\nname \"<NAME>\"\nscript \"<PATH/TO/NEXTFLOW_SCRIPT.nf>\"\nprocess \"<PROCESS_NAME>\"\n\ntest(\"<TEST_NAME>\") {\n\n}\n}\n
Script paths that start with ./
or ../
are considered relative paths. These paths are resolved based on the location of the test script. Relative paths are beneficial when you want to reference files or directories located within the same directory as your test script or in a parent directory. These paths provide a convenient way to access files without specifying the entire path.
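For example, a test file stored next to a modules folder might reference its script like this (the paths are illustrative):
nextflow_process {\n\nname \"Test Process SAY_HELLO\"\n// resolved relative to the location of this test script\nscript \"../modules/say_hello.nf\"\nprocess \"SAY_HELLO\"\n\ntest(\"Should run without failures\") {\n...\n}\n}\n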
The process
object can be used in asserts to check its status or error messages.
// process status\nassert process.success\nassert process.failed\nassert process.exitStatus == 0\n\n// Analyze Nextflow trace file\nassert process.trace.tasks().size() == 1\n\n// process error message\nassert process.errorReport.contains(\"....\")\n\n//returns a list containing all lines from stdout\nassert process.stdout.contains(\"Hello World\")\n
"},{"location":"docs/testcases/nextflow_process/#output-channels","title":"Output Channels","text":"The process.out
object provides access to the content of all named output Channels (see Nextflow emit
):
// channel exists\nassert process.out.my_channel != null\n\n// channel contains 3 elements\nassert process.out.my_channel.size() == 3\n\n// first element is \"hello\"\nassert process.out.my_channel.get(0) == \"hello\"\n
Channels that lack explicit names can be addressed using square brackets and the corresponding index. This indexing method provides a straightforward way to interact with channels without the need for predefined names. To access the first output channel, you can use the index [0] as demonstrated below:
// channel exists\nassert process.out[0] != null\n\n// channel contains 3 elements\nassert process.out[0].size() == 3\n\n// first element is \"hello\"\nassert process.out[0].get(0) == \"hello\"\n
"},{"location":"docs/testcases/nextflow_process/#example","title":"Example","text":""},{"location":"docs/testcases/nextflow_process/#nextflow-script","title":"Nextflow script","text":"Create a new file and name it say_hello.nf
.
#!/usr/bin/env nextflow\nnextflow.enable.dsl=2\n\nprocess SAY_HELLO {\ninput:\nval cheers\n\noutput:\nstdout emit: verbiage_ch\npath '*.txt', emit: verbiage_ch2\n\nscript:\n\"\"\"\n echo -n $cheers\n echo -n $cheers > ${cheers}.txt\n \"\"\"\n}\n
"},{"location":"docs/testcases/nextflow_process/#nf-test-script","title":"nf-test script","text":"Create a new file and name it say_hello.nf.test
.
nextflow_process {\n\nname \"Test Process SAY_HELLO\"\nscript \"say_hello.nf\"\nprocess \"SAY_HELLO\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nprocess {\n\"\"\"\n input[0] = Channel.from('hello','nf-test')\n \"\"\"\n}\n}\n\nthen {\n\nassert process.success\nassert process.trace.tasks().size() == 2\n\nwith(process.out.trial_out_ch) {\nassert size() == 2\nassert path(get(0)).readLines().size() == 1\nassert path(get(1)).readLines().size() == 1\nassert path(get(1)).md5 == \"4a17df7a54b41a84df492da3f1bab1e3\"\n}\n\n}\n\n}\n}\n
"},{"location":"docs/testcases/nextflow_process/#execute-test","title":"Execute test","text":"nf-test init\nnf-test test say_hello.nf.test\n
"},{"location":"docs/testcases/nextflow_workflow/","title":"Workflow Testing","text":"nf-test also allows to test a specific workflow. Please checkout the CLI to generate a workflow test.
"},{"location":"docs/testcases/nextflow_workflow/#syntax","title":"Syntax","text":"nextflow_workflow {\n\nname \"<NAME>\"\nscript \"<PATH/TO/NEXTFLOW_SCRIPT.nf>\"\nworkflow \"<WORKFLOW_NAME>\"\n\ntest(\"<TEST_NAME>\") {\n\n}\n}\n
Script paths that start with ./
or ../
are considered relative paths. These paths are resolved based on the location of the test script. Relative paths are beneficial when you want to reference files or directories located within the same directory as your test script or in a parent directory. These paths provide a convenient way to access files without specifying the entire path.
The workflow
object can be used in asserts to check its status, error messages or traces.
// workflow status\nassert workflow.success\nassert workflow.failed\nassert workflow.exitStatus == 0\n\n// workflow error message\nassert workflow.errorReport.contains(\"....\")\n\n// trace\n//returns a list containing succeeded tasks\nassert workflow.trace.succeeded().size() == 3\n\n//returns a list containing failed tasks\nassert workflow.trace.failed().size() == 0\n\n//returns a list containing all tasks\nassert workflow.trace.tasks().size() == 3\n\n//returns a list containing all lines from stdout\nassert workflow.stdout.contains(\"Hello World\")\n
"},{"location":"docs/testcases/nextflow_workflow/#output-channels","title":"Output Channels","text":"The workflow.out
object provides access to the content of all named output Channels (see Nextflow emit
):
// channel exists\nassert workflow.out.my_channel != null\n\n// channel contains 3 elements\nassert workflow.out.my_channel.size() == 3\n\n// first element is \"hello\"\nassert workflow.out.my_channel.get(0) == \"hello\"\n
"},{"location":"docs/testcases/nextflow_workflow/#example","title":"Example","text":""},{"location":"docs/testcases/nextflow_workflow/#nextflow-script","title":"Nextflow script","text":"Create a new file and name it trial.nf
.
#!/usr/bin/env nextflow\nnextflow.enable.dsl=2\n\nprocess sayHello {\ninput:\nval cheers\n\noutput:\nstdout emit: verbiage_ch\npath '*.txt', emit: verbiage_ch2\n\nscript:\n\"\"\"\n echo -n $cheers\n echo -n $cheers > ${cheers}.txt\n \"\"\"\n}\n\nworkflow trial {\ntake: things\nmain:\nsayHello(things)\nsayHello.out.verbiage_ch.view()\nemit:\ntrial_out_ch = sayHello.out.verbiage_ch2\n}\n\nworkflow {\nChannel.from('hello','nf-test') | trial\n}\n
"},{"location":"docs/testcases/nextflow_workflow/#nf-test-script","title":"nf-test script","text":"Create a new file and name it trial.nf.test
.
nextflow_workflow {\n\nname \"Test Workflow Trial\"\nscript \"trial.nf\"\nworkflow \"trial\"\n\ntest(\"Should run without failures\") {\n\nwhen {\nworkflow {\n\"\"\"\n input[0] = Channel.from('hello','nf-test')\n \"\"\"\n}\n}\n\nthen {\n\nassert workflow.success\n\nwith(workflow.out.trial_out_ch) {\nassert size() == 2\nassert path(get(0)).readLines().size() == 1\nassert path(get(1)).readLines().size() == 1\nassert path(get(1)).md5 == \"4a17df7a54b41a84df492da3f1bab1e3\"\n}\n\n}\n\n}\n\n}\n
"},{"location":"docs/testcases/nextflow_workflow/#execute-test","title":"Execute test","text":"nf-test init\nnf-test test trial.nf.test\n
"},{"location":"docs/testcases/params/","title":"Params Dictionary","text":"The params
block is optional and is a simple map that can be used to overwrite Nextflow's input params
. The params
block is located in the when
block of a test case. You can set params manually:
when {\nparams {\noutdir = \"output\"\n}\n}\n
It is also possible to set nested params using the same syntax as in your Nextflow script:
when {\nparams {\noutput {\ndir = \"output\"\n}\n}\n}\n
The params
map can also be used in the then
block:
then {\nassert params.outdir == \"output\"\n}\n
"},{"location":"docs/testcases/params/#load-params-from-files","title":"Load params from files","text":"In addition, you can load the params
from a JSON file:
when {\nparams {\nload(\"$baseDir/tests/params.json\")\n}\n}\n
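A params.json for the examples above might look like this (illustrative content):
{\n    \"outdir\": \"output\"\n}\n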
or from a YAML file:
when {\nparams {\nload(\"$baseDir/tests/params.yaml\")\n}\n}\n
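The equivalent params.yaml might look like this (illustrative content):
outdir: \"output\"\n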
nf-test allows you to combine both techniques, and therefore it is possible to overwrite one or more params
from the JSON file:
when {\nparams {\nload(\"$baseDir/tests/params.json\")\noutputDir = \"new/output/path\"\n}\n}\n
"}]}
\ No newline at end of file
diff --git a/sitemap.xml.gz b/sitemap.xml.gz
index 9278c649..b48714c8 100644
Binary files a/sitemap.xml.gz and b/sitemap.xml.gz differ