Merge branch 'master' into UCD_SONIC
brucemoran authored Aug 15, 2023
2 parents 7e113e2 + c9a2e3b commit 2f45ca0
Showing 29 changed files with 679 additions and 84 deletions.
9 changes: 8 additions & 1 deletion .github/workflows/main.yml
@@ -32,13 +32,15 @@ jobs:
- "abims"
- "adcra"
- "alice"
- "apollo"
- "aws_tower"
- "awsbatch"
- "azurebatch"
- "bi"
- "bigpurple"
- "binac"
- "biohpc_gen"
- "biowulf"
- "cambridge"
- "cbe"
- "ccga_dx"
@@ -55,10 +57,13 @@ jobs:
- "denbi_qbic"
- "dkfz"
- "ebc"
- "ebi_codon"
- "ebi_codon_slurm"
- "eddie"
- "engaging"
- "eva"
- "fgcz"
- "fub_curta"
- "genotoul"
- "genouest"
- "gis"
@@ -85,7 +90,7 @@ jobs:
- "mjolnir_globe"
- "mpcdf"
- "munin"
- "nihbiowulf"
- "nci_gadi"
- "nu_genomics"
- "oist"
- "pasteur"
@@ -112,11 +117,13 @@ jobs:
- "uppmax"
- "utd_ganymede"
- "utd_sysbio"
- "uw_hyak_pedslabs"
- "uzh"
- "vai"
- "vsc_kul_uhasselt"
- "vsc_ugent"
- "wehi"
- "wustl_htcf"
- "xanadu"

steps:
9 changes: 8 additions & 1 deletion README.md
@@ -90,13 +90,15 @@ Currently documentation is available for the following systems:
- [ABIMS](docs/abims.md)
- [ADCRA](docs/adcra.md)
- [ALICE](docs/alice.md)
- [APOLLO](docs/apollo.md)
- [AWSBATCH](docs/awsbatch.md)
- [AWS_TOWER](docs/aws_tower.md)
- [AZUREBATCH](docs/azurebatch.md)
- [BIGPURPLE](docs/bigpurple.md)
- [BI](docs/bi.md)
- [BINAC](docs/binac.md)
- [BIOHPC_GEN](docs/biohpc_gen.md)
- [BIOWULF](docs/biowulf.md)
- [CAMBRIDGE](docs/cambridge.md)
- [CBE](docs/cbe.md)
- [CCGA_DX](docs/ccga_dx.md)
@@ -113,9 +115,12 @@ Currently documentation is available for the following systems:
- [DENBI_QBIC](docs/denbi_qbic.md)
- [DKFZ](docs/dkfz.md)
- [EBC](docs/ebc.md)
- [EBI_CODON](docs/ebi_codon.md)
- [EBI_CODON_SLURM](docs/ebi_codon_slurm.md)
- [Engaging](docs/engaging.md)
- [EVA](docs/eva.md)
- [FGCZ](docs/fgcz.md)
- [FUB Curta](docs/fub_curta.md)
- [GENOTOUL](docs/genotoul.md)
- [GENOUEST](docs/genouest.md)
- [GIS](docs/gis.md)
@@ -141,8 +146,8 @@ Currently documentation is available for the following systems:
- [MJOLNIR_GLOBE](docs/mjolnir_globe.md)
- [MPCDF](docs/mpcdf.md)
- [MUNIN](docs/munin.md)
- [NCI GADI](docs/nci_gadi.md)
- [NU_GENOMICS](docs/nu_genomics.md)
- [NIHBIOWULF](docs/nihbiowulf.md)
- [OIST](docs/oist.md)
- [PASTEUR](docs/pasteur.md)
- [PAWSEY NIMBUS](docs/pawsey_nimbus.md)
@@ -167,11 +172,13 @@ Currently documentation is available for the following systems:
- [UPPMAX](docs/uppmax.md)
- [UTD_GANYMEDE](docs/utd_ganymede.md)
- [UTD_SYSBIO](docs/utd_sysbio.md)
- [UW_HYAK_PEDSLABS](docs/uw_hyak_pedslabs.md)
- [UZH](docs/uzh.md)
- [VAI](docs/vai.md)
- [VSC_KUL_UHASSELT](docs/vsc_kul_uhasselt.md)
- [VSC_UGENT](docs/vsc_ugent.md)
- [WEHI](docs/wehi.md)
- [WUSTL_HTCF](docs/wustl_htcf.md)
- [XANADU](docs/xanadu.md)

### Uploading to `nf-core/configs`
35 changes: 35 additions & 0 deletions conf/apollo.config
@@ -0,0 +1,35 @@

params {
config_profile_name = 'apollo'
config_profile_description = 'COH Apollo cluster profile provided by nf-core/configs.'
config_profile_contact = 'Denis O\'Meally (@drejom) or HPRCC Helpdesk ([email protected])'
config_profile_url = 'https://www.cityofhope.org/ Intranet http://hprcc.coh.org/'
max_memory = 1500.GB
max_cpus = 72
max_time = 500.h
igenomes_base = '/ref_genome/igenomes'
}

process {
executor = 'slurm'
queue = 'all'
beforeScript = 'module load singularity'
}

singularity {
enabled = true
libraryDir = '/opt/singularity-images/nextflow'
}

executor {
name = 'slurm'
queueSize = 1000
}

cleanup = true

profiles {
debug {
cleanup = false
}
}
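
The profile above fixes the cluster-wide defaults: a Slurm executor on the 'all' partition, the shared Singularity image library, and caps of 1500 GB memory, 72 CPUs and 500 h per task. As a minimal sketch of how individual values can be layered over it at run time, a personal config passed with -c could override selected settings; the path and walltime below are placeholders, not COH defaults.

// Hypothetical user-level override applied on top of -profile apollo (e.g. via -c my.config).
// Values are placeholders, not part of the committed profile.
params {
    igenomes_base = '/scratch/my_lab/igenomes'  // assumption: lab-local iGenomes mirror
}

process {
    time = 24.h  // request a shorter walltime than the 500.h profile cap
}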
54 changes: 54 additions & 0 deletions conf/biowulf.config
@@ -0,0 +1,54 @@

params {
config_profile_description = 'Biowulf nf-core config'
config_profile_contact = '[email protected]'
config_profile_url = 'https://hpc.nih.gov/apps/nextflow.html'
max_memory = '224 GB'
max_cpus = 32
max_time = '72 h'

igenomes_base = '/fdb/igenomes/'
}

executor {

$slurm {
queue = 'norm'
queueSize = 200
pollInterval = '2 min'
queueStatInterval = '5 min'
submitRateLimit = '6/1min'
retry.maxAttempts = 1
}
}

singularity {
enabled = true
autoMounts = true
cacheDir = "/data/$USER/singularity"
envWhitelist='https_proxy,http_proxy,ftp_proxy,DISPLAY,SLURM_JOBID,SINGULARITY_BINDPATH'
}

env {
SINGULARITY_CACHEDIR="/data/$USER/singularity"
PYTHONNOUSERSITE = 1
}


process {
executor = 'slurm'
maxRetries = 1

clusterOptions = ' --gres=lscratch:200 '

scratch = '/lscratch/$SLURM_JOBID'

stageInMode = 'symlink'
stageOutMode = 'rsync'

// for running pipeline on group sharing data directory, this can avoid inconsistent files timestamps
cache = 'lenient'
}
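
Each job above lands on node-local scratch: the cluster options request 200 GB of lscratch and work is staged into /lscratch/$SLURM_JOBID. As a hedged sketch, a per-label override could raise that allocation for unusually I/O-heavy steps; the label name and size are assumptions, not part of this profile.

// Hypothetical per-label override layered on top of the Biowulf profile.
// 'big_scratch' is a placeholder label; 800 GB is an arbitrary example size.
process {
    withLabel: 'big_scratch' {
        clusterOptions = ' --gres=lscratch:800 '
    }
}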



1 change: 1 addition & 0 deletions conf/computerome.config
@@ -6,6 +6,7 @@ params {
project = null
cache_dir = "/home/projects/$params.project/scratch"
schema_ignore_params = "project,cache_dir,genomes,modules"
validationSchemaIgnoreParams = "project,cache_dir,genomes,modules,schema_ignore_params"

//Thin nodes with 192GB and Fat nodes with ~1500GB. Torque should be allowed to handle this
max_memory = 1500.GB
54 changes: 54 additions & 0 deletions conf/ebi_codon.config
@@ -0,0 +1,54 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for EMBL-EBI Codon Cluster
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Author: Saul Pierotti
Mail: [email protected]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

params {
config_profile_contact = "Saul Pierotti (@saulpierotti-ebi)"
config_profile_description = "The European Bioinformatics Institute HPC cluster (codon) profile"
config_profile_url = "https://www.ebi.ac.uk/"
}

singularity {
enabled = true
// the default is 20 minutes and fails with large images
pullTimeout = "3 hours"
}

process {
queue = {
if ( task.time > 7.day && task.memory > 300.GB ) {
"long_bigmem"
} else if ( task.memory > 300.GB ) {
"bigmem"
} else if ( task.time > 7.day ) {
"long"
} else if ( task.time > 1.day ) {
"research"
} else {
"short"
}
}
// this is to avoid errors for missing files due to shared filesystem latency
maxRetries = 3
errorStrategy = { ( task.exitStatus == 0 ) ? "retry" : "terminate" }
cache = "lenient"
afterScript = "sleep 60" // to avoid fail when using storeDir for missing output
}

executor {
name = "lsf"
queueSize = 2000
submitRateLimit = "10/1sec"
exitReadTimeout = "30 min"
jobName = {
task.name // [] and " " not allowed in lsf job names
.replace("[", "(")
.replace("]", ")")
.replace(" ", "_")
}
}
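
Because the queue is picked dynamically from each task's time and memory request, resource requests that scale with task.attempt combine naturally with the closure above: a retried task that crosses the 300 GB or 7-day thresholds is rerouted to the matching queue. A minimal sketch, with a placeholder process name and arbitrary sizes:

// Not part of the committed profile. 'BIG_STEP' is a placeholder process name.
// On the second attempt the task asks for 400 GB, so the queue closure above
// routes it to "bigmem" instead of "short".
process {
    withName: 'BIG_STEP' {
        memory = { 200.GB * task.attempt }  // 200 GB first try, 400 GB on retry
        time   = { 12.h * task.attempt }
    }
}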
41 changes: 41 additions & 0 deletions conf/ebi_codon_slurm.config
@@ -0,0 +1,41 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for EMBL-EBI Codon Cluster for the SLURM login nodes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Author: Saul Pierotti
Mail: [email protected]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

params {
config_profile_contact = "Saul Pierotti (@saulpierotti-ebi)"
config_profile_description = "The European Bioinformatics Institute HPC cluster (codon) profile for the SLURM login nodes"
config_profile_url = "https://www.ebi.ac.uk/"
}

singularity {
enabled = true
// the default is 20 minutes and fails with large images
pullTimeout = "3 hours"
}

process {
// this is to avoid errors for missing files due to shared filesystem latency
maxRetries = 3
errorStrategy = { ( task.exitStatus == 0 ) ? "retry" : "terminate" }
cache = "lenient"
afterScript = "sleep 60" // to avoid fail when using storeDir for missing output
}

executor {
name = "slurm"
queueSize = 2000
submitRateLimit = "10/1sec"
exitReadTimeout = "30 min"
jobName = {
task.name // avoid [] and " " in job names
.replace("[", "(")
.replace("]", ")")
.replace(" ", "_")
}
}
26 changes: 26 additions & 0 deletions conf/fub_curta.config
@@ -0,0 +1,26 @@
// Config profile metadata
params {
config_profile_name = 'FUB Curta'
config_profile_description = 'Freie Universität Berlin HPC cluster (Curta) profile'
config_profile_contact = '[email protected]'
config_profile_url = 'https://www.fu-berlin.de/en/sites/high-performance-computing/index.html'
max_memory = 93.GB
max_time = 14.d
max_cpus = 32
}

// Slurm configuration parameters
process {
scratch = true
executor = 'slurm'
clusterOptions = '--qos=standard'
maxRetries = 2
}

// Singularity parameters
singularity {
enabled = true
autoMounts = true
}

cleanup = true
1 change: 1 addition & 0 deletions conf/hasta.config
@@ -6,6 +6,7 @@ params {
priority = null
clusterOptions = null
schema_ignore_params = "priority,clusterOptions"
validationSchemaIgnoreParams = "priority,clusterOptions,schema_ignore_params"
}

singularity {
1 change: 1 addition & 0 deletions conf/janelia.config
@@ -10,6 +10,7 @@ params {

lsf_opts = ''
schema_ignore_params = 'genomes,lsf_opts'
validationSchemaIgnoreParams = "genomes,lsf_opts,schema_ignore_params"
}

singularity {
44 changes: 44 additions & 0 deletions conf/nci_gadi.config
@@ -0,0 +1,44 @@
// NCI Gadi nf-core configuration profile
params {
config_profile_description = 'NCI Gadi HPC profile provided by nf-core/configs'
config_profile_contact = 'Georgie Samaha (@georgiesamaha), Matthew Downton (@mattdton)'
config_profile_url = 'https://opus.nci.org.au/display/Help/Gadi+User+Guide'
project = System.getenv("PROJECT")
}

// Enable use of Singularity to run containers
singularity {
enabled = true
autoMounts = true
}

// Submit up to 300 concurrent jobs (Gadi exec max)
// pollInterval and queueStatInterval of every 5 minutes
// submitRateLimit of 20 per minute
executor {
queueSize = 300
pollInterval = '5 min'
queueStatInterval = '5 min'
submitRateLimit = '20 min'
}

// Define process resource limits
process {
executor = 'pbspro'
project = System.getenv("PROJECT")
storage = "scratch/${params.project}"
module = 'singularity'
cache = 'lenient'
stageInMode = 'symlink'
queue = { task.memory < 128.GB ? 'normalbw' : (task.memory >= 128.GB && task.memory <= 190.GB ? 'normal' : (task.memory > 190.GB && task.memory <= 1020.GB ? 'hugemembw' : '')) }
beforeScript = 'module load singularity'
}

// Write custom trace file with outputs required for SU calculation
def trace_timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss')
trace {
enabled = true
overwrite = false
file = "./gadi-nf-core-trace-${trace_timestamp}.txt"
fields = 'name,status,exit,duration,realtime,cpus,%cpu,memory,%mem,rss'
}
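
The profile reads the accounting project from the PROJECT environment variable for both params.project and the per-process PBS settings. As a hedged sketch, a run where that variable is not exported could pin the value in a small user config instead; 'ab12' is a placeholder project code.

// Hypothetical user config (e.g. -c my_gadi.config); 'ab12' is a placeholder NCI project code.
params {
    project = 'ab12'
}
process {
    project = params.project  // keep the PBS accounting flag in sync
}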