diff --git a/conf/base.config b/conf/base.config index e918941de9b24f1c4d0af26f1e91c966e0a84b02..0060fd2aebdffc2d7c90fcbcf6e8445fa3616b19 100644 --- a/conf/base.config +++ b/conf/base.config @@ -34,7 +34,7 @@ process { // ----- DTM withName: PARSE_REPORTS { executor = 'local' - memory = { 500.MB * task.attempt } + memory = { checkMax( 500.MB * task.attempt, 'memory' ) } time = { 5.m * task.attempt } publishDir = [ @@ -57,8 +57,8 @@ process { withName: MERGE_LANES { cpus = 1 - memory = { 1.GB * task.attempt } - time = { 2.h * task.attempt } + memory = { checkMax( 1.GB * task.attempt, 'memory' ) } + time = { checkMax( 2.h * task.attempt, 'time' ) } } withName: ILLUMINA_FILTER { @@ -70,8 +70,8 @@ process { ] module = toolsModuleHash['ILLUMINA_FILTER'] - cpus = { 3 * task.attempt } - time = { 4.h * task.attempt } + cpus = { checkMax( 3 * task.attempt, 'cpus' ) } + time = { checkMax( 4.h * task.attempt, 'time' ) } } withName: DUPLICATED_READS { @@ -84,9 +84,9 @@ process { ext.args = "--reads_to_process ${params.fastp_n_reads}" module = toolsModuleHash['FASTP'] - time = { 5.h * task.attempt } - memory = { 3.GB * task.attempt } - cpus = { 3 * task.attempt } + time = { checkMax( 5.h * task.attempt, 'time' ) } + memory = { checkMax( 3.GB * task.attempt, 'memory' ) } + cpus = { checkMax( 3 * task.attempt, 'cpus' ) } } withName: FASTQC { @@ -105,11 +105,11 @@ process { module = toolsModuleHash['FASTQC'] maxRetries = 4 - time = { 5.h * task.attempt * params.resource_factor } + time = { checkMax( 5.h * task.attempt * params.resource_factor, 'time' ) } } withName: FASTQSCREEN { - time = { 1.h * task.attempt } + time = { checkMax( 1.h * task.attempt, 'time' ) } module = toolsModuleHash['FASTQSCREEN'] ext.args = "--conf ${params.inputdir}/fastq_screen.conf" @@ -123,9 +123,9 @@ process { // ----- DNA ----- // withLabel: bwa { module = toolsModuleHash['BWA'] - cpus = { 6 * task.attempt } - memory = { 8.GB * task.attempt } - time = { 3.d * task.attempt } + cpus = { checkMax( 6 * 
task.attempt, 'cpus' ) } + memory = { checkMax( 8.GB * task.attempt, 'memory' ) } + time = { checkMax( 3.d * task.attempt, 'time' ) } publishDir = [ path: "${params.outdir}/alignment/bwa", @@ -136,15 +136,15 @@ process { // ----- RNA ----- // withName: SALMON_INDEX { module = toolsModuleHash['SALMON'] - time = { 1.h * task.attempt } - memory = { 3.GB * task.attempt } + time = { checkMax( 1.h * task.attempt, 'time' ) } + memory = { checkMax( 3.GB * task.attempt, 'memory' ) } cpus = 8 } withName: SALMON_QUANT { module = toolsModuleHash['SALMON'] - time = { 1.h * task.attempt } - memory = { 3.GB * task.attempt } + time = { checkMax( 1.h * task.attempt, 'time' ) } + memory = { checkMax( 3.GB * task.attempt, 'memory' ) } cpus = 8 publishDir = [ @@ -156,22 +156,28 @@ process { withName: STAR_INDEX { module = toolsModuleHash['STAR'] - memory = { 50.GB * task.attempt } + memory = { checkMax( 50.GB * task.attempt, 'memory' ) } cpus = 8 } withName: STAR_ALIGN { module = toolsModuleHash['STAR'] - memory = { 20.GB * task.attempt } + memory = { checkMax( 20.GB * task.attempt, 'memory' ) } cpus = 2 - time = { 1.d * task.attempt } + time = { checkMax( 1.d * task.attempt, 'time' ) } + + publishDir = [ + path: "${params.outdir}/AlignmentStats", + mode: 'copy', + pattern: '*.log' + ] } // ----- 16S/Amplicon ----- // withName: JOIN_PAIR { module = toolsModuleHash['FLASH'] - time = { 30.m * task.attempt } - memory = { 500.MB * task.attempt } + time = { checkMax( 30.m * task.attempt, 'time' ) } + memory = { checkMax( 500.MB * task.attempt, 'memory' ) } cpus = 2 ext.args = [ @@ -189,8 +195,8 @@ process { withName: BLAST_N { module = toolsModuleHash['BLAST'] - time = { 5.h * task.attempt } - memory = { 2.GB * task.attempt } + time = { checkMax( 5.h * task.attempt, 'time' ) } + memory = { checkMax( 2.GB * task.attempt, 'memory' ) } cpus = 4 ext.args = [ @@ -207,9 +213,9 @@ process { withLabel: samtools { module = toolsModuleHash['SAMTOOLS'] - cpus = { 6 * task.attempt } - memory = { 
8.GB * task.attempt } - time = { 3.h * task.attempt } + cpus = { checkMax( 6 * task.attempt, 'cpus' ) } + memory = { checkMax( 8.GB * task.attempt, 'memory' ) } + time = { checkMax( 3.h * task.attempt, 'time' ) } } withLabel: alignment { @@ -265,14 +271,14 @@ process { '-f' ].join(' ') - time = { 2.h * task.attempt } + time = { checkMax( 2.h * task.attempt, 'time' ) } } withName: SEQTK_SAMPLE { ext.args = '-s100' ext.args2 = params.subset_seq - memory = { 50.GB * task.attempt } + memory = { checkMax( 50.GB * task.attempt, 'memory' ) } module = toolsModuleHash['SEQTK'] publishDir = [ @@ -304,7 +310,7 @@ process { ext.args = '-s100' ext.args2 = "1000000" - memory = { 50.GB * task.attempt } + memory = { checkMax( 50.GB * task.attempt, 'memory' ) } module = toolsModuleHash['SEQTK'] } @@ -320,7 +326,7 @@ process { beforeScript = "module purge" module = toolsModuleHash['MULTIQC'] - memory = { 10.GB * task.attempt * params.resource_factor } + memory = { checkMax( 10.GB * task.attempt * params.resource_factor, 'memory' ) } publishDir = [ path: "${params.outdir}/MultiQC", @@ -332,9 +338,9 @@ process { withName: SORTMERNA { module = toolsModuleHash['SORTMERNA'] - memory = { 2.GB * task.attempt } - time = { 10.h * task.attempt } - cpus = { 1 * task.attempt } + memory = { checkMax( 2.GB * task.attempt, 'memory' ) } + time = { checkMax( 10.h * task.attempt, 'time' ) } + cpus = { checkMax( 1 * task.attempt, 'cpus' ) } publishDir = [ path: "${params.outdir}/rRNA", @@ -344,7 +350,7 @@ process { } withName: MD5SUM { - time = { 3.h * task.attempt * params.resource_factor } + time = { checkMax( 3.h * task.attempt * params.resource_factor, 'time' ) } publishDir = [ path: "${params.outdir}/fastq", mode: 'copy', @@ -354,9 +360,9 @@ process { withName: QUALIMAP { module = toolsModuleHash['QUALIMAP'] - cpus = { 8 * task.attempt } - memory = { 8.GB * task.attempt } - time = { 3.h * task.attempt } + cpus = { checkMax( 8 * task.attempt, 'cpus' ) } + memory = { checkMax( 8.GB * task.attempt, 
'memory' ) } + time = { checkMax( 3.h * task.attempt, 'time' ) } publishDir = [ path: "${params.outdir}/alignmentStats/qualimap", @@ -372,8 +378,8 @@ process { } withName: GC_BIAS { - time = { 2.h * task.attempt } - memory = { 4.GB * task.attempt } + time = { checkMax( 2.h * task.attempt, 'time' ) } + memory = { checkMax( 4.GB * task.attempt, 'memory' ) } cpus = 1 module = toolsModuleHash['PICARD'] diff --git a/docs/usage.md b/docs/usage.md index 10c24273dac19b16a2e83c9b239cbc737e241e48..216ae194029385c692d88ce1d3f8e645ff0727c6 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -219,7 +219,19 @@ _Default_ : null Path to the fasta of the lambda (unmethylated control). _Default_ : null -## Other paramters +## Other parameters +- **`--max_memory`** [str] +Maximum memory to launch sbatch jobs +_Default_ : 250.GB + +- **`--max_time`** [str] +Maximum time to launch sbatch jobs +_Default_ : 90.d + +- **`--max_cpus`** [str] +Maximum cpus to launch sbatch jobs +_Default_ : 48 + - **`--cluster_options`** [str] Option used to launch slurm jobs. Usefull to exclude some busy nodes for example. _Default_ : null diff --git a/modules/local/module_NGL-Bi.nf b/modules/local/module_NGL-Bi.nf index 416eb721b37c47d693d2040bcbb4f95a40a33088..8da6904ca455267cc15f4c596b5868bdf8432220 100644 --- a/modules/local/module_NGL-Bi.nf +++ b/modules/local/module_NGL-Bi.nf @@ -34,6 +34,7 @@ process TREATMENT_DEMUXSTAT { script: def args = task.ext.args ?: '' forceOption = workflow.resume ? 
"--force" : '' + def lane = params.lane ?: '0' """ perl ${params.ngl_bi_client}/GeT/perl/illumina/createNGL-BiTreatmentDemultiplexStat.pl \\ --code $nglCode \\ diff --git a/nextflow.config b/nextflow.config index 242b32738d07e474432152d992b9127f45548fa2..4627edbf8205e964a7f2a7c635c5eaa868a9fc5b 100644 --- a/nextflow.config +++ b/nextflow.config @@ -68,6 +68,11 @@ params { // Shared Modules shared_modules = '/home/sbsuser/save/scripts-ngs/shared_modules_Current' + // SLURM MAX RESSOURCES + max_memory = "250.GB" + max_time = "90.d" + max_cpus = "48" + // OTHERS cluster_options = '' is_dev_mode = false @@ -108,7 +113,7 @@ params { // Dynamics params, depend on others samplesheet = inputdir.toString() + "/SampleSheet.csv" nf_uniqueness = uniqueness_format.format(new Date()) - if (outdir_prefix == "") {outdir_prefix = project + "_" + run_name} + outdir_prefix = outdir_prefix ?: project + "_" + run_name outdir = inputdir + "/nextflow/" + outdir_prefix + "_" + nf_uniqueness subset_seq = miseq_subset_seq @@ -150,3 +155,36 @@ docker.runOptions = '-u \$(id -u):\$(id -g)' // Capture exit codes from upstream processes when piping process.shell = ['/bin/bash', '-euo', 'pipefail'] + +// Function to ensure that resource requirements don't go beyond +// a maximum limit +def checkMax(obj, type) { + if (type == 'memory') { + try { + if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) + return params.max_memory as nextflow.util.MemoryUnit + else + return obj + } catch (all) { + println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" + return obj + } + } else if (type == 'time') { + try { + if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) + return params.max_time as nextflow.util.Duration + else + return obj + } catch (all) { + println " ### ERROR ### Max time '${params.max_time}' is not valid! 
Using default value: $obj" + return obj + } + } else if (type == 'cpus') { + try { + return Math.min( obj, params.max_cpus as int ) + } catch (all) { + println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj" + return obj + } + } +} diff --git a/workflow/illumina_qc.nf b/workflow/illumina_qc.nf index 619c3c16669fe0f83e4011a9daf79ad5cf379f5d..9b17503f6ef00485e758588b9dbe1d8fb54a9a53 100644 --- a/workflow/illumina_qc.nf +++ b/workflow/illumina_qc.nf @@ -188,7 +188,7 @@ workflow.onComplete { end_mail_sent = sendFinalMail(format.format(new Date()), params.summary) // remove work directory if pipeline is successful - if (workflow.success && !params.is_dev_mode) { + if (workflow.success && (!params.is_dev_mode && !params.DTM_mode)) { println "Pipeline terminé avec succès => suppression du workdir : $workflow.workDir" exec: workflow.workDir.deleteDir()