diff --git a/.prettierignore b/.prettierignore
index a55074abfb..b923532bd7 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -6,6 +6,7 @@ testing
nf_core/module-template/meta.yml
nf_core/module-template/tests/tags.yml
nf_core/subworkflow-template/tests/tags.yml
+nf_core/pipeline-template/nextflow_schema.json
# don't run on things handled by ruff
*.py
*.pyc
diff --git a/nf_core/create.py b/nf_core/create.py
index 8038a995c5..c094d33a22 100644
--- a/nf_core/create.py
+++ b/nf_core/create.py
@@ -292,7 +292,7 @@ def render_template(self):
short_name = self.template_params["short_name"]
rename_files = {
"workflows/pipeline.nf": f"workflows/{short_name}.nf",
- "lib/WorkflowPipeline.groovy": f"lib/Workflow{short_name[0].upper()}{short_name[1:]}.groovy",
+ "subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf": f"subworkflows/local/utils_nfcore_{short_name}_pipeline/main.nf",
}
# Set the paths to skip according to customization
diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py
index ecd7bc4bbd..5d62a23bf8 100644
--- a/nf_core/lint/files_exist.py
+++ b/nf_core/lint/files_exist.py
@@ -52,9 +52,6 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]:
docs/output.md
docs/README.md
docs/usage.md
- lib/NfcoreTemplate.groovy
- lib/Utils.groovy
- lib/WorkflowMain.groovy
nextflow_schema.json
nextflow.config
README.md
@@ -69,7 +66,6 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]:
conf/igenomes.config
.github/workflows/awstest.yml
.github/workflows/awsfulltest.yml
- lib/WorkflowPIPELINE.groovy
pyproject.toml
Files that *must not* be present, due to being renamed or removed in the template:
@@ -91,6 +87,11 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]:
lib/Checks.groovy
lib/Completion.groovy
lib/Workflow.groovy
+ lib/WorkflowPIPELINE.groovy
+ lib/NfcoreTemplate.groovy
+ lib/Utils.groovy
+ lib/WorkflowMain.groovy
+
Files that *should not* be present:
@@ -165,9 +166,6 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]:
[Path("docs", "README.md")],
[Path("docs", "README.md")],
[Path("docs", "usage.md")],
- [Path("lib", "NfcoreTemplate.groovy")],
- [Path("lib", "Utils.groovy")],
- [Path("lib", "WorkflowMain.groovy")],
]
files_warn = [
@@ -177,7 +175,6 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]:
[Path("conf", "igenomes.config")],
[Path(".github", "workflows", "awstest.yml")],
[Path(".github", "workflows", "awsfulltest.yml")],
- [Path("lib", f"Workflow{short_name[0].upper()}{short_name[1:]}.groovy")],
[Path("modules.json")],
[Path("pyproject.toml")],
]
@@ -199,6 +196,10 @@ def files_exist(self) -> Dict[str, Union[List[str], bool]]:
Path("lib", "Checks.groovy"),
Path("lib", "Completion.groovy"),
Path("lib", "Workflow.groovy"),
+ Path("lib", "Utils.groovy"),
+ Path("lib", "WorkflowMain.groovy"),
+ Path("lib", "NfcoreTemplate.groovy"),
+ Path("lib", f"Workflow{short_name[0].upper()}{short_name[1:]}.groovy"),
]
files_warn_ifexists = [Path(".travis.yml")]
files_fail_ifinconfig: List[Tuple[Path, Dict[str, str]]] = [
diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py
index 7f63973f16..3038c8af51 100644
--- a/nf_core/lint/files_unchanged.py
+++ b/nf_core/lint/files_unchanged.py
@@ -40,7 +40,6 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]:
docs/images/nf-core-PIPELINE_logo_light.png
docs/images/nf-core-PIPELINE_logo_dark.png
docs/README.md'
- lib/NfcoreTemplate.groovy
['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
Files that can have additional content but must include the template contents::
@@ -104,7 +103,6 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]:
[Path("docs", "images", f"nf-core-{short_name}_logo_light.png")],
[Path("docs", "images", f"nf-core-{short_name}_logo_dark.png")],
[Path("docs", "README.md")],
- [Path("lib", "NfcoreTemplate.groovy")],
]
files_partial = [
[Path(".gitignore"), Path(".prettierignore"), Path("pyproject.toml")],
diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig
index dd991a0e95..dd9ffa5387 100644
--- a/nf_core/pipeline-template/.editorconfig
+++ b/nf_core/pipeline-template/.editorconfig
@@ -18,7 +18,12 @@ end_of_line = unset
insert_final_newline = unset
trim_trailing_whitespace = unset
indent_style = unset
-indent_size = unset
+[/subworkflows/nf-core/**]
+charset = unset
+end_of_line = unset
+insert_final_newline = unset
+trim_trailing_whitespace = unset
+indent_style = unset

[/assets/email*]
indent_size = unset
diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml
index 39943ffe49..b13b7ae074 100644
--- a/nf_core/pipeline-template/assets/multiqc_config.yml
+++ b/nf_core/pipeline-template/assets/multiqc_config.yml
@@ -17,3 +17,5 @@ report_section_order:
order: -1002

export_plots: true
+
+disable_version_detection: true
diff --git a/nf_core/pipeline-template/assets/schema_input.json b/nf_core/pipeline-template/assets/schema_input.json
index 509048bd8a..e76b95fa99 100644
--- a/nf_core/pipeline-template/assets/schema_input.json
+++ b/nf_core/pipeline-template/assets/schema_input.json
@@ -10,25 +10,22 @@
"sample": {
"type": "string",
"pattern": "^\\S+$",
- "errorMessage": "Sample name must be provided and cannot contain spaces"
+ "errorMessage": "Sample name must be provided and cannot contain spaces",
+ "meta": ["id"]
},
"fastq_1": {
"type": "string",
+ "format": "file-path",
+ "exists": true,
"pattern": "^\\S+\\.f(ast)?q\\.gz$",
"errorMessage": "FastQ file for reads 1 must be provided, cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'"
},
"fastq_2": {
- "errorMessage": "FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'",
- "anyOf": [
- {
- "type": "string",
- "pattern": "^\\S+\\.f(ast)?q\\.gz$"
- },
- {
- "type": "string",
- "maxLength": 0
- }
- ]
+ "type": "string",
+ "format": "file-path",
+ "exists": true,
+ "pattern": "^\\S+\\.f(ast)?q\\.gz$",
+ "errorMessage": "FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'"
}
},
"required": ["sample", "fastq_1"]
diff --git a/nf_core/pipeline-template/bin/check_samplesheet.py b/nf_core/pipeline-template/bin/check_samplesheet.py
deleted file mode 100755
index 4a758fe003..0000000000
--- a/nf_core/pipeline-template/bin/check_samplesheet.py
+++ /dev/null
@@ -1,259 +0,0 @@
-#!/usr/bin/env python
-
-
-"""Provide a command line tool to validate and transform tabular samplesheets."""
-
-
-import argparse
-import csv
-import logging
-import sys
-from collections import Counter
-from pathlib import Path
-
-logger = logging.getLogger()
-
-
-class RowChecker:
- """
- Define a service that can validate and transform each given row.
-
- Attributes:
- modified (list): A list of dicts, where each dict corresponds to a previously
- validated and transformed row. The order of rows is maintained.
-
- """
-
- VALID_FORMATS = (
- ".fq.gz",
- ".fastq.gz",
- )
-
- def __init__(
- self,
- sample_col="sample",
- first_col="fastq_1",
- second_col="fastq_2",
- single_col="single_end",
- **kwargs,
- ):
- """
- Initialize the row checker with the expected column names.
-
- Args:
- sample_col (str): The name of the column that contains the sample name
- (default "sample").
- first_col (str): The name of the column that contains the first (or only)
- FASTQ file path (default "fastq_1").
- second_col (str): The name of the column that contains the second (if any)
- FASTQ file path (default "fastq_2").
- single_col (str): The name of the new column that will be inserted and
- records whether the sample contains single- or paired-end sequencing
- reads (default "single_end").
-
- """
- super().__init__(**kwargs)
- self._sample_col = sample_col
- self._first_col = first_col
- self._second_col = second_col
- self._single_col = single_col
- self._seen = set()
- self.modified = []
-
- def validate_and_transform(self, row):
- """
- Perform all validations on the given row and insert the read pairing status.
-
- Args:
- row (dict): A mapping from column headers (keys) to elements of that row
- (values).
-
- """
- self._validate_sample(row)
- self._validate_first(row)
- self._validate_second(row)
- self._validate_pair(row)
- self._seen.add((row[self._sample_col], row[self._first_col]))
- self.modified.append(row)
-
- def _validate_sample(self, row):
- """Assert that the sample name exists and convert spaces to underscores."""
- if len(row[self._sample_col]) <= 0:
- raise AssertionError("Sample input is required.")
- # Sanitize samples slightly.
- row[self._sample_col] = row[self._sample_col].replace(" ", "_")
-
- def _validate_first(self, row):
- """Assert that the first FASTQ entry is non-empty and has the right format."""
- if len(row[self._first_col]) <= 0:
- raise AssertionError("At least the first FASTQ file is required.")
- self._validate_fastq_format(row[self._first_col])
-
- def _validate_second(self, row):
- """Assert that the second FASTQ entry has the right format if it exists."""
- if len(row[self._second_col]) > 0:
- self._validate_fastq_format(row[self._second_col])
-
- def _validate_pair(self, row):
- """Assert that read pairs have the same file extension. Report pair status."""
- if row[self._first_col] and row[self._second_col]:
- row[self._single_col] = False
- first_col_suffix = Path(row[self._first_col]).suffixes[-2:]
- second_col_suffix = Path(row[self._second_col]).suffixes[-2:]
- if first_col_suffix != second_col_suffix:
- raise AssertionError("FASTQ pairs must have the same file extensions.")
- else:
- row[self._single_col] = True
-
- def _validate_fastq_format(self, filename):
- """Assert that a given filename has one of the expected FASTQ extensions."""
- if not any(filename.endswith(extension) for extension in self.VALID_FORMATS):
- raise AssertionError(
- f"The FASTQ file has an unrecognized extension: {filename}\n"
- f"It should be one of: {', '.join(self.VALID_FORMATS)}"
- )
-
- def validate_unique_samples(self):
- """
- Assert that the combination of sample name and FASTQ filename is unique.
-
- In addition to the validation, also rename all samples to have a suffix of _T{n}, where n is the
- number of times the same sample exist, but with different FASTQ files, e.g., multiple runs per experiment.
-
- """
- if len(self._seen) != len(self.modified):
- raise AssertionError("The pair of sample name and FASTQ must be unique.")
- seen = Counter()
- for row in self.modified:
- sample = row[self._sample_col]
- seen[sample] += 1
- row[self._sample_col] = f"{sample}_T{seen[sample]}"
-
-
-def read_head(handle, num_lines=10):
- """Read the specified number of lines from the current position in the file."""
- lines = []
- for idx, line in enumerate(handle):
- if idx == num_lines:
- break
- lines.append(line)
- return "".join(lines)
-
-
-def sniff_format(handle):
- """
- Detect the tabular format.
-
- Args:
- handle (text file): A handle to a `text file`_ object. The read position is
- expected to be at the beginning (index 0).
-
- Returns:
- csv.Dialect: The detected tabular format.
-
- .. _text file:
- https://docs.python.org/3/glossary.html#term-text-file
-
- """
- peek = read_head(handle)
- handle.seek(0)
- sniffer = csv.Sniffer()
- dialect = sniffer.sniff(peek)
- return dialect
-
-
-def check_samplesheet(file_in, file_out):
- """
- Check that the tabular samplesheet has the structure expected by nf-core pipelines.
-
- Validate the general shape of the table, expected columns, and each row. Also add
- an additional column which records whether one or two FASTQ reads were found.
-
- Args:
- file_in (pathlib.Path): The given tabular samplesheet. The format can be either
- CSV, TSV, or any other format automatically recognized by ``csv.Sniffer``.
- file_out (pathlib.Path): Where the validated and transformed samplesheet should
- be created; always in CSV format.
-
- Example:
- This function checks that the samplesheet follows the following structure,
- see also the `viral recon samplesheet`_::
-
- sample,fastq_1,fastq_2
- SAMPLE_PE,SAMPLE_PE_RUN1_1.fastq.gz,SAMPLE_PE_RUN1_2.fastq.gz
- SAMPLE_PE,SAMPLE_PE_RUN2_1.fastq.gz,SAMPLE_PE_RUN2_2.fastq.gz
- SAMPLE_SE,SAMPLE_SE_RUN1_1.fastq.gz,
-
- .. _viral recon samplesheet:
- https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv
-
- """
- required_columns = {"sample", "fastq_1", "fastq_2"}
- # See https://docs.python.org/3.9/library/csv.html#id3 to read up on `newline=""`.
- with file_in.open(newline="") as in_handle:
- reader = csv.DictReader(in_handle, dialect=sniff_format(in_handle))
- # Validate the existence of the expected header columns.
- if not required_columns.issubset(reader.fieldnames):
- req_cols = ", ".join(required_columns)
- logger.critical(f"The sample sheet **must** contain these column headers: {req_cols}.")
- sys.exit(1)
- # Validate each row.
- checker = RowChecker()
- for i, row in enumerate(reader):
- try:
- checker.validate_and_transform(row)
- except AssertionError as error:
- logger.critical(f"{str(error)} On line {i + 2}.")
- sys.exit(1)
- checker.validate_unique_samples()
- header = list(reader.fieldnames)
- header.insert(1, "single_end")
- # See https://docs.python.org/3.9/library/csv.html#id3 to read up on `newline=""`.
- with file_out.open(mode="w", newline="") as out_handle:
- writer = csv.DictWriter(out_handle, header, delimiter=",")
- writer.writeheader()
- for row in checker.modified:
- writer.writerow(row)
-
-
-def parse_args(argv=None):
- """Define and immediately parse command line arguments."""
- parser = argparse.ArgumentParser(
- description="Validate and transform a tabular samplesheet.",
- epilog="Example: python check_samplesheet.py samplesheet.csv samplesheet.valid.csv",
- )
- parser.add_argument(
- "file_in",
- metavar="FILE_IN",
- type=Path,
- help="Tabular input samplesheet in CSV or TSV format.",
- )
- parser.add_argument(
- "file_out",
- metavar="FILE_OUT",
- type=Path,
- help="Transformed output samplesheet in CSV format.",
- )
- parser.add_argument(
- "-l",
- "--log-level",
- help="The desired log level (default WARNING).",
- choices=("CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"),
- default="WARNING",
- )
- return parser.parse_args(argv)
-
-
-def main(argv=None):
- """Coordinate argument parsing and program execution."""
- args = parse_args(argv)
- logging.basicConfig(level=args.log_level, format="[%(levelname)s] %(message)s")
- if not args.file_in.is_file():
- logger.error(f"The given input file {args.file_in} was not found!")
- sys.exit(2)
- args.file_out.parent.mkdir(parents=True, exist_ok=True)
- check_samplesheet(args.file_in, args.file_out)
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config
index d91c6aba0b..e3ea8fa6c4 100644
--- a/nf_core/pipeline-template/conf/modules.config
+++ b/nf_core/pipeline-template/conf/modules.config
@@ -18,14 +18,6 @@ process {
saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
]
- withName: SAMPLESHEET_CHECK {
- publishDir = [
- path: { "${params.outdir}/pipeline_info" },
- mode: params.publish_dir_mode,
- saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
- ]
- }
-
withName: FASTQC {
ext.args = '--quiet'
}
diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config
index 49bfe8a6db..32b9619ebb 100644
--- a/nf_core/pipeline-template/conf/test.config
+++ b/nf_core/pipeline-template/conf/test.config
@@ -27,8 +27,5 @@ params {
{% if igenomes -%}
// Genome references
genome = 'R64-1-1'
- {%- else -%}
- // Fasta references
- fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/genome/NC_045512.2/GCF_009858895.2_ASM985889v3_genomic.200409.fna.gz'
{%- endif %}
}
diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy
deleted file mode 100755
index 0f8d021a03..0000000000
--- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy
+++ /dev/null
@@ -1,356 +0,0 @@
-//
-// This file holds several functions used within the nf-core pipeline template.
-//
-
-import org.yaml.snakeyaml.Yaml
-import groovy.json.JsonOutput
-import nextflow.extension.FilesEx
-
-class NfcoreTemplate {
-
- //
- // Check AWS Batch related parameters have been specified correctly
- //
- public static void awsBatch(workflow, params) {
- if (workflow.profile.contains('awsbatch')) {
- // Check params.awsqueue and params.awsregion have been set if running on AWSBatch
- assert (params.awsqueue && params.awsregion) : "Specify correct --awsqueue and --awsregion parameters on AWSBatch!"
- // Check outdir paths to be S3 buckets if running on AWSBatch
- assert params.outdir.startsWith('s3:') : "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!"
- }
- }
-
- //
- // Warn if a -profile or Nextflow config has not been provided to run the pipeline
- //
- public static void checkConfigProvided(workflow, log) {
- if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) {
- log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" +
- "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" +
- " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" +
- " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" +
- " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" +
- "Please refer to the quick start section and usage docs for the pipeline.\n "
- }
- }
-
- //
- // Generate version string
- //
- public static String version(workflow) {
- String version_string = ""
-
- if (workflow.manifest.version) {
- def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
- version_string += "${prefix_v}${workflow.manifest.version}"
- }
-
- if (workflow.commitId) {
- def git_shortsha = workflow.commitId.substring(0, 7)
- version_string += "-g${git_shortsha}"
- }
-
- return version_string
- }
-
- //
- // Construct and send completion email
- //
- public static void email(workflow, params, summary_params, projectDir, log, multiqc_report=[]) {
-
- // Set up the e-mail variables
- def subject = "[$workflow.manifest.name] Successful: $workflow.runName"
- if (!workflow.success) {
- subject = "[$workflow.manifest.name] FAILED: $workflow.runName"
- }
-
- def summary = [:]
- for (group in summary_params.keySet()) {
- summary << summary_params[group]
- }
-
- def misc_fields = [:]
- misc_fields['Date Started'] = workflow.start
- misc_fields['Date Completed'] = workflow.complete
- misc_fields['Pipeline script file path'] = workflow.scriptFile
- misc_fields['Pipeline script hash ID'] = workflow.scriptId
- if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository
- if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId
- if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision
- misc_fields['Nextflow Version'] = workflow.nextflow.version
- misc_fields['Nextflow Build'] = workflow.nextflow.build
- misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp
-
- def email_fields = [:]
- email_fields['version'] = NfcoreTemplate.version(workflow)
- email_fields['runName'] = workflow.runName
- email_fields['success'] = workflow.success
- email_fields['dateComplete'] = workflow.complete
- email_fields['duration'] = workflow.duration
- email_fields['exitStatus'] = workflow.exitStatus
- email_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
- email_fields['errorReport'] = (workflow.errorReport ?: 'None')
- email_fields['commandLine'] = workflow.commandLine
- email_fields['projectDir'] = workflow.projectDir
- email_fields['summary'] = summary << misc_fields
-
- // On success try attach the multiqc report
- def mqc_report = null
- try {
- if (workflow.success) {
- mqc_report = multiqc_report.getVal()
- if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) {
- if (mqc_report.size() > 1) {
- log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one"
- }
- mqc_report = mqc_report[0]
- }
- }
- } catch (all) {
- if (multiqc_report) {
- log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email"
- }
- }
-
- // Check if we are only sending emails on failure
- def email_address = params.email
- if (!params.email && params.email_on_fail && !workflow.success) {
- email_address = params.email_on_fail
- }
-
- // Render the TXT template
- def engine = new groovy.text.GStringTemplateEngine()
- def tf = new File("$projectDir/assets/email_template.txt")
- def txt_template = engine.createTemplate(tf).make(email_fields)
- def email_txt = txt_template.toString()
-
- // Render the HTML template
- def hf = new File("$projectDir/assets/email_template.html")
- def html_template = engine.createTemplate(hf).make(email_fields)
- def email_html = html_template.toString()
-
- // Render the sendmail template
- def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit
- def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ]
- def sf = new File("$projectDir/assets/sendmail_template.txt")
- def sendmail_template = engine.createTemplate(sf).make(smail_fields)
- def sendmail_html = sendmail_template.toString()
-
- // Send the HTML e-mail
- Map colors = logColours(params.monochrome_logs)
- if (email_address) {
- try {
- if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') }
- // Try to send HTML e-mail using sendmail
- def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html")
- sendmail_tf.withWriter { w -> w << sendmail_html }
- [ 'sendmail', '-t' ].execute() << sendmail_html
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-"
- } catch (all) {
- // Catch failures and try with plaintext
- def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ]
- if ( mqc_report != null && mqc_report.size() <= max_multiqc_email_size.toBytes() ) {
- mail_cmd += [ '-A', mqc_report ]
- }
- mail_cmd.execute() << email_html
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-"
- }
- }
-
- // Write summary e-mail HTML to a file
- def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html")
- output_hf.withWriter { w -> w << email_html }
- FilesEx.copyTo(output_hf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.html");
- output_hf.delete()
-
- // Write summary e-mail TXT to a file
- def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt")
- output_tf.withWriter { w -> w << email_txt }
- FilesEx.copyTo(output_tf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.txt");
- output_tf.delete()
- }
-
- //
- // Construct and send a notification to a web server as JSON
- // e.g. Microsoft Teams and Slack
- //
- public static void IM_notification(workflow, params, summary_params, projectDir, log) {
- def hook_url = params.hook_url
-
- def summary = [:]
- for (group in summary_params.keySet()) {
- summary << summary_params[group]
- }
-
- def misc_fields = [:]
- misc_fields['start'] = workflow.start
- misc_fields['complete'] = workflow.complete
- misc_fields['scriptfile'] = workflow.scriptFile
- misc_fields['scriptid'] = workflow.scriptId
- if (workflow.repository) misc_fields['repository'] = workflow.repository
- if (workflow.commitId) misc_fields['commitid'] = workflow.commitId
- if (workflow.revision) misc_fields['revision'] = workflow.revision
- misc_fields['nxf_version'] = workflow.nextflow.version
- misc_fields['nxf_build'] = workflow.nextflow.build
- misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp
-
- def msg_fields = [:]
- msg_fields['version'] = NfcoreTemplate.version(workflow)
- msg_fields['runName'] = workflow.runName
- msg_fields['success'] = workflow.success
- msg_fields['dateComplete'] = workflow.complete
- msg_fields['duration'] = workflow.duration
- msg_fields['exitStatus'] = workflow.exitStatus
- msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
- msg_fields['errorReport'] = (workflow.errorReport ?: 'None')
- msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "")
- msg_fields['projectDir'] = workflow.projectDir
- msg_fields['summary'] = summary << misc_fields
-
- // Render the JSON template
- def engine = new groovy.text.GStringTemplateEngine()
- // Different JSON depending on the service provider
- // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format
- def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json"
- def hf = new File("$projectDir/assets/${json_path}")
- def json_template = engine.createTemplate(hf).make(msg_fields)
- def json_message = json_template.toString()
-
- // POST
- def post = new URL(hook_url).openConnection();
- post.setRequestMethod("POST")
- post.setDoOutput(true)
- post.setRequestProperty("Content-Type", "application/json")
- post.getOutputStream().write(json_message.getBytes("UTF-8"));
- def postRC = post.getResponseCode();
- if (! postRC.equals(200)) {
- log.warn(post.getErrorStream().getText());
- }
- }
-
- //
- // Dump pipeline parameters in a json file
- //
- public static void dump_parameters(workflow, params) {
- def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
- def filename = "params_${timestamp}.json"
- def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
- def jsonStr = JsonOutput.toJson(params)
- temp_pf.text = JsonOutput.prettyPrint(jsonStr)
-
- FilesEx.copyTo(temp_pf.toPath(), "${params.outdir}/pipeline_info/params_${timestamp}.json")
- temp_pf.delete()
- }
-
- //
- // Print pipeline summary on completion
- //
- public static void summary(workflow, params, log) {
- Map colors = logColours(params.monochrome_logs)
- if (workflow.success) {
- if (workflow.stats.ignoredCount == 0) {
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-"
- } else {
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-"
- }
- } else {
- log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-"
- }
- }
-
- //
- // ANSII Colours used for terminal logging
- //
- public static Map logColours(Boolean monochrome_logs) {
- Map colorcodes = [:]
-
- // Reset / Meta
- colorcodes['reset'] = monochrome_logs ? '' : "\033[0m"
- colorcodes['bold'] = monochrome_logs ? '' : "\033[1m"
- colorcodes['dim'] = monochrome_logs ? '' : "\033[2m"
- colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m"
- colorcodes['blink'] = monochrome_logs ? '' : "\033[5m"
- colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m"
- colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m"
-
- // Regular Colors
- colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
- colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m"
- colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
- colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
- colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
- colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
- colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
- colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"
-
- // Bold
- colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m"
- colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m"
- colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m"
- colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m"
- colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m"
- colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m"
- colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m"
- colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m"
-
- // Underline
- colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m"
- colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m"
- colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m"
- colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m"
- colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m"
- colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m"
- colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m"
- colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m"
-
- // High Intensity
- colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m"
- colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m"
- colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m"
- colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m"
- colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m"
- colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m"
- colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m"
- colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m"
-
- // Bold High Intensity
- colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m"
- colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m"
- colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m"
- colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m"
- colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m"
- colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m"
- colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m"
- colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m"
-
- return colorcodes
- }
-
- //
- // Does what is says on the tin
- //
- public static String dashedLine(monochrome_logs) {
- Map colors = logColours(monochrome_logs)
- return "-${colors.dim}----------------------------------------------------${colors.reset}-"
- }
-
- //
- // nf-core logo
- //
- public static String logo(workflow, monochrome_logs) {
- Map colors = logColours(monochrome_logs)
- String workflow_version = NfcoreTemplate.version(workflow)
- String.format(
- """\n
- ${dashedLine(monochrome_logs)}{% if branded %}
- ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset}
- ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset}
- ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset}
- ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset}
- ${colors.green}`._,._,\'${colors.reset}{% endif %}
- ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset}
- ${dashedLine(monochrome_logs)}
- """.stripIndent()
- )
- }
-}
diff --git a/nf_core/pipeline-template/lib/Utils.groovy b/nf_core/pipeline-template/lib/Utils.groovy
deleted file mode 100644
index 8d030f4e84..0000000000
--- a/nf_core/pipeline-template/lib/Utils.groovy
+++ /dev/null
@@ -1,47 +0,0 @@
-//
-// This file holds several Groovy functions that could be useful for any Nextflow pipeline
-//
-
-import org.yaml.snakeyaml.Yaml
-
-class Utils {
-
- //
- // When running with -profile conda, warn if channels have not been set-up appropriately
- //
- public static void checkCondaChannels(log) {
- Yaml parser = new Yaml()
- def channels = []
- try {
- def config = parser.load("conda config --show channels".execute().text)
- channels = config.channels
- } catch(NullPointerException | IOException e) {
- log.warn "Could not verify conda channel configuration."
- return
- }
-
- // Check that all channels are present
- // This channel list is ordered by required channel priority.
- def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults']
- def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean
-
- // Check that they are in the right order
- def channel_priority_violation = false
- def n = required_channels_in_order.size()
- for (int i = 0; i < n - 1; i++) {
- channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1]))
- }
-
- if (channels_missing | channel_priority_violation) {
- log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
- " There is a problem with your Conda configuration!\n\n" +
- " You will need to set-up the conda-forge and bioconda channels correctly.\n" +
- " Please refer to https://bioconda.github.io/\n" +
- " The observed channel order is \n" +
- " ${channels}\n" +
- " but the following channel order is required:\n" +
- " ${required_channels_in_order}\n" +
- "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
- }
- }
-}
diff --git a/nf_core/pipeline-template/lib/WorkflowMain.groovy b/nf_core/pipeline-template/lib/WorkflowMain.groovy
deleted file mode 100755
index a254b2b22a..0000000000
--- a/nf_core/pipeline-template/lib/WorkflowMain.groovy
+++ /dev/null
@@ -1,80 +0,0 @@
-//
-// This file holds several functions specific to the main.nf workflow in the {{ name }} pipeline
-//
-
-import nextflow.Nextflow
-
-class WorkflowMain {
-
- //
- // Citation string for pipeline
- //
- public static String citation(workflow) {
- return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" +
- // TODO nf-core: Add Zenodo DOI for pipeline after first release
- //"* The pipeline\n" +
- //" https://doi.org/10.5281/zenodo.XXXXXXX\n\n" +
- "* The nf-core framework\n" +
- " https://doi.org/10.1038/s41587-020-0439-x\n\n" +
- "* Software dependencies\n" +
- " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
- }
-
-
- //
- // Validate parameters and print summary to screen
- //
- public static void initialise(workflow, params, log, args) {
-
- // Print workflow version and exit on --version
- if (params.version) {
- String workflow_version = NfcoreTemplate.version(workflow)
- log.info "${workflow.manifest.name} ${workflow_version}"
- System.exit(0)
- }
-
- // Check that a -profile or Nextflow config has been provided to run the pipeline
- NfcoreTemplate.checkConfigProvided(workflow, log)
- // Check that the profile doesn't contain spaces and doesn't end with a trailing comma
- checkProfile(workflow.profile, args, log)
-
- // Check that conda channels are set-up correctly
- if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) {
- Utils.checkCondaChannels(log)
- }
-
- // Check AWS batch settings
- NfcoreTemplate.awsBatch(workflow, params)
-
- // Check input has been provided
- if (!params.input) {
- Nextflow.error("Please provide an input samplesheet to the pipeline e.g. '--input samplesheet.csv'")
- }
- }
-
- {%- if igenomes %}
- //
- // Get attribute from genome config file e.g. fasta
- //
- public static Object getGenomeAttribute(params, attribute) {
- if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) {
- if (params.genomes[ params.genome ].containsKey(attribute)) {
- return params.genomes[ params.genome ][ attribute ]
- }
- }
- return null
- }
- {%- endif %}
-
- //
- // Exit pipeline if --profile contains spaces
- //
- private static void checkProfile(profile, args, log) {
- if (profile.endsWith(',')) {
- Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`."
- }
- if (args[0]) {
- log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\n Hint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`."
- }
- }
-}
diff --git a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy
deleted file mode 100755
index f9a7859ef5..0000000000
--- a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy
+++ /dev/null
@@ -1,123 +0,0 @@
-//
-// This file holds several functions specific to the workflow/{{ short_name }}.nf in the {{ name }} pipeline
-//
-
-import nextflow.Nextflow
-import groovy.text.SimpleTemplateEngine
-
-class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} {
-
- //
- // Check and validate parameters
- //
- public static void initialise(params, log) {
-{% if igenomes %}
- genomeExistsError(params, log)
-{% endif %}
-
- if (!params.fasta) {
- Nextflow.error "Genome fasta file not specified with e.g. '--fasta genome.fa' or via a detectable config file."
- }
- }
-
- //
- // Get workflow summary for MultiQC
- //
- public static String paramsSummaryMultiqc(workflow, summary) {
- String summary_section = ''
- for (group in summary.keySet()) {
- def group_params = summary.get(group) // This gets the parameters of that particular group
- if (group_params) {
-                summary_section += "    <p style=\"font-size:110%\"><b>$group</b></p>\n"
-                summary_section += "    <dl class=\"dl-horizontal\">\n"
-                for (param in group_params.keySet()) {
-                    summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
-                }
-                summary_section += "    </dl>\n"
- }
- }
-
- String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n"
- yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
- yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
- yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
- yaml_file_text += "plot_type: 'html'\n"
- yaml_file_text += "data: |\n"
- yaml_file_text += "${summary_section}"
- return yaml_file_text
- }
-
- //
- // Generate methods description for MultiQC
- //
-
- public static String toolCitationText(params) {
-
- // TODO nf-core: Optionally add in-text citation tools to this list.
- // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 2023)" : "",
- // Uncomment function in methodsDescriptionText to render in MultiQC report
- def citation_text = [
- "Tools used in the workflow included:",
- "FastQC (Andrews 2010),",
- "MultiQC (Ewels et al. 2016)",
- "."
- ].join(' ').trim()
-
- return citation_text
- }
-
- public static String toolBibliographyText(params) {
-
- // TODO Optionally add bibliographic entries to this list.
- // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Author (2023) Pub name, Journal, DOI" : "",
- // Uncomment function in methodsDescriptionText to render in MultiQC report
- def reference_text = [
- "Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).",
-            "Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354"
- ].join(' ').trim()
-
- return reference_text
- }
-
- public static String methodsDescriptionText(run_workflow, mqc_methods_yaml, params) {
- // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
- def meta = [:]
- meta.workflow = run_workflow.toMap()
- meta["manifest_map"] = run_workflow.manifest.toMap()
-
- // Pipeline DOI
- meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : ""
- meta["nodoi_text"] = meta.manifest_map.doi ? "": "If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. "
-
- // Tool references
- meta["tool_citations"] = ""
- meta["tool_bibliography"] = ""
-
- // TODO Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled!
- //meta["tool_citations"] = toolCitationText(params).replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".")
- //meta["tool_bibliography"] = toolBibliographyText(params)
-
-
- def methods_text = mqc_methods_yaml.text
-
- def engine = new SimpleTemplateEngine()
- def description_html = engine.createTemplate(methods_text).make(meta)
-
- return description_html
- }
- {%- if igenomes %}
-
- //
- // Exit pipeline if incorrect --genome key provided
- //
- private static void genomeExistsError(params, log) {
- if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) {
- def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
- " Genome '${params.genome}' not found in any config files provided to the pipeline.\n" +
- " Currently, the available genome keys are:\n" +
- " ${params.genomes.keySet().join(", ")}\n" +
- "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
- Nextflow.error(error_string)
- }
- }
-{% endif -%}}
diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf
index 78da158856..2590f7467b 100644
--- a/nf_core/pipeline-template/main.nf
+++ b/nf_core/pipeline-template/main.nf
@@ -12,7 +12,19 @@
*/
nextflow.enable.dsl = 2
+
+/*
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS / WORKFLOWS
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+*/
+
+include { {{ short_name|upper }} } from './workflows/{{ short_name }}'
+include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'
+include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'
{% if igenomes %}
+include { getGenomeAttribute } from './subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'
+
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GENOME PARAMETER VALUES
@@ -22,59 +34,77 @@ nextflow.enable.dsl = 2
// TODO nf-core: Remove this line if you don't need a FASTA file
// This is an example of how to use getGenomeAttribute() to fetch parameters
// from igenomes.config using `--genome`
-params.fasta = WorkflowMain.getGenomeAttribute(params, 'fasta')
+params.fasta = getGenomeAttribute('fasta')
{% endif %}
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- VALIDATE & PRINT PARAMETER SUMMARY
+ NAMED WORKFLOWS FOR PIPELINE
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
-include { validateParameters; paramsHelp } from 'plugin/nf-validation'
+//
+// WORKFLOW: Run main analysis pipeline depending on type of input
+//
+workflow {{ prefix_nodash|upper }}_{{ short_name|upper }} {
-// Print help message if needed
-if (params.help) {
- def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs)
- def citation = '\n' + WorkflowMain.citation(workflow) + '\n'
- def String command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker"
- log.info logo + paramsHelp(command) + citation + NfcoreTemplate.dashedLine(params.monochrome_logs)
- System.exit(0)
-}
+ take:
+ samplesheet // channel: samplesheet read in from --input
-// Validate input parameters
-if (params.validate_params) {
- validateParameters()
-}
+ main:
-WorkflowMain.initialise(workflow, params, log, args)
+ //
+ // WORKFLOW: Run pipeline
+ //
+ {{ short_name|upper }} (
+ samplesheet
+ )
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- NAMED WORKFLOW FOR PIPELINE
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
+ emit:
+ multiqc_report = {{ short_name|upper }}.out.multiqc_report // channel: /path/to/multiqc_report.html
-include { {{ short_name|upper }} } from './workflows/{{ short_name }}'
-
-//
-// WORKFLOW: Run main {{ name }} analysis pipeline
-//
-workflow {{ prefix_nodash|upper }}_{{ short_name|upper }} {
- {{ short_name|upper }} ()
}
-
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- RUN ALL WORKFLOWS
+ RUN MAIN WORKFLOW
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
-//
-// WORKFLOW: Execute a single named workflow for the pipeline
-// See: https://github.com/nf-core/rnaseq/issues/619
-//
workflow {
- {{ prefix_nodash|upper }}_{{ short_name|upper }} ()
+
+ main:
+
+ //
+ // SUBWORKFLOW: Run initialisation tasks
+ //
+ PIPELINE_INITIALISATION (
+ params.version,
+ params.help,
+ params.validate_params,
+ params.monochrome_logs,
+ args,
+ params.outdir,
+ params.input
+ )
+
+ //
+ // WORKFLOW: Run main workflow
+ //
+ {{ prefix_nodash|upper }}_{{ short_name|upper }} (
+ PIPELINE_INITIALISATION.out.samplesheet
+ )
+
+ //
+ // SUBWORKFLOW: Run completion tasks
+ //
+ PIPELINE_COMPLETION (
+ params.email,
+ params.email_on_fail,
+ params.plaintext_email,
+ params.outdir,
+ params.monochrome_logs,
+ params.hook_url,
+ {{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report
+ )
}
/*
diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json
index 8660da2d42..96cb7942e9 100644
--- a/nf_core/pipeline-template/modules.json
+++ b/nf_core/pipeline-template/modules.json
@@ -5,11 +5,6 @@
"https://github.com/nf-core/modules.git": {
"modules": {
"nf-core": {
- "custom/dumpsoftwareversions": {
- "branch": "master",
- "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93",
- "installed_by": ["modules"]
- },
"fastqc": {
"branch": "master",
"git_sha": "c9488585ce7bd35ccd2a30faa2371454c8112fb9",
@@ -21,6 +16,25 @@
"installed_by": ["modules"]
}
}
+ },
+ "subworkflows": {
+ "nf-core": {
+ "utils_nextflow_pipeline": {
+ "branch": "master",
+ "git_sha": "cd08c91373cd00a73255081340e4914485846ba1",
+ "installed_by": ["subworkflows"]
+ },
+ "utils_nfcore_pipeline": {
+ "branch": "master",
+ "git_sha": "262b17ed2aad591039f914951659177e6c39a8d8",
+ "installed_by": ["subworkflows"]
+ },
+ "utils_nfvalidation_plugin": {
+ "branch": "master",
+ "git_sha": "cd08c91373cd00a73255081340e4914485846ba1",
+ "installed_by": ["subworkflows"]
+ }
+ }
}
}
}
diff --git a/nf_core/pipeline-template/modules/local/samplesheet_check.nf b/nf_core/pipeline-template/modules/local/samplesheet_check.nf
deleted file mode 100644
index 77be6dfff4..0000000000
--- a/nf_core/pipeline-template/modules/local/samplesheet_check.nf
+++ /dev/null
@@ -1,31 +0,0 @@
-process SAMPLESHEET_CHECK {
- tag "$samplesheet"
- label 'process_single'
-
- conda "conda-forge::python=3.8.3"
- container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/python:3.8.3' :
- 'biocontainers/python:3.8.3' }"
-
- input:
- path samplesheet
-
- output:
- path '*.csv' , emit: csv
- path "versions.yml", emit: versions
-
- when:
- task.ext.when == null || task.ext.when
-
- script: // This script is bundled with the pipeline, in {{ name }}/bin/
- """
- check_samplesheet.py \\
- $samplesheet \\
- samplesheet.valid.csv
-
- cat <<-END_VERSIONS > versions.yml
- "${task.process}":
- python: \$(python --version | sed 's/Python //g')
- END_VERSIONS
- """
-}
diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml
deleted file mode 100644
index 9b3272bc11..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/environment.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-name: custom_dumpsoftwareversions
-channels:
- - conda-forge
- - bioconda
- - defaults
-dependencies:
- - bioconda::multiqc=1.19
diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf
deleted file mode 100644
index f2187611cc..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf
+++ /dev/null
@@ -1,24 +0,0 @@
-process CUSTOM_DUMPSOFTWAREVERSIONS {
- label 'process_single'
-
- // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container
- conda "${moduleDir}/environment.yml"
- container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' :
- 'biocontainers/multiqc:1.19--pyhdfd78af_0' }"
-
- input:
- path versions
-
- output:
- path "software_versions.yml" , emit: yml
- path "software_versions_mqc.yml", emit: mqc_yml
- path "versions.yml" , emit: versions
-
- when:
- task.ext.when == null || task.ext.when
-
- script:
- def args = task.ext.args ?: ''
- template 'dumpsoftwareversions.py'
-}
diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml
deleted file mode 100644
index 5f15a5fde0..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json
-name: custom_dumpsoftwareversions
-description: Custom module used to dump software versions within the nf-core pipeline template
-keywords:
- - custom
- - dump
- - version
-tools:
- - custom:
- description: Custom module used to dump software versions within the nf-core pipeline template
- homepage: https://github.com/nf-core/tools
- documentation: https://github.com/nf-core/tools
- licence: ["MIT"]
-input:
- - versions:
- type: file
- description: YML file containing software versions
- pattern: "*.yml"
-output:
- - yml:
- type: file
- description: Standard YML file containing software versions
- pattern: "software_versions.yml"
- - mqc_yml:
- type: file
- description: MultiQC custom content YML file containing software versions
- pattern: "software_versions_mqc.yml"
- - versions:
- type: file
- description: File containing software versions
- pattern: "versions.yml"
-authors:
- - "@drpatelh"
- - "@grst"
-maintainers:
- - "@drpatelh"
- - "@grst"
diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
deleted file mode 100755
index e55b8d43a9..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python
-
-
-"""Provide functions to merge multiple versions.yml files."""
-
-
-import platform
-from textwrap import dedent
-
-import yaml
-
-
-def _make_versions_html(versions):
- """Generate a tabular HTML output of all versions for MultiQC."""
- html = [
- dedent(
-            """\\
-            <style>
-            #nf-core-versions tbody:nth-child(even) {
-                background-color: #f2f2f2;
-            }
-            </style>
-            <table class="table" style="width:100%" id="nf-core-versions">
-                <thead>
-                    <tr>
-                        <th> Process Name </th>
-                        <th> Software </th>
-                        <th> Version  </th>
-                    </tr>
-                </thead>
-            """
-        )
-    ]
-    for process, tmp_versions in sorted(versions.items()):
-        html.append("<tbody>")
-        for i, (tool, version) in enumerate(sorted(tmp_versions.items())):
-            html.append(
-                dedent(
-                    f"""\\
-                    <tr>
-                        <td><samp>{process if (i == 0) else ''}</samp></td>
-                        <td><samp>{tool}</samp></td>
-                        <td><samp>{version}</samp></td>
-                    </tr>
-                    """
-                )
-            )
-        html.append("</tbody>")
-    html.append("</table>")
- return "\\n".join(html)
-
-
-def main():
- """Load all version files and generate merged output."""
- versions_this_module = {}
- versions_this_module["${task.process}"] = {
- "python": platform.python_version(),
- "yaml": yaml.__version__,
- }
-
- with open("$versions") as f:
- versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module
-
- # aggregate versions by the module name (derived from fully-qualified process name)
- versions_by_module = {}
- for process, process_versions in versions_by_process.items():
- module = process.split(":")[-1]
- try:
- if versions_by_module[module] != process_versions:
- raise AssertionError(
- "We assume that software versions are the same between all modules. "
- "If you see this error-message it means you discovered an edge-case "
- "and should open an issue in nf-core/tools. "
- )
- except KeyError:
- versions_by_module[module] = process_versions
-
- versions_by_module["Workflow"] = {
- "Nextflow": "$workflow.nextflow.version",
- "$workflow.manifest.name": "$workflow.manifest.version",
- }
-
- versions_mqc = {
- "id": "software_versions",
- "section_name": "${workflow.manifest.name} Software Versions",
- "section_href": "https://github.com/${workflow.manifest.name}",
- "plot_type": "html",
- "description": "are collected at run time from the software output.",
- "data": _make_versions_html(versions_by_module),
- }
-
- with open("software_versions.yml", "w") as f:
- yaml.dump(versions_by_module, f, default_flow_style=False)
- with open("software_versions_mqc.yml", "w") as f:
- yaml.dump(versions_mqc, f, default_flow_style=False)
-
- with open("versions.yml", "w") as f:
- yaml.dump(versions_this_module, f, default_flow_style=False)
-
-
-if __name__ == "__main__":
- main()
diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
deleted file mode 100644
index b1e1630bb3..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
+++ /dev/null
@@ -1,43 +0,0 @@
-nextflow_process {
-
- name "Test Process CUSTOM_DUMPSOFTWAREVERSIONS"
- script "../main.nf"
- process "CUSTOM_DUMPSOFTWAREVERSIONS"
- tag "modules"
- tag "modules_nfcore"
- tag "custom"
- tag "dumpsoftwareversions"
- tag "custom/dumpsoftwareversions"
-
- test("Should run without failures") {
- when {
- process {
- """
- def tool1_version = '''
- TOOL1:
- tool1: 0.11.9
- '''.stripIndent()
-
- def tool2_version = '''
- TOOL2:
- tool2: 1.9
- '''.stripIndent()
-
- input[0] = Channel.of(tool1_version, tool2_version).collectFile()
- """
- }
- }
-
- then {
- assertAll(
- { assert process.success },
- { assert snapshot(
- process.out.versions,
- file(process.out.mqc_yml[0]).readLines()[0..10],
- file(process.out.yml[0]).readLines()[0..7]
- ).match()
- }
- )
- }
- }
-}
diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
deleted file mode 100644
index 5f59a936d7..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
+++ /dev/null
@@ -1,33 +0,0 @@
-{
- "Should run without failures": {
- "content": [
- [
- "versions.yml:md5,76d454d92244589d32455833f7c1ba6d"
- ],
- [
-                "data: \"\\n\\n<style>\\n#nf-core-versions tbody:nth-child(even) {\\n    background-color: #f2f2f2;\\n\\",
-                "    \\ }\\n</style>\\n<table class=\\\"table\\\" style=\\\"width:100%\\\" id=\\\"nf-core-versions\\\">\\n\\",
-                "    <thead>\\n        <tr>\\n            <th> Process Name </th>\\n        \\",
-                "    \\ <th> Software </th>\\n            <th> Version  </th>\\n        </tr>\\n    </thead>\\n\\",
-                "    \\n<tbody>\\n<tr>\\n            <td><samp>CUSTOM_DUMPSOFTWAREVERSIONS</samp></td>\\n            <td><samp>python</samp></td>\\n\\",
-                "    \\            <td><samp>3.11.7</samp></td>\\n        </tr>\\n\\n<tr>\\n            <td><samp></samp></td>\\n            \\",
-                "    \\ <td><samp>yaml</samp></td>\\n            <td><samp>5.4.1</samp></td>\\n        </tr>\\n\\n</tbody>\\n\\",
-                "    <tbody>\\n<tr>\\n            <td><samp>TOOL1</samp></td>\\n            <td><samp>tool1</samp></td>\\n\\",
-                "    \\            <td><samp>0.11.9</samp></td>\\n        </tr>\\n\\n</tbody>\\n<tbody>\\n<tr>\\n            <td><samp>TOOL2</samp></td>\\n\\",
-                "    \\            <td><samp>tool2</samp></td>\\n            <td><samp>1.9</samp></td>\\n        </tr>\\n\\n</tbody>\\n\\",
-                "    <tbody>\\n<tr>\\n            <td><samp>Workflow</samp></td>\\n            <td><samp>Nextflow</samp></td>\\n\\"
- ],
- [
- "CUSTOM_DUMPSOFTWAREVERSIONS:",
- " python: 3.11.7",
- " yaml: 5.4.1",
- "TOOL1:",
- " tool1: 0.11.9",
- "TOOL2:",
- " tool2: '1.9'",
- "Workflow:"
- ]
- ],
- "timestamp": "2024-01-09T23:01:18.710682"
- }
-}
\ No newline at end of file
diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml
deleted file mode 100644
index 405aa24ae3..0000000000
--- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-custom/dumpsoftwareversions:
- - modules/nf-core/custom/dumpsoftwareversions/**
diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config
index 3153ff70d6..17e75f18a4 100644
--- a/nf_core/pipeline-template/nextflow.config
+++ b/nf_core/pipeline-template/nextflow.config
@@ -18,9 +18,8 @@ params {
genome = null
igenomes_base = 's3://ngi-igenomes/igenomes/'
igenomes_ignore = false
- {% else %}
fasta = null
- {%- endif %}
+ {%- endif -%}
// MultiQC options
multiqc_config = null
@@ -49,7 +48,7 @@ params {
custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}"
config_profile_contact = null
config_profile_url = null
- {% endif %}
+ {%- endif %}
// Max resource options
// Defaults only, expecting to be overwritten
diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json
index 080797b4eb..77b1a7a070 100644
--- a/nf_core/pipeline-template/nextflow_schema.json
+++ b/nf_core/pipeline-template/nextflow_schema.json
@@ -16,6 +16,7 @@
"type": "string",
"format": "file-path",
"exists": true,
+ "schema": "assets/schema_input.json",
"mimetype": "text/csv",
"pattern": "^\\S+\\.csv$",
"description": "Path to comma-separated file containing information about the samples in the experiment.",
@@ -42,6 +43,7 @@
}
}
},
+ {%- if igenomes %}
"reference_genome_options": {
"title": "Reference genome options",
"type": "object",
@@ -73,6 +75,7 @@
}
}
},
+ {%- endif %}
"institutional_config_options": {
"title": "Institutional config options",
"type": "object",
@@ -272,9 +275,9 @@
{
"$ref": "#/definitions/input_output_options"
},
- {
+ {% if igenomes %}{
"$ref": "#/definitions/reference_genome_options"
- },
+ },{% endif %}
{
"$ref": "#/definitions/institutional_config_options"
},
diff --git a/nf_core/pipeline-template/subworkflows/local/input_check.nf b/nf_core/pipeline-template/subworkflows/local/input_check.nf
deleted file mode 100644
index 0aecf87fb7..0000000000
--- a/nf_core/pipeline-template/subworkflows/local/input_check.nf
+++ /dev/null
@@ -1,44 +0,0 @@
-//
-// Check input samplesheet and get read channels
-//
-
-include { SAMPLESHEET_CHECK } from '../../modules/local/samplesheet_check'
-
-workflow INPUT_CHECK {
- take:
- samplesheet // file: /path/to/samplesheet.csv
-
- main:
- SAMPLESHEET_CHECK ( samplesheet )
- .csv
- .splitCsv ( header:true, sep:',' )
- .map { create_fastq_channel(it) }
- .set { reads }
-
- emit:
- reads // channel: [ val(meta), [ reads ] ]
- versions = SAMPLESHEET_CHECK.out.versions // channel: [ versions.yml ]
-}
-
-// Function to get list of [ meta, [ fastq_1, fastq_2 ] ]
-def create_fastq_channel(LinkedHashMap row) {
- // create meta map
- def meta = [:]
- meta.id = row.sample
- meta.single_end = row.single_end.toBoolean()
-
- // add path(s) of the fastq file(s) to the meta map
- def fastq_meta = []
- if (!file(row.fastq_1).exists()) {
- exit 1, "ERROR: Please check input samplesheet -> Read 1 FastQ file does not exist!\n${row.fastq_1}"
- }
- if (meta.single_end) {
- fastq_meta = [ meta, [ file(row.fastq_1) ] ]
- } else {
- if (!file(row.fastq_2).exists()) {
- exit 1, "ERROR: Please check input samplesheet -> Read 2 FastQ file does not exist!\n${row.fastq_2}"
- }
- fastq_meta = [ meta, [ file(row.fastq_1), file(row.fastq_2) ] ]
- }
- return fastq_meta
-}
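With INPUT_CHECK and its samplesheet_check module gone, samplesheet parsing is delegated to the nf-validation plugin: the `"schema": "assets/schema_input.json"` pointer added to nextflow_schema.json above lets `Channel.fromSamplesheet("input")` validate and type each row, replacing the hand-rolled `create_fastq_channel` logic deleted here. A simplified sketch of the new pattern (this mirrors, in abbreviated form, the channel construction inside the PIPELINE_INITIALISATION workflow added below; it belongs inside a workflow body):

    include { fromSamplesheet } from 'plugin/nf-validation'

    // Rows are validated and typed against assets/schema_input.json,
    // then arrive as [ meta, fastq_1, fastq_2 ]
    Channel
        .fromSamplesheet("input")
        .map { meta, fastq_1, fastq_2 ->
            fastq_2 ? [ meta + [ single_end: false ], [ fastq_1, fastq_2 ] ]
                    : [ meta + [ single_end: true  ], [ fastq_1 ] ]
        }
        .set { ch_reads }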
diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf
new file mode 100644
index 0000000000..24d4c2d7e1
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf
@@ -0,0 +1,260 @@
+//
+// Subworkflow with functionality specific to the nf-core/pipeline pipeline
+//
+
+/*
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+*/
+
+include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin'
+include { paramsSummaryMap } from 'plugin/nf-validation'
+include { fromSamplesheet } from 'plugin/nf-validation'
+include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline'
+include { completionEmail } from '../../nf-core/utils_nfcore_pipeline'
+include { completionSummary } from '../../nf-core/utils_nfcore_pipeline'
+include { dashedLine } from '../../nf-core/utils_nfcore_pipeline'
+include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline'
+include { imNotification } from '../../nf-core/utils_nfcore_pipeline'
+include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline'
+include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline'
+
+/*
+========================================================================================
+ SUBWORKFLOW TO INITIALISE PIPELINE
+========================================================================================
+*/
+
+workflow PIPELINE_INITIALISATION {
+
+ take:
+ version // boolean: Display version and exit
+ help // boolean: Display help text
+    validate_params // boolean: Validate parameters against the schema at runtime
+ monochrome_logs // boolean: Do not use coloured log outputs
+ nextflow_cli_args // array: List of positional nextflow CLI args
+ outdir // string: The output directory where the results will be saved
+ input // string: Path to input samplesheet
+
+ main:
+
+ ch_versions = Channel.empty()
+
+ //
+ // Print version and exit if required and dump pipeline parameters to JSON file
+ //
+ UTILS_NEXTFLOW_PIPELINE (
+ version,
+ true,
+ outdir,
+ workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1
+ )
+
+ //
+ // Validate parameters and generate parameter summary to stdout
+ //
+ pre_help_text = nfCoreLogo(monochrome_logs)
+ post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs)
+    def String workflow_command = "nextflow run ${workflow.manifest.name} -profile <docker/singularity/.../institute> --input samplesheet.csv --outdir <OUTDIR>"
+ UTILS_NFVALIDATION_PLUGIN (
+ help,
+ workflow_command,
+ pre_help_text,
+ post_help_text,
+ validate_params,
+ "nextflow_schema.json"
+ )
+
+ //
+ // Check config provided to the pipeline
+ //
+ UTILS_NFCORE_PIPELINE (
+ nextflow_cli_args
+ )
+
+ {%- if igenomes %}
+ //
+ // Custom validation for pipeline parameters
+ //
+ validateInputParameters()
+ {%- endif %}
+
+ //
+ // Create channel from input file provided through params.input
+ //
+ Channel
+ .fromSamplesheet("input")
+ .map {
+ meta, fastq_1, fastq_2 ->
+ if (!fastq_2) {
+ return [ meta.id, meta + [ single_end:true ], [ fastq_1 ] ]
+ } else {
+ return [ meta.id, meta + [ single_end:false ], [ fastq_1, fastq_2 ] ]
+ }
+ }
+ .groupTuple()
+ .map {
+ validateInputSamplesheet(it)
+ }
+ .map {
+ meta, fastqs ->
+ return [ meta, fastqs.flatten() ]
+ }
+ .set { ch_samplesheet }
+
+ emit:
+ samplesheet = ch_samplesheet
+ versions = ch_versions
+}
+
+/*
+========================================================================================
+ SUBWORKFLOW FOR PIPELINE COMPLETION
+========================================================================================
+*/
+
+workflow PIPELINE_COMPLETION {
+
+ take:
+ email // string: email address
+ email_on_fail // string: email address sent on pipeline failure
+ plaintext_email // boolean: Send plain-text email instead of HTML
+ outdir // path: Path to output directory where results will be published
+ monochrome_logs // boolean: Disable ANSI colour codes in log output
+ hook_url // string: hook URL for notifications
+ multiqc_report // string: Path to MultiQC report
+
+ main:
+
+ summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json")
+
+ //
+ // Completion email and summary
+ //
+ workflow.onComplete {
+ if (email || email_on_fail) {
+ completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList())
+ }
+
+ completionSummary(monochrome_logs)
+
+ if (hook_url) {
+ imNotification(summary_params, hook_url)
+ }
+ }
+}
+
+/*
+========================================================================================
+ FUNCTIONS
+========================================================================================
+*/
+
+{%- if igenomes %}
+//
+// Check and validate pipeline parameters
+//
+def validateInputParameters() {
+ genomeExistsError()
+}
+{%- endif -%}
+
+//
+// Validate channels from input samplesheet
+//
+def validateInputSamplesheet(input) {
+ def (metas, fastqs) = input[1..2]
+
+ // Check that multiple runs of the same sample are of the same datatype i.e. single-end / paired-end
+ def endedness_ok = metas.collect{ it.single_end }.unique().size == 1
+ if (!endedness_ok) {
+ error("Please check input samplesheet -> Multiple runs of a sample must be of the same datatype i.e. single-end or paired-end: ${metas[0].id}")
+ }
+
+ return [ metas[0], fastqs ]
+}
+
+{%- if igenomes %}
+//
+// Get attribute from genome config file e.g. fasta
+//
+def getGenomeAttribute(attribute) {
+ if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) {
+ if (params.genomes[ params.genome ].containsKey(attribute)) {
+ return params.genomes[ params.genome ][ attribute ]
+ }
+ }
+ return null
+}
+
+//
+// Exit pipeline if incorrect --genome key provided
+//
+def genomeExistsError() {
+ if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) {
+ def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
+ " Genome '${params.genome}' not found in any config files provided to the pipeline.\n" +
+ " Currently, the available genome keys are:\n" +
+ " ${params.genomes.keySet().join(", ")}\n" +
+ "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+ error(error_string)
+ }
+}
+{%- endif -%}
+
+//
+// Generate methods description for MultiQC
+//
+def toolCitationText() {
+ // TODO nf-core: Optionally add in-text citation tools to this list.
+    // Can use ternary operators to dynamically construct the citation list based on conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 2023)" : "",
+ // Uncomment function in methodsDescriptionText to render in MultiQC report
+ def citation_text = [
+ "Tools used in the workflow included:",
+ "FastQC (Andrews 2010),",
+ "MultiQC (Ewels et al. 2016)",
+ "."
+ ].join(' ').trim()
+
+ return citation_text
+}
+
+def toolBibliographyText() {
+ // TODO nf-core: Optionally add bibliographic entries to this list.
+    // Can use ternary operators to dynamically construct the bibliography based on conditions, e.g. params["run_xyz"] ? "Author (2023) Pub name, Journal, DOI" : "",
+ // Uncomment function in methodsDescriptionText to render in MultiQC report
+ def reference_text = [
+        "Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/.",
+        "Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics, 32(19), 3047–3048. doi: 10.1093/bioinformatics/btw354"
+ ].join(' ').trim()
+
+ return reference_text
+}
+
+def methodsDescriptionText(mqc_methods_yaml) {
+    // Convert to a named map so it can be used with the familiar NXF ${workflow} variable syntax in the MultiQC YML file
+ def meta = [:]
+ meta.workflow = workflow.toMap()
+ meta["manifest_map"] = workflow.manifest.toMap()
+
+ // Pipeline DOI
+ meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : ""
+    meta["nodoi_text"] = meta.manifest_map.doi ? "" : "If available, make sure to update the text to include the Zenodo DOI of the version of the pipeline used. "
+
+ // Tool references
+ meta["tool_citations"] = ""
+ meta["tool_bibliography"] = ""
+
+ // TODO nf-core: Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled!
+ // meta["tool_citations"] = toolCitationText().replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".")
+ // meta["tool_bibliography"] = toolBibliographyText()
+
+
+ def methods_text = mqc_methods_yaml.text
+
+ def engine = new groovy.text.SimpleTemplateEngine()
+ def description_html = engine.createTemplate(methods_text).make(meta)
+
+ return description_html.toString()
+}
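For orientation, the template's entry script is expected to drive these two workflows around the main analysis. A rough sketch of the wiring (the include path matches the file added here; the parameter plumbing is abbreviated and illustrative):

    include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_pipeline_pipeline'
    include { PIPELINE_COMPLETION     } from './subworkflows/local/utils_nfcore_pipeline_pipeline'

    workflow {
        // set-up: version/help handling, parameter validation, samplesheet channel
        PIPELINE_INITIALISATION (
            params.version, params.help, params.validate_params,
            params.monochrome_logs, args, params.outdir, params.input
        )

        // ... run the analysis on PIPELINE_INITIALISATION.out.samplesheet ...

        // tear-down: completion e-mail, summary, IM notification
        PIPELINE_COMPLETION (
            params.email, params.email_on_fail, params.plaintext_email,
            params.outdir, params.monochrome_logs, params.hook_url,
            multiqc_report
        )
    }

Here `multiqc_report` stands in for whatever channel the analysis workflow emits.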
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
new file mode 100644
index 0000000000..ac31f28f66
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
@@ -0,0 +1,126 @@
+//
+// Subworkflow with functionality that may be useful for any Nextflow pipeline
+//
+
+import org.yaml.snakeyaml.Yaml
+import groovy.json.JsonOutput
+import nextflow.extension.FilesEx
+
+/*
+========================================================================================
+ SUBWORKFLOW DEFINITION
+========================================================================================
+*/
+
+workflow UTILS_NEXTFLOW_PIPELINE {
+
+ take:
+ print_version // boolean: print version
+ dump_parameters // boolean: dump parameters
+ outdir // path: base directory used to publish pipeline results
+ check_conda_channels // boolean: check conda channels
+
+ main:
+
+ //
+ // Print workflow version and exit on --version
+ //
+ if (print_version) {
+ log.info "${workflow.manifest.name} ${getWorkflowVersion()}"
+ System.exit(0)
+ }
+
+ //
+ // Dump pipeline parameters to a JSON file
+ //
+ if (dump_parameters && outdir) {
+ dumpParametersToJSON(outdir)
+ }
+
+ //
+    // When running with Conda, warn if channels have not been set up appropriately
+ //
+ if (check_conda_channels) {
+ checkCondaChannels()
+ }
+
+ emit:
+ dummy_emit = true
+}
+
+/*
+========================================================================================
+ FUNCTIONS
+========================================================================================
+*/
+
+//
+// Generate version string
+//
+def getWorkflowVersion() {
+ String version_string = ""
+ if (workflow.manifest.version) {
+ def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
+ version_string += "${prefix_v}${workflow.manifest.version}"
+ }
+
+ if (workflow.commitId) {
+ def git_shortsha = workflow.commitId.substring(0, 7)
+ version_string += "-g${git_shortsha}"
+ }
+
+ return version_string
+}
+
+//
+// Dump pipeline parameters to a JSON file
+//
+def dumpParametersToJSON(outdir) {
+ def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
+ def filename = "params_${timestamp}.json"
+ def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
+ def jsonStr = JsonOutput.toJson(params)
+ temp_pf.text = JsonOutput.prettyPrint(jsonStr)
+
+ FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json")
+ temp_pf.delete()
+}
+
+//
+// When running with -profile conda, warn if channels have not been set up appropriately
+//
+def checkCondaChannels() {
+ Yaml parser = new Yaml()
+ def channels = []
+ try {
+ def config = parser.load("conda config --show channels".execute().text)
+ channels = config.channels
+ } catch(NullPointerException | IOException e) {
+ log.warn "Could not verify conda channel configuration."
+ return
+ }
+
+ // Check that all channels are present
+ // This channel list is ordered by required channel priority.
+ def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults']
+ def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean
+
+ // Check that they are in the right order
+ def channel_priority_violation = false
+ def n = required_channels_in_order.size()
+ for (int i = 0; i < n - 1; i++) {
+ channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1]))
+ }
+
+ if (channels_missing | channel_priority_violation) {
+ log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
+ " There is a problem with your Conda configuration!\n\n" +
+            "  You will need to set up the conda-forge and bioconda channels correctly.\n" +
+ " Please refer to https://bioconda.github.io/\n" +
+ " The observed channel order is \n" +
+ " ${channels}\n" +
+ " but the following channel order is required:\n" +
+ " ${required_channels_in_order}\n" +
+ "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+ }
+}
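A note on behaviour: getWorkflowVersion() prefixes a `v` when the manifest version lacks one and, for repository checkouts, appends `-g` plus the 7-character commit SHA, so a manifest version of `1.0dev` at commit `0123abc...` renders as `v1.0dev-g0123abc`. The PIPELINE_INITIALISATION workflow earlier in this diff invokes this subworkflow as follows (repeated here, with comments, purely as a usage reference):

    UTILS_NEXTFLOW_PIPELINE (
        version,   // print "<name> vX.Y.Z" and exit when --version was given
        true,      // always dump the resolved params to ${outdir}/pipeline_info
        outdir,
        workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1  // only lint conda channels when a conda/mamba profile is active
    )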
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml
new file mode 100644
index 0000000000..e5c3a0a828
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/meta.yml
@@ -0,0 +1,38 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
+name: "UTILS_NEXTFLOW_PIPELINE"
+description: Subworkflow with functionality that may be useful for any Nextflow pipeline
+keywords:
+ - utility
+ - pipeline
+ - initialise
+ - version
+components: []
+input:
+ - print_version:
+ type: boolean
+ description: |
+ Print the version of the pipeline and exit
+ - dump_parameters:
+ type: boolean
+ description: |
+ Dump the parameters of the pipeline to a JSON file
+ - output_directory:
+ type: directory
+ description: Path to output dir to write JSON file to.
+ pattern: "results/"
+ - check_conda_channel:
+ type: boolean
+ description: |
+ Check if the conda channel priority is correct.
+output:
+ - dummy_emit:
+ type: boolean
+ description: |
+ Dummy emit to make nf-core subworkflows lint happy
+authors:
+ - "@adamrtalbot"
+ - "@drpatelh"
+maintainers:
+ - "@adamrtalbot"
+ - "@drpatelh"
+ - "@maxulysse"
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test
new file mode 100644
index 0000000000..8ed4310cac
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test
@@ -0,0 +1,54 @@
+
+nextflow_function {
+
+ name "Test Functions"
+ script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf"
+ config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config"
+ tag 'subworkflows'
+ tag 'utils_nextflow_pipeline'
+ tag 'subworkflows/utils_nextflow_pipeline'
+
+ test("Test Function getWorkflowVersion") {
+
+ function "getWorkflowVersion"
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+
+ test("Test Function dumpParametersToJSON") {
+
+ function "dumpParametersToJSON"
+
+ when {
+ function {
+ """
+ // define inputs of the function here. Example:
+ input[0] = "$outputDir"
+ """.stripIndent()
+ }
+ }
+
+ then {
+ assertAll(
+ { assert function.success }
+ )
+ }
+ }
+
+ test("Test Function checkCondaChannels") {
+
+ function "checkCondaChannels"
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+}
\ No newline at end of file
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap
new file mode 100644
index 0000000000..db2030f8b0
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap
@@ -0,0 +1,12 @@
+{
+ "Test Function getWorkflowVersion": {
+ "content": [
+ "v9.9.9"
+ ],
+ "timestamp": "2024-01-19T11:32:36.031083"
+ },
+ "Test Function checkCondaChannels": {
+ "content": null,
+ "timestamp": "2024-01-19T11:32:50.456"
+ }
+}
\ No newline at end of file
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
new file mode 100644
index 0000000000..f7c54bc68f
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
@@ -0,0 +1,123 @@
+nextflow_workflow {
+
+ name "Test Workflow UTILS_NEXTFLOW_PIPELINE"
+ script "../main.nf"
+ config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config"
+ workflow "UTILS_NEXTFLOW_PIPELINE"
+ tag 'subworkflows'
+ tag 'utils_nextflow_pipeline'
+ tag 'subworkflows/utils_nextflow_pipeline'
+
+ test("Should run no inputs") {
+
+ when {
+ params {
+ outdir = "tests/results"
+ }
+ workflow {
+ """
+ print_version = false
+ dump_parameters = false
+ outdir = null
+ check_conda_channels = false
+
+ input[0] = print_version
+ input[1] = dump_parameters
+ input[2] = outdir
+ input[3] = check_conda_channels
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success }
+ )
+ }
+ }
+
+ test("Should print version") {
+
+ when {
+ params {
+ outdir = "tests/results"
+ }
+ workflow {
+ """
+ print_version = true
+ dump_parameters = false
+ outdir = null
+ check_conda_channels = false
+
+ input[0] = print_version
+ input[1] = dump_parameters
+ input[2] = outdir
+ input[3] = check_conda_channels
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success },
+ { assert workflow.stdout.contains("nextflow_workflow v9.9.9") }
+ )
+ }
+ }
+
+ test("Should dump params") {
+
+ when {
+ params {
+ outdir = "$outputDir"
+ }
+ workflow {
+ """
+ print_version = false
+ dump_parameters = true
+ outdir = params.outdir
+ check_conda_channels = false
+
+ input[0] = false
+ input[1] = true
+ input[2] = params.outdir
+ input[3] = false
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success }
+ )
+ }
+ }
+
+ test("Should not create params JSON if no output directory") {
+
+ when {
+ params {
+ outdir = "$outputDir"
+ }
+ workflow {
+ """
+ print_version = false
+ dump_parameters = true
+ outdir = params.outdir
+ check_conda_channels = false
+
+ input[0] = false
+ input[1] = true
+ input[2] = null
+ input[3] = false
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success }
+ )
+ }
+ }
+}
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config
new file mode 100644
index 0000000000..53574ffec4
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config
@@ -0,0 +1,9 @@
+manifest {
+ name = 'nextflow_workflow'
+ author = """nf-core"""
+ homePage = 'https://127.0.0.1'
+ description = """Dummy pipeline"""
+ nextflowVersion = '!>=23.04.0'
+ version = '9.9.9'
+ doi = 'https://doi.org/10.5281/zenodo.5070524'
+}
\ No newline at end of file
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml
new file mode 100644
index 0000000000..f84761125a
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml
@@ -0,0 +1,2 @@
+subworkflows/utils_nextflow_pipeline:
+ - subworkflows/nf-core/utils_nextflow_pipeline/**
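Since the template now vendors these subworkflow tests, they can be exercised in place with nf-test; something like `nf-test test --tag utils_nextflow_pipeline` from the template root should select them via the tags.yml above (command shown for illustration; the exact invocation depends on the local nf-test configuration).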
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
new file mode 100644
index 0000000000..a8b55d6fe1
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
@@ -0,0 +1,440 @@
+//
+// Subworkflow with utility functions specific to the nf-core pipeline template
+//
+
+import org.yaml.snakeyaml.Yaml
+import nextflow.extension.FilesEx
+
+/*
+========================================================================================
+ SUBWORKFLOW DEFINITION
+========================================================================================
+*/
+
+workflow UTILS_NFCORE_PIPELINE {
+
+ take:
+ nextflow_cli_args
+
+ main:
+ valid_config = checkConfigProvided()
+ checkProfileProvided(nextflow_cli_args)
+
+ emit:
+ valid_config
+}
+
+/*
+========================================================================================
+ FUNCTIONS
+========================================================================================
+*/
+
+//
+// Warn if a -profile or Nextflow config has not been provided to run the pipeline
+//
+def checkConfigProvided() {
+ valid_config = true
+ if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) {
+ log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" +
+ "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" +
+ " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" +
+ " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" +
+ " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" +
+ "Please refer to the quick start section and usage docs for the pipeline.\n "
+ valid_config = false
+ }
+ return valid_config
+}
+
+//
+// Exit pipeline if the -profile option is malformed (e.g. ends with a trailing comma) and warn about positional arguments
+//
+def checkProfileProvided(nextflow_cli_args) {
+ if (workflow.profile.endsWith(',')) {
+ error "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" +
+ "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
+ }
+ if (nextflow_cli_args[0]) {
+ log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" +
+ "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n"
+ }
+}
+
+//
+// Citation string for pipeline
+//
+def workflowCitation() {
+ return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" +
+ "* The pipeline\n" +
+ " ${workflow.manifest.doi}\n\n" +
+ "* The nf-core framework\n" +
+ " https://doi.org/10.1038/s41587-020-0439-x\n\n" +
+ "* Software dependencies\n" +
+ " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
+}
+
+//
+// Generate workflow version string
+//
+def getWorkflowVersion() {
+ String version_string = ""
+ if (workflow.manifest.version) {
+ def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
+ version_string += "${prefix_v}${workflow.manifest.version}"
+ }
+
+ if (workflow.commitId) {
+ def git_shortsha = workflow.commitId.substring(0, 7)
+ version_string += "-g${git_shortsha}"
+ }
+
+ return version_string
+}
+
+//
+// Get software versions for pipeline
+//
+def processVersionsFromYAML(yaml_file) {
+ Yaml yaml = new Yaml()
+    def versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] }
+ return yaml.dumpAsMap(versions).trim()
+}
+
+//
+// Get workflow version for pipeline
+//
+def workflowVersionToYAML() {
+ return """
+ Workflow:
+ $workflow.manifest.name: ${getWorkflowVersion()}
+ Nextflow: $workflow.nextflow.version
+ """.stripIndent().trim()
+}
+
+//
+// Get channel of software versions used in pipeline in YAML format
+//
+def softwareVersionsToYAML(ch_versions) {
+ return ch_versions
+ .unique()
+ .map { processVersionsFromYAML(it) }
+ .unique()
+ .mix(Channel.of(workflowVersionToYAML()))
+}
+
+//
+// Get workflow summary for MultiQC
+//
+def paramsSummaryMultiqc(summary_params) {
+ def summary_section = ''
+ for (group in summary_params.keySet()) {
+ def group_params = summary_params.get(group) // This gets the parameters of that particular group
+ if (group_params) {
+            summary_section += "    <p style=\"font-size:110%\"><b>$group</b></p>\n"
+            summary_section += "    <dl class=\"dl-horizontal\">\n"
+            for (param in group_params.keySet()) {
+                summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</span>'}</samp></dd>\n"
+            }
+            summary_section += "    </dl>\n"
+ }
+ }
+
+ String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n"
+ yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
+ yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
+ yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
+ yaml_file_text += "plot_type: 'html'\n"
+ yaml_file_text += "data: |\n"
+ yaml_file_text += "${summary_section}"
+
+ return yaml_file_text
+}
+
+//
+// nf-core logo
+//
+def nfCoreLogo(monochrome_logs=true) {
+ Map colors = logColours(monochrome_logs)
+ String.format(
+ """\n
+ ${dashedLine(monochrome_logs)}
+ ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset}
+ ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset}
+ ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset}
+ ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset}
+ ${colors.green}`._,._,\'${colors.reset}
+ ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset}
+ ${dashedLine(monochrome_logs)}
+ """.stripIndent()
+ )
+}
+
+//
+// Return dashed line
+//
+def dashedLine(monochrome_logs=true) {
+ Map colors = logColours(monochrome_logs)
+ return "-${colors.dim}----------------------------------------------------${colors.reset}-"
+}
+
+//
+// ANSI colours used for terminal logging
+//
+def logColours(monochrome_logs=true) {
+ Map colorcodes = [:]
+
+ // Reset / Meta
+ colorcodes['reset'] = monochrome_logs ? '' : "\033[0m"
+ colorcodes['bold'] = monochrome_logs ? '' : "\033[1m"
+ colorcodes['dim'] = monochrome_logs ? '' : "\033[2m"
+ colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m"
+ colorcodes['blink'] = monochrome_logs ? '' : "\033[5m"
+ colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m"
+ colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m"
+
+ // Regular Colors
+ colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
+ colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m"
+ colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
+ colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
+ colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
+ colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
+ colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
+ colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"
+
+ // Bold
+ colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m"
+ colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m"
+ colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m"
+ colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m"
+ colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m"
+ colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m"
+ colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m"
+ colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m"
+
+ // Underline
+ colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m"
+ colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m"
+ colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m"
+ colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m"
+ colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m"
+ colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m"
+ colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m"
+ colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m"
+
+ // High Intensity
+ colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m"
+ colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m"
+ colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m"
+ colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m"
+ colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m"
+ colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m"
+ colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m"
+ colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m"
+
+ // Bold High Intensity
+ colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m"
+ colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m"
+ colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m"
+ colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m"
+ colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m"
+ colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m"
+ colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m"
+ colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m"
+
+ return colorcodes
+}
+
+//
+// Attach the multiqc report to email
+//
+def attachMultiqcReport(multiqc_report) {
+ def mqc_report = null
+ try {
+ if (workflow.success) {
+ mqc_report = multiqc_report.getVal()
+ if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) {
+ if (mqc_report.size() > 1) {
+ log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one"
+ }
+ mqc_report = mqc_report[0]
+ }
+ }
+ } catch (all) {
+ if (multiqc_report) {
+ log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email"
+ }
+ }
+ return mqc_report
+}
+
+//
+// Construct and send completion email
+//
+def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) {
+
+ // Set up the e-mail variables
+ def subject = "[$workflow.manifest.name] Successful: $workflow.runName"
+ if (!workflow.success) {
+ subject = "[$workflow.manifest.name] FAILED: $workflow.runName"
+ }
+
+ def summary = [:]
+ for (group in summary_params.keySet()) {
+ summary << summary_params[group]
+ }
+
+ def misc_fields = [:]
+ misc_fields['Date Started'] = workflow.start
+ misc_fields['Date Completed'] = workflow.complete
+ misc_fields['Pipeline script file path'] = workflow.scriptFile
+ misc_fields['Pipeline script hash ID'] = workflow.scriptId
+ if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository
+ if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId
+ if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision
+ misc_fields['Nextflow Version'] = workflow.nextflow.version
+ misc_fields['Nextflow Build'] = workflow.nextflow.build
+ misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp
+
+ def email_fields = [:]
+ email_fields['version'] = getWorkflowVersion()
+ email_fields['runName'] = workflow.runName
+ email_fields['success'] = workflow.success
+ email_fields['dateComplete'] = workflow.complete
+ email_fields['duration'] = workflow.duration
+ email_fields['exitStatus'] = workflow.exitStatus
+ email_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
+ email_fields['errorReport'] = (workflow.errorReport ?: 'None')
+ email_fields['commandLine'] = workflow.commandLine
+ email_fields['projectDir'] = workflow.projectDir
+ email_fields['summary'] = summary << misc_fields
+
+    // On success, try to attach the MultiQC report
+ def mqc_report = attachMultiqcReport(multiqc_report)
+
+ // Check if we are only sending emails on failure
+ def email_address = email
+ if (!email && email_on_fail && !workflow.success) {
+ email_address = email_on_fail
+ }
+
+ // Render the TXT template
+ def engine = new groovy.text.GStringTemplateEngine()
+ def tf = new File("${workflow.projectDir}/assets/email_template.txt")
+ def txt_template = engine.createTemplate(tf).make(email_fields)
+ def email_txt = txt_template.toString()
+
+ // Render the HTML template
+ def hf = new File("${workflow.projectDir}/assets/email_template.html")
+ def html_template = engine.createTemplate(hf).make(email_fields)
+ def email_html = html_template.toString()
+
+ // Render the sendmail template
+ def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit
+ def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ]
+ def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt")
+ def sendmail_template = engine.createTemplate(sf).make(smail_fields)
+ def sendmail_html = sendmail_template.toString()
+
+ // Send the HTML e-mail
+ Map colors = logColours(monochrome_logs)
+ if (email_address) {
+ try {
+            if (plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } // deliberately throws so the catch block below falls back to plaintext mail
+ // Try to send HTML e-mail using sendmail
+ def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html")
+ sendmail_tf.withWriter { w -> w << sendmail_html }
+ [ 'sendmail', '-t' ].execute() << sendmail_html
+ log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-"
+ } catch (all) {
+ // Catch failures and try with plaintext
+ def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ]
+ mail_cmd.execute() << email_html
+ log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-"
+ }
+ }
+
+ // Write summary e-mail HTML to a file
+ def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html")
+ output_hf.withWriter { w -> w << email_html }
+ FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html");
+ output_hf.delete()
+
+ // Write summary e-mail TXT to a file
+ def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt")
+ output_tf.withWriter { w -> w << email_txt }
+ FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt");
+ output_tf.delete()
+}
+
+//
+// Print pipeline summary on completion
+//
+def completionSummary(monochrome_logs=true) {
+ Map colors = logColours(monochrome_logs)
+ if (workflow.success) {
+ if (workflow.stats.ignoredCount == 0) {
+ log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-"
+ } else {
+ log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-"
+ }
+ } else {
+ log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-"
+ }
+}
+
+//
+// Construct and send a notification to a web server as JSON e.g. Microsoft Teams and Slack
+//
+def imNotification(summary_params, hook_url) {
+ def summary = [:]
+ for (group in summary_params.keySet()) {
+ summary << summary_params[group]
+ }
+
+ def misc_fields = [:]
+ misc_fields['start'] = workflow.start
+ misc_fields['complete'] = workflow.complete
+ misc_fields['scriptfile'] = workflow.scriptFile
+ misc_fields['scriptid'] = workflow.scriptId
+ if (workflow.repository) misc_fields['repository'] = workflow.repository
+ if (workflow.commitId) misc_fields['commitid'] = workflow.commitId
+ if (workflow.revision) misc_fields['revision'] = workflow.revision
+ misc_fields['nxf_version'] = workflow.nextflow.version
+ misc_fields['nxf_build'] = workflow.nextflow.build
+ misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp
+
+ def msg_fields = [:]
+ msg_fields['version'] = getWorkflowVersion()
+ msg_fields['runName'] = workflow.runName
+ msg_fields['success'] = workflow.success
+ msg_fields['dateComplete'] = workflow.complete
+ msg_fields['duration'] = workflow.duration
+ msg_fields['exitStatus'] = workflow.exitStatus
+ msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
+ msg_fields['errorReport'] = (workflow.errorReport ?: 'None')
+ msg_fields['commandLine'] = workflow.commandLine.replaceFirst(/ +--hook_url +[^ ]+/, "")
+ msg_fields['projectDir'] = workflow.projectDir
+ msg_fields['summary'] = summary << misc_fields
+
+ // Render the JSON template
+ def engine = new groovy.text.GStringTemplateEngine()
+ // Different JSON depending on the service provider
+ // Defaults to "Adaptive Cards" (https://adaptivecards.io), except Slack which has its own format
+ def json_path = hook_url.contains("hooks.slack.com") ? "slackreport.json" : "adaptivecard.json"
+ def hf = new File("${workflow.projectDir}/assets/${json_path}")
+ def json_template = engine.createTemplate(hf).make(msg_fields)
+ def json_message = json_template.toString()
+
+ // POST
+ def post = new URL(hook_url).openConnection();
+ post.setRequestMethod("POST")
+ post.setDoOutput(true)
+ post.setRequestProperty("Content-Type", "application/json")
+ post.getOutputStream().write(json_message.getBytes("UTF-8"));
+ def postRC = post.getResponseCode();
+ if (! postRC.equals(200)) {
+ log.warn(post.getErrorStream().getText());
+ }
+}
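The processVersionsFromYAML/softwareVersionsToYAML pair is the functional replacement for the deleted CUSTOM_DUMPSOFTWAREVERSIONS module: each versions.yml on the channel is de-duplicated, process names are stripped down to the bare tool name, and the workflow/Nextflow versions are mixed in. A sketch of how a workflow could collate the result into a single YAML file for MultiQC (the storeDir and file name here are illustrative, not mandated by this subworkflow):

    ch_collated_versions = softwareVersionsToYAML(ch_versions)
        .collectFile(
            storeDir: "${params.outdir}/pipeline_info",
            name:     'software_mqc_versions.yml',  // illustrative file name
            sort:     true,
            newLine:  true
        )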
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml
new file mode 100644
index 0000000000..d08d24342d
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml
@@ -0,0 +1,24 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
+name: "UTILS_NFCORE_PIPELINE"
+description: Subworkflow with utility functions specific to the nf-core pipeline template
+keywords:
+ - utility
+ - pipeline
+ - initialise
+ - version
+components: []
+input:
+ - nextflow_cli_args:
+ type: list
+ description: |
+ Nextflow CLI positional arguments
+output:
+ - success:
+ type: boolean
+ description: |
+ Dummy output to indicate success
+authors:
+ - "@adamrtalbot"
+maintainers:
+ - "@adamrtalbot"
+ - "@maxulysse"
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
new file mode 100644
index 0000000000..1dc317f8f7
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
@@ -0,0 +1,134 @@
+
+nextflow_function {
+
+ name "Test Functions"
+ script "../main.nf"
+ config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config"
+ tag "subworkflows"
+ tag "subworkflows_nfcore"
+ tag "utils_nfcore_pipeline"
+ tag "subworkflows/utils_nfcore_pipeline"
+
+ test("Test Function checkConfigProvided") {
+
+ function "checkConfigProvided"
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+
+ test("Test Function checkProfileProvided") {
+
+ function "checkProfileProvided"
+
+ when {
+ function {
+ """
+ input[0] = []
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+
+ test("Test Function workflowCitation") {
+
+ function "workflowCitation"
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+
+ test("Test Function nfCoreLogo") {
+
+ function "nfCoreLogo"
+
+ when {
+ function {
+ """
+ input[0] = false
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+
+ test("Test Function dashedLine") {
+
+ function "dashedLine"
+
+ when {
+ function {
+ """
+ input[0] = false
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+
+ test("Test Function without logColours") {
+
+ function "logColours"
+
+ when {
+ function {
+ """
+ input[0] = true
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+
+ test("Test Function with logColours") {
+ function "logColours"
+
+ when {
+ function {
+ """
+ input[0] = false
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert function.success },
+ { assert snapshot(function.result).match() }
+ )
+ }
+ }
+}
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
new file mode 100644
index 0000000000..10f948e629
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
@@ -0,0 +1,138 @@
+{
+ "Test Function checkProfileProvided": {
+ "content": null,
+ "timestamp": "2024-02-09T15:43:55.145717"
+ },
+ "Test Function checkConfigProvided": {
+ "content": [
+ true
+ ],
+ "timestamp": "2024-01-19T11:34:13.548431224"
+ },
+ "Test Function nfCoreLogo": {
+ "content": [
+ "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n"
+ ],
+ "timestamp": "2024-01-19T11:34:38.840454873"
+ },
+ "Test Function workflowCitation": {
+ "content": [
+ "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md"
+ ],
+ "timestamp": "2024-01-19T11:34:22.24352016"
+ },
+ "Test Function without logColours": {
+ "content": [
+ {
+ "reset": "",
+ "bold": "",
+ "dim": "",
+ "underlined": "",
+ "blink": "",
+ "reverse": "",
+ "hidden": "",
+ "black": "",
+ "red": "",
+ "green": "",
+ "yellow": "",
+ "blue": "",
+ "purple": "",
+ "cyan": "",
+ "white": "",
+ "bblack": "",
+ "bred": "",
+ "bgreen": "",
+ "byellow": "",
+ "bblue": "",
+ "bpurple": "",
+ "bcyan": "",
+ "bwhite": "",
+ "ublack": "",
+ "ured": "",
+ "ugreen": "",
+ "uyellow": "",
+ "ublue": "",
+ "upurple": "",
+ "ucyan": "",
+ "uwhite": "",
+ "iblack": "",
+ "ired": "",
+ "igreen": "",
+ "iyellow": "",
+ "iblue": "",
+ "ipurple": "",
+ "icyan": "",
+ "iwhite": "",
+ "biblack": "",
+ "bired": "",
+ "bigreen": "",
+ "biyellow": "",
+ "biblue": "",
+ "bipurple": "",
+ "bicyan": "",
+ "biwhite": ""
+ }
+ ],
+ "timestamp": "2024-01-19T11:35:04.418416984"
+ },
+ "Test Function dashedLine": {
+ "content": [
+ "-\u001b[2m----------------------------------------------------\u001b[0m-"
+ ],
+ "timestamp": "2024-01-19T11:34:55.420000755"
+ },
+ "Test Function with logColours": {
+ "content": [
+ {
+ "reset": "\u001b[0m",
+ "bold": "\u001b[1m",
+ "dim": "\u001b[2m",
+ "underlined": "\u001b[4m",
+ "blink": "\u001b[5m",
+ "reverse": "\u001b[7m",
+ "hidden": "\u001b[8m",
+ "black": "\u001b[0;30m",
+ "red": "\u001b[0;31m",
+ "green": "\u001b[0;32m",
+ "yellow": "\u001b[0;33m",
+ "blue": "\u001b[0;34m",
+ "purple": "\u001b[0;35m",
+ "cyan": "\u001b[0;36m",
+ "white": "\u001b[0;37m",
+ "bblack": "\u001b[1;30m",
+ "bred": "\u001b[1;31m",
+ "bgreen": "\u001b[1;32m",
+ "byellow": "\u001b[1;33m",
+ "bblue": "\u001b[1;34m",
+ "bpurple": "\u001b[1;35m",
+ "bcyan": "\u001b[1;36m",
+ "bwhite": "\u001b[1;37m",
+ "ublack": "\u001b[4;30m",
+ "ured": "\u001b[4;31m",
+ "ugreen": "\u001b[4;32m",
+ "uyellow": "\u001b[4;33m",
+ "ublue": "\u001b[4;34m",
+ "upurple": "\u001b[4;35m",
+ "ucyan": "\u001b[4;36m",
+ "uwhite": "\u001b[4;37m",
+ "iblack": "\u001b[0;90m",
+ "ired": "\u001b[0;91m",
+ "igreen": "\u001b[0;92m",
+ "iyellow": "\u001b[0;93m",
+ "iblue": "\u001b[0;94m",
+ "ipurple": "\u001b[0;95m",
+ "icyan": "\u001b[0;96m",
+ "iwhite": "\u001b[0;97m",
+ "biblack": "\u001b[1;90m",
+ "bired": "\u001b[1;91m",
+ "bigreen": "\u001b[1;92m",
+ "biyellow": "\u001b[1;93m",
+ "biblue": "\u001b[1;94m",
+ "bipurple": "\u001b[1;95m",
+ "bicyan": "\u001b[1;96m",
+ "biwhite": "\u001b[1;97m"
+ }
+ ],
+ "timestamp": "2024-01-19T11:35:13.436366565"
+ }
+}
\ No newline at end of file
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
new file mode 100644
index 0000000000..8940d32d1e
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
@@ -0,0 +1,29 @@
+nextflow_workflow {
+
+ name "Test Workflow UTILS_NFCORE_PIPELINE"
+ script "../main.nf"
+ config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config"
+ workflow "UTILS_NFCORE_PIPELINE"
+ tag "subworkflows"
+ tag "subworkflows_nfcore"
+ tag "utils_nfcore_pipeline"
+ tag "subworkflows/utils_nfcore_pipeline"
+
+ test("Should run without failures") {
+
+ when {
+ workflow {
+ """
+ input[0] = []
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success },
+ { assert snapshot(workflow.out).match() }
+ )
+ }
+ }
+}
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap
new file mode 100644
index 0000000000..d07ce54c51
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap
@@ -0,0 +1,15 @@
+{
+ "Should run without failures": {
+ "content": [
+ {
+ "0": [
+ true
+ ],
+ "valid_config": [
+ true
+ ]
+ }
+ ],
+ "timestamp": "2024-01-19T11:35:22.538940073"
+ }
+}
\ No newline at end of file
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config
new file mode 100644
index 0000000000..d0a926bf6d
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config
@@ -0,0 +1,9 @@
+manifest {
+ name = 'nextflow_workflow'
+ author = """nf-core"""
+ homePage = 'https://127.0.0.1'
+ description = """Dummy pipeline"""
+ nextflowVersion = '!>=23.04.0'
+ version = '9.9.9'
+ doi = 'https://doi.org/10.5281/zenodo.5070524'
+}
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml
new file mode 100644
index 0000000000..ac8523c9a2
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml
@@ -0,0 +1,2 @@
+subworkflows/utils_nfcore_pipeline:
+ - subworkflows/nf-core/utils_nfcore_pipeline/**
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
new file mode 100644
index 0000000000..2585b65d1b
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
@@ -0,0 +1,62 @@
+//
+// Subworkflow that uses the nf-validation plugin to render help text and parameter summary
+//
+
+/*
+========================================================================================
+ IMPORT NF-VALIDATION PLUGIN
+========================================================================================
+*/
+
+include { paramsHelp } from 'plugin/nf-validation'
+include { paramsSummaryLog } from 'plugin/nf-validation'
+include { validateParameters } from 'plugin/nf-validation'
+
+/*
+========================================================================================
+ SUBWORKFLOW DEFINITION
+========================================================================================
+*/
+
+workflow UTILS_NFVALIDATION_PLUGIN {
+
+ take:
+ print_help // boolean: print help
+    workflow_command // string: default command used to run pipeline
+ pre_help_text // string: string to be printed before help text and summary log
+ post_help_text // string: string to be printed after help text and summary log
+ validate_params // boolean: validate parameters
+ schema_filename // path: JSON schema file, null to use default value
+
+ main:
+
+ log.debug "Using schema file: ${schema_filename}"
+
+ // Default values for strings
+ pre_help_text = pre_help_text ?: ''
+ post_help_text = post_help_text ?: ''
+ workflow_command = workflow_command ?: ''
+
+ //
+ // Print help message if needed
+ //
+ if (print_help) {
+ log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text
+ System.exit(0)
+ }
+
+ //
+ // Print parameter summary to stdout
+ //
+ log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text
+
+ //
+ // Validate parameters relative to the parameter JSON schema
+ //
+ if (validate_params){
+ validateParameters(parameters_schema: schema_filename)
+ }
+
+ emit:
+ dummy_emit = true
+}
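Usage matches the call in PIPELINE_INITIALISATION earlier in this diff: the caller supplies the help flag, an example run command for the help text, optional banner strings, the validation toggle, and the schema path. A minimal sketch (the banner helpers assume includes from utils_nfcore_pipeline, as in PIPELINE_INITIALISATION):

    UTILS_NFVALIDATION_PLUGIN (
        params.help,
        "nextflow run ${workflow.manifest.name} --input samplesheet.csv --outdir <OUTDIR>",
        nfCoreLogo(monochrome_logs),   // printed before the help text / summary
        dashedLine(monochrome_logs),   // printed after the help text / summary
        params.validate_params,
        "nextflow_schema.json"
    )

Note that the help path (like the version path in UTILS_NEXTFLOW_PIPELINE) short-circuits with System.exit(0), so no downstream workflow blocks run on those invocations.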
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml
new file mode 100644
index 0000000000..3d4a6b04f5
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml
@@ -0,0 +1,44 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json
+name: "UTILS_NFVALIDATION_PLUGIN"
+description: Use nf-validation to initiate and validate a pipeline
+keywords:
+ - utility
+ - pipeline
+ - initialise
+ - validation
+components: []
+input:
+ - print_help:
+ type: boolean
+ description: |
+ Print help message and exit
+ - workflow_command:
+ type: string
+ description: |
+ The command to run the workflow e.g. "nextflow run main.nf"
+ - pre_help_text:
+ type: string
+ description: |
+ Text to print before the help message
+ - post_help_text:
+ type: string
+ description: |
+ Text to print after the help message
+ - validate_params:
+ type: boolean
+ description: |
+ Validate the parameters and error if invalid.
+ - schema_filename:
+ type: string
+ description: |
+ The filename of the schema to validate against.
+output:
+ - dummy_emit:
+ type: boolean
+ description: |
+ Dummy emit to make nf-core subworkflows lint happy
+authors:
+ - "@adamrtalbot"
+maintainers:
+ - "@adamrtalbot"
+ - "@maxulysse"
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test
new file mode 100644
index 0000000000..517ee54e48
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test
@@ -0,0 +1,200 @@
+nextflow_workflow {
+
+ name "Test Workflow UTILS_NFVALIDATION_PLUGIN"
+ script "../main.nf"
+ workflow "UTILS_NFVALIDATION_PLUGIN"
+ tag "subworkflows"
+ tag "subworkflows_nfcore"
+ tag "plugin/nf-validation"
+ tag "'plugin/nf-validation'"
+ tag "utils_nfvalidation_plugin"
+ tag "subworkflows/utils_nfvalidation_plugin"
+
+ test("Should run nothing") {
+
+ when {
+
+ params {
+ monochrome_logs = true
+ test_data = ''
+ }
+
+ workflow {
+ """
+ help = false
+ workflow_command = null
+ pre_help_text = null
+ post_help_text = null
+ validate_params = false
+ schema_filename = "$moduleTestDir/nextflow_schema.json"
+
+ input[0] = help
+ input[1] = workflow_command
+ input[2] = pre_help_text
+ input[3] = post_help_text
+ input[4] = validate_params
+ input[5] = schema_filename
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success }
+ )
+ }
+ }
+
+ test("Should run help") {
+
+
+ when {
+
+ params {
+ monochrome_logs = true
+ test_data = ''
+ }
+ workflow {
+ """
+ help = true
+ workflow_command = null
+ pre_help_text = null
+ post_help_text = null
+ validate_params = false
+ schema_filename = "$moduleTestDir/nextflow_schema.json"
+
+ input[0] = help
+ input[1] = workflow_command
+ input[2] = pre_help_text
+ input[3] = post_help_text
+ input[4] = validate_params
+ input[5] = schema_filename
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success },
+ { assert workflow.exitStatus == 0 },
+ { assert workflow.stdout.any { it.contains('Input/output options') } },
+ { assert workflow.stdout.any { it.contains('--outdir') } }
+ )
+ }
+ }
+
+ test("Should run help with command") {
+
+ when {
+
+ params {
+ monochrome_logs = true
+ test_data = ''
+ }
+ workflow {
+ """
+ help = true
+ workflow_command = "nextflow run noorg/doesntexist"
+ pre_help_text = null
+ post_help_text = null
+ validate_params = false
+ schema_filename = "$moduleTestDir/nextflow_schema.json"
+
+ input[0] = help
+ input[1] = workflow_command
+ input[2] = pre_help_text
+ input[3] = post_help_text
+ input[4] = validate_params
+ input[5] = schema_filename
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success },
+ { assert workflow.exitStatus == 0 },
+ { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } },
+ { assert workflow.stdout.any { it.contains('Input/output options') } },
+ { assert workflow.stdout.any { it.contains('--outdir') } }
+ )
+ }
+ }
+
+ test("Should run help with extra text") {
+
+
+ when {
+
+ params {
+ monochrome_logs = true
+ test_data = ''
+ }
+ workflow {
+ """
+ help = true
+ workflow_command = "nextflow run noorg/doesntexist"
+ pre_help_text = "pre-help-text"
+ post_help_text = "post-help-text"
+ validate_params = false
+ schema_filename = "$moduleTestDir/nextflow_schema.json"
+
+ input[0] = help
+ input[1] = workflow_command
+ input[2] = pre_help_text
+ input[3] = post_help_text
+ input[4] = validate_params
+ input[5] = schema_filename
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success },
+ { assert workflow.exitStatus == 0 },
+ { assert workflow.stdout.any { it.contains('pre-help-text') } },
+ { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } },
+ { assert workflow.stdout.any { it.contains('Input/output options') } },
+ { assert workflow.stdout.any { it.contains('--outdir') } },
+ { assert workflow.stdout.any { it.contains('post-help-text') } }
+ )
+ }
+ }
+
+ test("Should validate params") {
+
+ when {
+
+ params {
+ monochrome_logs = true
+ test_data = ''
+ outdir = 1
+ }
+ workflow {
+ """
+ help = false
+ workflow_command = null
+ pre_help_text = null
+ post_help_text = null
+ validate_params = true
+ schema_filename = "$moduleTestDir/nextflow_schema.json"
+
+ input[0] = help
+ input[1] = workflow_command
+ input[2] = pre_help_text
+ input[3] = post_help_text
+ input[4] = validate_params
+ input[5] = schema_filename
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.failed },
+ { assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } }
+ )
+ }
+ }
+}
\ No newline at end of file
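
The six positional inputs exercised in these tests mirror the subworkflow's `take:` block: help flag, workflow command, pre/post help text, the validation toggle, and the schema file name. For orientation, a minimal sketch of how a pipeline might wire the subworkflow up; the include path and the example run command here are assumptions for illustration, not the template's actual wiring:

```groovy
// Minimal sketch, assuming the subworkflow sits at the standard template path.
include { UTILS_NFVALIDATION_PLUGIN } from './subworkflows/nf-core/utils_nfvalidation_plugin'

workflow {
    UTILS_NFVALIDATION_PLUGIN (
        params.help,            // print help text and exit?
        "nextflow run my-org/my-pipeline --input samplesheet.csv --outdir <OUTDIR>", // command shown in the help (illustrative)
        null,                   // optional text printed before the help message
        null,                   // optional text printed after the help message
        params.validate_params, // validate params against the schema?
        "nextflow_schema.json"  // schema file used for help text and validation
    )
}
```
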
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json
new file mode 100644
index 0000000000..7626c1c93e
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json
@@ -0,0 +1,96 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema",
+ "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json",
+ "title": ". pipeline parameters",
+ "description": "",
+ "type": "object",
+ "definitions": {
+ "input_output_options": {
+ "title": "Input/output options",
+ "type": "object",
+ "fa_icon": "fas fa-terminal",
+ "description": "Define where the pipeline should find input data and save output data.",
+ "required": ["outdir"],
+ "properties": {
+ "validate_params": {
+ "type": "boolean",
+ "description": "Validate parameters?",
+ "default": true,
+ "hidden": true
+ },
+ "outdir": {
+ "type": "string",
+ "format": "directory-path",
+ "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.",
+ "fa_icon": "fas fa-folder-open"
+ },
+ "test_data_base": {
+ "type": "string",
+ "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules",
+ "description": "Base for test data directory",
+ "hidden": true
+ },
+ "test_data": {
+ "type": "string",
+ "description": "Fake test data param",
+ "hidden": true
+ }
+ }
+ },
+ "generic_options": {
+ "title": "Generic options",
+ "type": "object",
+ "fa_icon": "fas fa-file-import",
+ "description": "Less common options for the pipeline, typically set in a config file.",
+ "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.",
+ "properties": {
+ "help": {
+ "type": "boolean",
+ "description": "Display help text.",
+ "fa_icon": "fas fa-question-circle",
+ "hidden": true
+ },
+ "version": {
+ "type": "boolean",
+ "description": "Display version and exit.",
+ "fa_icon": "fas fa-question-circle",
+ "hidden": true
+ },
+ "logo": {
+ "type": "boolean",
+ "default": true,
+ "description": "Display nf-core logo in console output.",
+ "fa_icon": "fas fa-image",
+ "hidden": true
+ },
+ "singularity_pull_docker_container": {
+ "type": "boolean",
+ "description": "Pull Singularity container from Docker?",
+ "hidden": true
+ },
+ "publish_dir_mode": {
+ "type": "string",
+ "default": "copy",
+ "description": "Method used to save pipeline results to output directory.",
+ "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.",
+ "fa_icon": "fas fa-copy",
+ "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"],
+ "hidden": true
+ },
+ "monochrome_logs": {
+ "type": "boolean",
+ "description": "Use monochrome_logs",
+ "hidden": true
+ }
+ }
+ }
+ },
+ "allOf": [
+ {
+ "$ref": "#/definitions/input_output_options"
+ },
+ {
+ "$ref": "#/definitions/generic_options"
+ }
+ ]
+}
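
This fixture gives the assertions above something concrete to bite on: `outdir` is declared as a required string with `format: directory-path`, so the `outdir = 1` used in "Should validate params" fails type checking. A hedged sketch of triggering the same failure directly with nf-validation's `validateParameters` function, assuming the plugin is enabled in `nextflow.config`:

```groovy
// Sketch of the failure mode exercised by "Should validate params":
// launching with `--outdir 1` makes validation reject the integer value.
include { validateParameters } from 'plugin/nf-validation'

workflow {
    // Raises "ERROR ~ ERROR: Validation of pipeline parameters failed!"
    // because outdir must be a string matching the directory-path format.
    validateParameters(parameters_schema: 'nextflow_schema.json')
}
```
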
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml
new file mode 100644
index 0000000000..60b1cfff49
--- /dev/null
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml
@@ -0,0 +1,2 @@
+subworkflows/utils_nfvalidation_plugin:
+ - subworkflows/nf-core/utils_nfvalidation_plugin/**
diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf
index 4583f2a9d6..68adbaa328 100644
--- a/nf_core/pipeline-template/workflows/pipeline.nf
+++ b/nf_core/pipeline-template/workflows/pipeline.nf
@@ -1,54 +1,15 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- PRINT PARAMS SUMMARY
+ IMPORT MODULES / SUBWORKFLOWS / FUNCTIONS
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
-include { paramsSummaryLog; paramsSummaryMap } from 'plugin/nf-validation'
-
-def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs)
-def citation = '\n' + WorkflowMain.citation(workflow) + '\n'
-def summary_params = paramsSummaryMap(workflow)
-
-// Print parameter summary log to screen
-log.info logo + paramsSummaryLog(workflow) + citation
-
-Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.initialise(params, log)
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- CONFIG FILES
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true)
-ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config, checkIfExists: true ) : Channel.empty()
-ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo, checkIfExists: true ) : Channel.empty()
-ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true)
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- IMPORT LOCAL MODULES/SUBWORKFLOWS
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-//
-// SUBWORKFLOW: Consisting of a mix of local and nf-core/modules
-//
-include { INPUT_CHECK } from '../subworkflows/local/input_check'
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- IMPORT NF-CORE MODULES/SUBWORKFLOWS
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-//
-// MODULE: Installed directly from nf-core/modules
-//
-include { FASTQC } from '../modules/nf-core/fastqc/main'
-include { MULTIQC } from '../modules/nf-core/multiqc/main'
-include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main'
+include { FASTQC } from '../modules/nf-core/fastqc/main'
+include { MULTIQC } from '../modules/nf-core/multiqc/main'
+include { paramsSummaryMap } from 'plugin/nf-validation'
+include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'
+include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline'
+include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -56,50 +17,45 @@ include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoft
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
-// Info required for completion email and summary
-def multiqc_report = []
-
workflow {{ short_name|upper }} {
- ch_versions = Channel.empty()
+ take:
+ ch_samplesheet // channel: samplesheet read in from --input
- //
- // SUBWORKFLOW: Read in samplesheet, validate and stage input files
- //
- INPUT_CHECK (
- file(params.input)
- )
- ch_versions = ch_versions.mix(INPUT_CHECK.out.versions)
- // TODO: OPTIONAL, you can use nf-validation plugin to create an input channel from the samplesheet with Channel.fromSamplesheet("input")
- // See the documentation https://nextflow-io.github.io/nf-validation/samplesheets/fromSamplesheet/
- // ! There is currently no tooling to help you write a sample sheet schema
+ main:
+
+ ch_versions = Channel.empty()
+ ch_multiqc_files = Channel.empty()
//
// MODULE: Run FastQC
//
FASTQC (
- INPUT_CHECK.out.reads
+ ch_samplesheet
)
+ ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]})
ch_versions = ch_versions.mix(FASTQC.out.versions.first())
- CUSTOM_DUMPSOFTWAREVERSIONS (
- ch_versions.unique().collectFile(name: 'collated_versions.yml')
- )
+ //
+ // Collate and save software versions
+ //
+ softwareVersionsToYAML(ch_versions)
+ .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_pipeline_software_mqc_versions.yml', sort: true, newLine: true)
+ .set { ch_collated_versions }
//
// MODULE: MultiQC
//
- workflow_summary = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.paramsSummaryMultiqc(workflow, summary_params)
- ch_workflow_summary = Channel.value(workflow_summary)
-
- methods_description = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params)
- ch_methods_description = Channel.value(methods_description)
-
- ch_multiqc_files = Channel.empty()
- ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml'))
- ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml'))
- ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect())
- ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([]))
+ ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true)
+ ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multiqc_config, checkIfExists: true) : Channel.empty()
+ ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath(params.multiqc_logo, checkIfExists: true) : Channel.empty()
+ summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json")
+ ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params))
+ ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true)
+ ch_methods_description = Channel.value(methodsDescriptionText(ch_multiqc_custom_methods_description))
+ ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml'))
+ ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions)
+ ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml', sort: false))
MULTIQC (
ch_multiqc_files.collect(),
@@ -107,31 +63,10 @@ workflow {{ short_name|upper }} {
ch_multiqc_custom_config.toList(),
ch_multiqc_logo.toList()
)
- multiqc_report = MULTIQC.out.report.toList()
-}
-
-/*
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- COMPLETION EMAIL AND SUMMARY
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-*/
-
-workflow.onComplete {
- if (params.email || params.email_on_fail) {
- NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report)
- }
- NfcoreTemplate.dump_parameters(workflow, params)
- NfcoreTemplate.summary(workflow, params, log)
- if (params.hook_url) {
- NfcoreTemplate.IM_notification(workflow, params, summary_params, projectDir, log)
- }
-}
-workflow.onError {
- if (workflow.errorReport.contains("Process requirement exceeds available memory")) {
- println("π Default resources exceed availability π ")
- println("π‘ See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources π‘")
- }
+ emit:
+ multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html
+ versions = ch_versions // channel: [ path(versions.yml) ]
}
/*
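
Since the workflow above now exposes explicit `take:`/`emit:` sections instead of reading `params.input` and handling completion itself, it has to be invoked by a wrapper. A sketch of what such a caller could look like, reusing the Jinja placeholders from this template; the wrapper name and include path are illustrative, and the real entry point lives elsewhere in the template:

```groovy
// Illustrative caller only; names follow the template's Jinja conventions.
include { {{ short_name|upper }} } from './workflows/{{ short_name }}'

workflow NFCORE_{{ short_name|upper }} {
    take:
    samplesheet // channel: samplesheet read in from --input

    main:
    {{ short_name|upper }} ( samplesheet )

    emit:
    multiqc_report = {{ short_name|upper }}.out.multiqc_report // channel: /path/to/multiqc_report.html
}
```
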
diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py
index d3a6a25e82..5081522899 100644
--- a/tests/lint/files_exist.py
+++ b/tests/lint/files_exist.py
@@ -61,7 +61,9 @@ def test_files_exist_pass_conditional(self):
lint_obj = nf_core.lint.PipelineLint(new_pipeline)
lint_obj._load()
lint_obj.nf_config["plugins"] = []
- Path(new_pipeline, "lib/nfcore_external_java_deps.jar").touch()
+ lib_dir = Path(new_pipeline, "lib")
+ lib_dir.mkdir()
+ (lib_dir / "nfcore_external_java_deps.jar").touch()
results = lint_obj.files_exist()
assert results["failed"] == []
assert results["ignored"] == []
@@ -71,7 +73,9 @@ def test_files_exist_fail_conditional(self):
new_pipeline = self._make_pipeline_copy()
lint_obj = nf_core.lint.PipelineLint(new_pipeline)
lint_obj._load()
- Path(new_pipeline, "lib/nfcore_external_java_deps.jar").touch()
+ lib_dir = Path(new_pipeline, "lib")
+ lib_dir.mkdir()
+ (lib_dir / "nfcore_external_java_deps.jar").touch()
results = lint_obj.files_exist()
assert results["failed"] == ["File must be removed: `lib/nfcore_external_java_deps.jar`"]
assert results["ignored"] == []
diff --git a/tests/modules/lint.py b/tests/modules/lint.py
index a5d8567b76..9bd280ddd8 100644
--- a/tests/modules/lint.py
+++ b/tests/modules/lint.py
@@ -40,7 +40,6 @@ def test_modules_lint_empty(self):
"""Test linting a pipeline with no modules installed"""
self.mods_remove.remove("fastqc", force=True)
self.mods_remove.remove("multiqc", force=True)
- self.mods_remove.remove("custom/dumpsoftwareversions", force=True)
with pytest.raises(LookupError):
nf_core.modules.ModuleLint(dir=self.pipeline_dir)
@@ -58,7 +57,6 @@ def test_modules_lint_no_gitlab(self):
"""Test linting a pipeline with no modules installed"""
self.mods_remove.remove("fastqc", force=True)
self.mods_remove.remove("multiqc", force=True)
- self.mods_remove.remove("custom/dumpsoftwareversions", force=True)
with pytest.raises(LookupError):
nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL)
diff --git a/tests/modules/update.py b/tests/modules/update.py
index 5208070fa5..5cf24b56f0 100644
--- a/tests/modules/update.py
+++ b/tests/modules/update.py
@@ -317,13 +317,13 @@ def test_update_only_show_differences(self, mock_prompt):
mod_json = modules_json.get_modules_json()
# Loop through all modules and check that they are NOT updated (according to the modules.json file)
- # Modules that can be updated but shouldn't are custom/dumpsoftwareversions and fastqc
+ # A module that can be updated but shouldn't is fastqc
# Module multiqc is already up to date so don't check
- for mod in ["custom/dumpsoftwareversions", "fastqc"]:
- correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"]
- current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"]
- assert correct_git_sha != current_git_sha
- assert cmp_module(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True
+ mod = "fastqc"
+ correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"]
+ current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"]
+ assert correct_git_sha != current_git_sha
+ assert cmp_module(Path(tmpdir, mod), Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, mod)) is True
# Mock questionary answer: do not update module, only show diffs
@@ -357,19 +357,19 @@ def test_update_only_show_differences_when_patch(self, mock_prompt):
patch_obj = ModulePatch(self.pipeline_dir)
patch_obj.patch("fastqc")
# Check that a patch file with the correct name has been created
- assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "fastqc.diff"}
+ assert "fastqc.diff" in set(os.listdir(module_path))
# Update all modules
assert update_obj.update() is True
mod_json = modules_json.get_modules_json()
# Loop through all modules and check that they are NOT updated (according to the modules.json file)
- # Modules that can be updated but shouldn't are custom/dumpsoftwareversions and fastqc
+ # A module that can be updated but shouldn't is fastqc
# Module multiqc is already up to date so don't check
- for mod in ["custom/dumpsoftwareversions", "fastqc"]:
- correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"]
- current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"]
- assert correct_git_sha != current_git_sha
+ mod = "fastqc"
+ correct_git_sha = list(update_obj.modules_repo.get_component_git_log(mod, "modules", depth=1))[0]["git_sha"]
+ current_git_sha = mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME][mod]["git_sha"]
+ assert correct_git_sha != current_git_sha
def cmp_module(dir1, dir2):
diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py
index b53fef7f0e..8804f8bf6f 100644
--- a/tests/subworkflows/lint.py
+++ b/tests/subworkflows/lint.py
@@ -19,6 +19,9 @@ def test_subworkflows_lint(self):
def test_subworkflows_lint_empty(self):
"""Test linting a pipeline with no subworkflows installed"""
+ self.subworkflow_remove.remove("utils_nextflow_pipeline", force=True)
+ self.subworkflow_remove.remove("utils_nfcore_pipeline", force=True)
+ self.subworkflow_remove.remove("utils_nfvalidation_plugin", force=True)
with pytest.raises(LookupError):
nf_core.subworkflows.SubworkflowLint(dir=self.pipeline_dir)
diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py
index dec67875bd..c6a3b98454 100644
--- a/tests/subworkflows/remove.py
+++ b/tests/subworkflows/remove.py
@@ -20,13 +20,15 @@ def test_subworkflows_remove_subworkflow(self):
mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json()
assert self.subworkflow_remove.remove("bam_sort_stats_samtools")
mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json()
- assert Path.exists(subworkflow_path) is False
assert Path.exists(bam_sort_stats_samtools_path) is False
assert Path.exists(bam_stats_samtools_path) is False
assert Path.exists(samtools_index_path) is False
assert mod_json_before != mod_json_after
# assert the subworkflow is removed from the subworkflows entry in modules.json
- assert "subworkflows" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"].keys()
+ assert (
+ "bam_sort_stats_samtools"
+ not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys()
+ )
assert "samtools/index" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"].keys()
@@ -44,13 +46,15 @@ def test_subworkflows_remove_subworkflow_keep_installed_module(self):
assert self.subworkflow_remove.remove("bam_sort_stats_samtools")
mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json()
- assert Path.exists(subworkflow_path) is False
assert Path.exists(bam_sort_stats_samtools_path) is False
assert Path.exists(bam_stats_samtools_path) is False
assert Path.exists(samtools_index_path) is True
assert mod_json_before != mod_json_after
# assert the subworkflow is removed from the subworkflows entry in modules.json
- assert "subworkflows" not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"].keys()
+ assert (
+ "bam_sort_stats_samtools"
+ not in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["subworkflows"].keys()
+ )
assert (
"samtools/index"
in mod_json_after["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"].keys()
diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py
index 32a69ba180..9ddc9bec0c 100644
--- a/tests/subworkflows/update.py
+++ b/tests/subworkflows/update.py
@@ -171,9 +171,9 @@ def test_update_with_config_fix_all(self):
with open(Path(self.pipeline_dir, config_fn), "w") as f:
yaml.dump(tools_config, f)
- # Update all subworkflows in the pipeline
- update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=True, show_diff=False)
- assert update_obj.update() is True
+ # Update fastq_align_bowtie2
+ update_obj = SubworkflowUpdate(self.pipeline_dir, update_all=False, update_deps=True, show_diff=False)
+ assert update_obj.update("fastq_align_bowtie2") is True
# Check that the git sha for fastq_align_bowtie2 is correctly downgraded
mod_json = ModulesJson(self.pipeline_dir).get_modules_json()