{
items &&
@@ -68,9 +69,8 @@ url = url.replace(/\.html$/, '');
'stretched-link',
]}
href={child.slug}
- >
- {child.title}
-
+ />
+
);
})}
@@ -185,4 +185,17 @@ url = url.replace(/\.html$/, '');
}
}
}
+ // show only the active section
+ .nav.accordion {
+ ul {
+ display: none;
+ }
+ h6:has(.active) ~ ul,
+ ul:has(.active) {
+ display: block;
+ }
+ div.mb-3:not(:has(.active)) {
+ margin-bottom: 0 !important;
+ }
+ }
diff --git a/src/components/sidebar/SidebarToc.svelte b/src/components/sidebar/SidebarToc.svelte
index 4af946fe8b..d9a4b9ebac 100644
--- a/src/components/sidebar/SidebarToc.svelte
+++ b/src/components/sidebar/SidebarToc.svelte
@@ -24,7 +24,7 @@
// make margin classes from min to max heading depth
let headingMargin = {};
for (let i = minHeadingDepth; i <= 4; i++) {
- headingMargin[i] = 'ps-' + (i - minHeadingDepth) * 2;
+ headingMargin[i] = 'ps-' + (i - minHeadingDepth);
}
let activeHeading = {};
onMount(() => {
@@ -47,7 +47,7 @@
-
+
{#if headings.length > 1}
On this page
{/if}
@@ -57,7 +57,7 @@
{#each headings as heading (heading)}
diff --git a/src/content/config.ts b/src/content/config.ts
index a4de3ad68d..474761451b 100644
--- a/src/content/config.ts
+++ b/src/content/config.ts
@@ -124,10 +124,13 @@ const blog = defineCollection({
const pipelines = defineCollection({});
+const tools = defineCollection({});
+
export const collections = {
events: events,
docs: docs,
about: about,
pipelines: pipelines,
blog: blog,
+ tools: tools,
};
diff --git a/src/content/tools/docs/1.10.1/bump_version.md b/src/content/tools/docs/1.10.1/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – if the version number cannot be found.
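+
+A minimal usage sketch based on the signatures above; the pipeline path and version strings are placeholders, and the lint object may first need its config populated (for example via `nf_core.utils.fetch_wf_config`):
+
+```python
+import nf_core.lint
+import nf_core.bump_version
+
+# Hypothetical pipeline path; bump to a new semantic version and Nextflow requirement.
+lint_obj = nf_core.lint.PipelineLint("/path/to/nf-core-pipeline")
+nf_core.bump_version.bump_pipeline_version(lint_obj, "1.1.0")
+nf_core.bump_version.bump_nextflow_version(lint_obj, "20.04.0")
+```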
diff --git a/src/content/tools/docs/1.10.1/create.md b/src/content/tools/docs/1.10.1/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.10.1/download.md b/src/content/tools/docs/1.10.1/download.md
new file mode 100644
index 0000000000..fbea76fc98
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/download.md
@@ -0,0 +1,68 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum of a file on disk and validates it against the expected value.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
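+
+A minimal usage sketch based on the constructor and methods documented above; the pipeline name, release and output directory are placeholders:
+
+```python
+import nf_core.download
+
+# Download a released workflow plus the centralised configs, without Singularity images.
+dl = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/hlatyping",
+    release="1.2.0",
+    singularity=False,
+    outdir="nf-core-hlatyping-1.2.0",
+)
+dl.download_workflow()
+```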
diff --git a/src/content/tools/docs/1.10.1/index.md b/src/content/tools/docs/1.10.1/index.md
new file mode 100644
index 0000000000..2ad86b1300
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/index.md
@@ -0,0 +1,41 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.10.1/licences.md b/src/content/tools/docs/1.10.1/licences.md
new file mode 100644
index 0000000000..dcc395683a
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/licences.md
@@ -0,0 +1,47 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
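+
+A minimal usage sketch based on the class documented above; the pipeline name is a placeholder:
+
+```python
+import nf_core.licences
+
+# Fetch and print dependency licences for a pipeline via the documented licences action.
+licences = nf_core.licences.WorkflowLicences("hlatyping")
+licences.run_licences()
+```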
diff --git a/src/content/tools/docs/1.10.1/lint.md b/src/content/tools/docs/1.10.1/lint.md
new file mode 100644
index 0000000000..754628ce81
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/lint.md
@@ -0,0 +1,372 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+ {
+    <package name>: <API JSON response>
+ }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat <pipeline-dir>`. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.single_end = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+Makes sure it is triggered only on `release`.
+
+#### `check_actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+Makes sure it is triggered only on `push` to `master`.
+
+#### `check_actions_branch_protection(){:python}`
+
+Checks that the GitHub Actions branch protection workflow is valid.
+
+Makes sure PRs can only come from nf-core dev or ‘patch’ of a fork.
+
+#### `check_actions_ci(){:python}`
+
+Checks that the GitHub Actions CI workflow is valid
+
+Makes sure tests run with the required nextflow version.
+
+#### `check_actions_lint(){:python}`
+
+Checks that the GitHub Actions lint workflow is valid
+
+Makes sure `nf-core lint` and `markdownlint` run.
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends an HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_cookiecutter_strings(){:python}`
+
+Look for the string ‘cookiecutter’ in all pipeline files.
+Finding it probably means that there has been a copy+paste error from the template.
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks files
+for presence.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'nextflow_schema.json',
+'Dockerfile',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md',
+'.github/workflows/branch.yml',
+'.github/workflows/ci.yml',
+'.github/workflows/linting.yml'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+At least one string in each list must be present for fail and warn.
+Any config in config_fail_ifdefined results in a failure.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends an HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved or the connection timed out.
+
+#### `check_pipeline_name(){:python}`
+
+Check whether pipeline name adheres to lower case/no hyphen naming convention
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_schema_lint(){:python}`
+
+Lint the pipeline schema
+
+#### `check_schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline
+
+#### `check_version_consistency(){:python}`
+
+Checks container tags versions.
+
+Runs on `process.container` (if set) and `$GITHUB_REF` (if a GitHub Actions release).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `get_results_md(){:python}`
+
+Function to create a markdown file suitable for posting in a GitHub comment
+
+#### `github_comment(){:python}`
+
+If we are running in a GitHub PR, try to post results as a comment
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (eg. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  If a critical problem is found, an AssertionError is raised.
+
+#### `save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, show_passed=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
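+
+A minimal usage sketch based on the `run_linting()` signature above; the pipeline path is a placeholder:
+
+```python
+import nf_core.lint
+
+# Lint a local pipeline in normal mode and summarise the results.
+lint_obj = nf_core.lint.run_linting("/path/to/nf-core-pipeline", release_mode=False)
+print(f"passed: {len(lint_obj.passed)}, warned: {len(lint_obj.warned)}, failed: {len(lint_obj.failed)}")
+```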
diff --git a/src/content/tools/docs/1.10.1/list.md b/src/content/tools/docs/1.10.1/list.md
new file mode 100644
index 0000000000..0e4cec26f3
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/list.md
@@ -0,0 +1,93 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
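+
+A minimal usage sketch based on the functions documented above; the filter keyword is a placeholder:
+
+```python
+import nf_core.list
+
+# Print a table of remote nf-core pipelines matching a keyword, sorted by GitHub stars.
+nf_core.list.list_workflows(filter_by=["rna"], sort_by="stars", as_json=False)
+```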
diff --git a/src/content/tools/docs/1.10.1/utils.md b/src/content/tools/docs/1.10.1/utils.md
new file mode 100644
index 0000000000..7988a40936
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/utils.md
@@ -0,0 +1,46 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes the API URL as the api_url argument.
+
+Expects the API response to be valid JSON and to contain a top-level ‘status’ key.
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
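+
+A minimal usage sketch based on `fetch_wf_config()` above; the workflow path and config key are placeholders:
+
+```python
+import nf_core.utils
+
+# Resolve the flattened Nextflow configuration of a local workflow.
+config = nf_core.utils.fetch_wf_config("/path/to/nf-core-pipeline")
+print(config.get("params.outdir"))
+```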
diff --git a/src/content/tools/docs/1.10.1/workflow.md b/src/content/tools/docs/1.10.1/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.10.1/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/1.10.2/bump_version.md b/src/content/tools/docs/1.10.2/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – if the version number cannot be found.
diff --git a/src/content/tools/docs/1.10.2/create.md b/src/content/tools/docs/1.10.2/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.10.2/download.md b/src/content/tools/docs/1.10.2/download.md
new file mode 100644
index 0000000000..fbea76fc98
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/download.md
@@ -0,0 +1,68 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum of a file on disk and validates it against the expected value.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
diff --git a/src/content/tools/docs/1.10.2/index.md b/src/content/tools/docs/1.10.2/index.md
new file mode 100644
index 0000000000..2ad86b1300
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/index.md
@@ -0,0 +1,41 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.10.2/licences.md b/src/content/tools/docs/1.10.2/licences.md
new file mode 100644
index 0000000000..dcc395683a
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/licences.md
@@ -0,0 +1,47 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
diff --git a/src/content/tools/docs/1.10.2/lint.md b/src/content/tools/docs/1.10.2/lint.md
new file mode 100644
index 0000000000..754628ce81
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/lint.md
@@ -0,0 +1,372 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+ {
+    <package name>: <API JSON response>
+ }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat <pipeline-dir>`. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.single_end = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+Makes sure it is triggered only on `release`.
+
+#### `check_actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+Makes sure it is triggered only on `push` to `master`.
+
+#### `check_actions_branch_protection(){:python}`
+
+Checks that the GitHub Actions branch protection workflow is valid.
+
+Makes sure PRs can only come from nf-core dev or ‘patch’ of a fork.
+
+#### `check_actions_ci(){:python}`
+
+Checks that the GitHub Actions CI workflow is valid
+
+Makes sure tests run with the required nextflow version.
+
+#### `check_actions_lint(){:python}`
+
+Checks that the GitHub Actions lint workflow is valid
+
+Makes sure `nf-core lint` and `markdownlint` run.
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends an HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_cookiecutter_strings(){:python}`
+
+Look for the string ‘cookiecutter’ in all pipeline files.
+Finding it probably means that there has been a copy+paste error from the template.
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks files
+for presence.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'nextflow_schema.json',
+'Dockerfile',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md',
+'.github/workflows/branch.yml',
+'.github/workflows/ci.yml',
+'.github/workflows/linting.yml'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+At least one string in each list must be present for fail and warn.
+Any config in config_fail_ifdefined results in a failure.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends an HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved or the connection timed out.
+
+#### `check_pipeline_name(){:python}`
+
+Check whether pipeline name adheres to lower case/no hyphen naming convention
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_schema_lint(){:python}`
+
+Lint the pipeline schema
+
+#### `check_schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline
+
+#### `check_version_consistency(){:python}`
+
+Checks container tags versions.
+
+Runs on `process.container` (if set) and `$GITHUB_REF` (if a GitHub Actions release).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `get_results_md(){:python}`
+
+Function to create a markdown file suitable for posting in a GitHub comment
+
+#### `github_comment(){:python}`
+
+If we are running in a GitHub PR, try to post results as a comment
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (eg. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  If a critical problem is found, an AssertionError is raised.
+
+#### `save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, show_passed=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
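+
+A hypothetical sketch chaining the `PipelineLint` methods documented above; the path and output file name are placeholders:
+
+```python
+import nf_core.lint
+
+# Run the individual checks in release mode and save the results for downstream use.
+lint_obj = nf_core.lint.PipelineLint("/path/to/nf-core-pipeline")
+lint_obj.lint_pipeline(release_mode=True)
+lint_obj.save_json_results("lint_results.json")
+markdown_report = lint_obj.get_results_md()
+```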
diff --git a/src/content/tools/docs/1.10.2/list.md b/src/content/tools/docs/1.10.2/list.md
new file mode 100644
index 0000000000..0e4cec26f3
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/list.md
@@ -0,0 +1,93 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
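+
+A minimal usage sketch based on `pretty_date()` and `get_local_wf()` above; the pipeline name is a placeholder:
+
+```python
+import datetime
+import nf_core.list
+
+# Render a human-readable age for a timestamp.
+yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
+print(nf_core.list.pretty_date(yesterday))
+
+# Make sure a workflow is cached locally, pulling it with Nextflow if needed.
+nf_core.list.get_local_wf("nf-core/hlatyping")
+```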
diff --git a/src/content/tools/docs/1.10.2/utils.md b/src/content/tools/docs/1.10.2/utils.md
new file mode 100644
index 0000000000..7988a40936
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/utils.md
@@ -0,0 +1,46 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes the API URL as the api_url argument.
+
+Expects the API response to be valid JSON and to contain a top-level ‘status’ key.
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
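+
+A minimal usage sketch based on `wait_cli_function()` above; the polled condition is a placeholder:
+
+```python
+import time
+import nf_core.utils
+
+# Show a spinner, polling every 2 seconds (20 tenths of a second), until the condition returns True.
+start = time.time()
+nf_core.utils.wait_cli_function(lambda: time.time() - start > 10, poll_every=20)
+```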
diff --git a/src/content/tools/docs/1.10.2/workflow.md b/src/content/tools/docs/1.10.2/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.10.2/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/1.10/bump_version.md b/src/content/tools/docs/1.10/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.10/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – if the version number cannot be found.
diff --git a/src/content/tools/docs/1.10/create.md b/src/content/tools/docs/1.10/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.10/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.10/download.md b/src/content/tools/docs/1.10/download.md
new file mode 100644
index 0000000000..fbea76fc98
--- /dev/null
+++ b/src/content/tools/docs/1.10/download.md
@@ -0,0 +1,68 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum of a file on disk and validates it against the expected value.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
diff --git a/src/content/tools/docs/1.10/index.md b/src/content/tools/docs/1.10/index.md
new file mode 100644
index 0000000000..2ad86b1300
--- /dev/null
+++ b/src/content/tools/docs/1.10/index.md
@@ -0,0 +1,41 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.10/licences.md b/src/content/tools/docs/1.10/licences.md
new file mode 100644
index 0000000000..dcc395683a
--- /dev/null
+++ b/src/content/tools/docs/1.10/licences.md
@@ -0,0 +1,47 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
diff --git a/src/content/tools/docs/1.10/lint.md b/src/content/tools/docs/1.10/lint.md
new file mode 100644
index 0000000000..10a9c10c22
--- /dev/null
+++ b/src/content/tools/docs/1.10/lint.md
@@ -0,0 +1,372 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: (`<test-id>`, `<message>`)
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+ {
+    <package name>: <API JSON response>
+ }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat <pipeline-dir>`. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.single_end = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+Makes sure it is triggered only on `release`.
+
+#### `check_actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+Makes sure it is triggered only on `push` to `master`.
+
+#### `check_actions_branch_protection(){:python}`
+
+Checks that the GitHub Actions branch protection workflow is valid.
+
+Makes sure PRs can only come from nf-core dev or ‘patch’ of a fork.
+
+#### `check_actions_ci(){:python}`
+
+Checks that the GitHub Actions CI workflow is valid
+
+Makes sure tests run with the required nextflow version.
+
+#### `check_actions_lint(){:python}`
+
+Checks that the GitHub Actions lint workflow is valid
+
+Makes sure `nf-core lint` and `markdownlint` run.
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends an HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_cookiecutter_strings(){:python}`
+
+Look for the string ‘cookiecutter’ in all pipeline files.
+Finding it probably means that there has been a copy+paste error from the template.
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks files
+for presence.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'nextflow_schema.json',
+'Dockerfile',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md',
+'.github/workflows/branch.yml',
+'.github/workflows/ci.yml',
+'.github/workflows/linting.yml'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+At least one string in each list must be present for fail and warn.
+Any config in config_fail_ifdefined results in a failure.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends an HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved or the connection timed out.
+
+#### `check_pipeline_name(){:python}`
+
+Check whether pipeline name adheres to lower case/no hyphen naming convention
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_schema_lint(){:python}`
+
+Lint the pipeline schema
+
+#### `check_schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline
+
+#### `check_version_consistency(){:python}`
+
+Checks container tags versions.
+
+Runs on `process.container` (if set) and `$GITHUB_REF` (if a GitHub Actions release).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `get_results_md(){:python}`
+
+Function to create a markdown file suitable for posting in a GitHub comment
+
+#### `github_comment(){:python}`
+
+If we are running in a GitHub PR, try to post results as a comment
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns a summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (e.g. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **AssertionError** – if a critical problem is found.
+
+#### `save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
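+
+For orientation, a minimal sketch of calling the module from Python rather than through the `nf-core lint` command (the path is a placeholder):
+
+```python
+import nf_core.lint
+
+# Run all lint checks on a local pipeline checkout (placeholder path).
+lint_obj = nf_core.lint.run_linting("/path/to/nf-core-pipeline", release_mode=False)
+
+# The returned PipelineLint object holds (test id, message) tuples.
+print("Passed: {}".format(len(lint_obj.passed)))
+print("Warned: {}".format(len(lint_obj.warned)))
+print("Failed: {}".format(len(lint_obj.failed)))
+```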
diff --git a/src/content/tools/docs/1.10/list.md b/src/content/tools/docs/1.10/list.md
new file mode 100644
index 0000000000..0e4cec26f3
--- /dev/null
+++ b/src/content/tools/docs/1.10/list.md
@@ -0,0 +1,93 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
diff --git a/src/content/tools/docs/1.10/utils.md b/src/content/tools/docs/1.10/utils.md
new file mode 100644
index 0000000000..7988a40936
--- /dev/null
+++ b/src/content/tools/docs/1.10/utils.md
@@ -0,0 +1,46 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes the argument api_url for the URL to poll.
+
+Expects the API response to be valid JSON and to contain a top-level ‘status’ key.
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
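+
+As an illustration of the most commonly used helpers (the workflow path and config key are placeholders):
+
+```python
+import nf_core.utils
+
+# Cache HTTP requests under ~/.nfcore_cache to speed up repeated API calls.
+nf_core.utils.setup_requests_cachedir()
+
+# Parse the flattened Nextflow config of a local workflow (placeholder path).
+config = nf_core.utils.fetch_wf_config("/path/to/nf-core-pipeline")
+print(config.get("params.outdir"))  # keys are flattened config names
+```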
diff --git a/src/content/tools/docs/1.10/workflow.md b/src/content/tools/docs/1.10/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.10/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/1.11/bump_version.md b/src/content/tools/docs/1.11/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.11/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – if the version number cannot be found.
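+
+Both bump functions take a [`PipelineLint`](lint#nf_core.lint.PipelineLint) object so they know where the relevant files live. A rough sketch mirroring the `nf-core bump-version` CLI flow (the preparatory lint calls and the path are assumptions based on that flow):
+
+```python
+import nf_core.bump_version
+import nf_core.lint
+
+# Load pipeline information via a PipelineLint object (placeholder path).
+lint_obj = nf_core.lint.PipelineLint("/path/to/nf-core-pipeline")
+lint_obj.check_files_exist()      # assumed: records the pipeline files
+lint_obj.check_nextflow_config()  # assumed: parses nextflow.config
+
+# Bump the pipeline version (semantic versioning only).
+nf_core.bump_version.bump_pipeline_version(lint_obj, "1.1.0")
+```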
diff --git a/src/content/tools/docs/1.11/create.md b/src/content/tools/docs/1.11/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.11/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.11/download.md b/src/content/tools/docs/1.11/download.md
new file mode 100644
index 0000000000..fbea76fc98
--- /dev/null
+++ b/src/content/tools/docs/1.11/download.md
@@ -0,0 +1,68 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on disk and validates it against the expected value.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
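+
+A minimal usage sketch (the pipeline name, release tag and output directory are illustrative):
+
+```python
+import nf_core.download
+
+# Download a released pipeline plus the centralised configs;
+# set singularity=True to also pull the container image.
+dl = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/hlatyping",       # example pipeline
+    release="1.2.0",                    # example release tag
+    singularity=False,
+    outdir="nf-core-hlatyping-1.2.0",   # example output directory
+)
+dl.download_workflow()
+```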
diff --git a/src/content/tools/docs/1.11/index.md b/src/content/tools/docs/1.11/index.md
new file mode 100644
index 0000000000..2ad86b1300
--- /dev/null
+++ b/src/content/tools/docs/1.11/index.md
@@ -0,0 +1,41 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.11/licences.md b/src/content/tools/docs/1.11/licences.md
new file mode 100644
index 0000000000..dcc395683a
--- /dev/null
+++ b/src/content/tools/docs/1.11/licences.md
@@ -0,0 +1,47 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
diff --git a/src/content/tools/docs/1.11/lint.md b/src/content/tools/docs/1.11/lint.md
new file mode 100644
index 0000000000..8a4c15d975
--- /dev/null
+++ b/src/content/tools/docs/1.11/lint.md
@@ -0,0 +1,374 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test_id>, <message>)`
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test_id>, <message>)`
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test_id>, <message>)`
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+ {
+  <package>: <API JSON response object>
+ }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat `. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.single_end = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+Makes sure it is triggered only on `release` and `workflow_dispatch`.
+
+#### `check_actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+Makes sure it is triggered only on `push` to `master`.
+
+#### `check_actions_branch_protection(){:python}`
+
+Checks that the GitHub Actions branch protection workflow is valid.
+
+Makes sure PRs can only come from nf-core dev or ‘patch’ of a fork.
+
+#### `check_actions_ci(){:python}`
+
+Checks that the GitHub Actions CI workflow is valid
+
+Makes sure tests run with the required nextflow version.
+
+#### `check_actions_lint(){:python}`
+
+Checks that the GitHub Actions lint workflow is valid
+
+Makes sure `nf-core lint` and `markdownlint` run.
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends an HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_cookiecutter_strings(){:python}`
+
+Look for the string ‘cookiecutter’ in all pipeline files.
+Finding it probably means that there has been a copy+paste error from the template.
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks files
+for presence.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'nextflow_schema.json',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md',
+'.github/workflows/branch.yml',
+'.github/workflows/ci.yml',
+'.github/workflows/linting.yml'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'Dockerfile',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity',
+'parameters.settings.json',
+'bin/markdown_to_html.r'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+At least one string in each list must be present for fail and warn.
+Any config in config_fail_ifdefined results in a failure.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends an HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved or the connection timed out.
+
+#### `check_pipeline_name(){:python}`
+
+Check whether pipeline name adheres to lower case/no hyphen naming convention
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_schema_lint(){:python}`
+
+Lint the pipeline schema
+
+#### `check_schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline
+
+#### `check_version_consistency(){:python}`
+
+Checks container tags versions.
+
+Runs on `process.container` (if set) and `$GITHUB_REF` (if a GitHub Actions release).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `get_results_md(){:python}`
+
+Function to create a markdown file suitable for posting in a GitHub comment
+
+#### `github_comment(){:python}`
+
+If we are running in a GitHub PR, try to post results as a comment
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns a summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (e.g. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **AssertionError** – if a critical problem is found.
+
+#### `save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, show_passed=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
diff --git a/src/content/tools/docs/1.11/list.md b/src/content/tools/docs/1.11/list.md
new file mode 100644
index 0000000000..0e4cec26f3
--- /dev/null
+++ b/src/content/tools/docs/1.11/list.md
@@ -0,0 +1,93 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
diff --git a/src/content/tools/docs/1.11/utils.md b/src/content/tools/docs/1.11/utils.md
new file mode 100644
index 0000000000..7988a40936
--- /dev/null
+++ b/src/content/tools/docs/1.11/utils.md
@@ -0,0 +1,46 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes the argument api_url for the URL to poll.
+
+Expects the API response to be valid JSON and to contain a top-level ‘status’ key.
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
diff --git a/src/content/tools/docs/1.11/workflow.md b/src/content/tools/docs/1.11/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.11/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/1.12.1/bump_version.md b/src/content/tools/docs/1.12.1/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – if the version number cannot be found.
diff --git a/src/content/tools/docs/1.12.1/create.md b/src/content/tools/docs/1.12.1/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.12.1/download.md b/src/content/tools/docs/1.12.1/download.md
new file mode 100644
index 0000000000..9911aabbd3
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/download.md
@@ -0,0 +1,70 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on disk and validates it against the expected value.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
diff --git a/src/content/tools/docs/1.12.1/index.md b/src/content/tools/docs/1.12.1/index.md
new file mode 100644
index 0000000000..115755e1c6
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/index.md
@@ -0,0 +1,49 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+ - [`Launch`](launch#nf_core.launch.Launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+ - [`ModulesRepo`](modules#nf_core.modules.ModulesRepo)
+ - [`PipelineModules`](modules#nf_core.modules.PipelineModules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+- [nf_core.utils](utils)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.12.1/launch.md b/src/content/tools/docs/1.12.1/launch.md
new file mode 100644
index 0000000000..0369d56b93
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/launch.md
@@ -0,0 +1,89 @@
+# nf_core.launch
+
+
+
+Launch a pipeline, interactively collecting params
+
+### _`class{:python}`_`nf_core.launch.Launch(pipeline=None, revision=None, command_only=False, params_in=None, params_out=None, save_all=False, show_hidden=False, url=None, web_id=None){:python}`
+
+Bases: `object`
+
+Class to hold config options to launch a pipeline
+
+#### `build_command(){:python}`
+
+Build the nextflow run command based on what we know
+
+#### `get_pipeline_schema(){:python}`
+
+Load and validate the schema from the supplied pipeline
+
+#### `get_web_launch_response(){:python}`
+
+Given a URL for a web-gui launch response, recursively query it until results are ready.
+
+#### `launch_pipeline(){:python}`
+
+#### `launch_web_gui(){:python}`
+
+Send schema to nf-core website and launch input GUI
+
+#### `launch_workflow(){:python}`
+
+Launch nextflow if required
+
+#### `merge_nxf_flag_schema(){:python}`
+
+Take the Nextflow flag schema and merge it with the pipeline schema
+
+#### `print_param_header(param_id, param_obj){:python}`
+
+#### `prompt_group(group_id, group_obj){:python}`
+
+Prompt for edits to a group of parameters (subschema in ‘definitions’)
+
+- **Parameters:**
+  - **group_id** – Parameter ID (string)
+  - **group_obj** – JSON Schema keys (dict)
+- **Returns:**
+  Dict of param_id: val answers
+
+#### `prompt_param(param_id, param_obj, is_required, answers){:python}`
+
+Prompt for a single parameter
+
+#### `prompt_schema(){:python}`
+
+Go through the pipeline schema and prompt user to change defaults
+
+#### `prompt_web_gui(){:python}`
+
+Ask whether to use the web-based or cli wizard to collect params
+
+#### `sanitise_web_response(){:python}`
+
+The web builder returns everything as strings.
+Use the functions defined in the cli wizard to convert to the correct types.
+
+#### `set_schema_inputs(){:python}`
+
+Take the loaded schema and set the defaults as the input parameters.
+If a nf_params.json file is supplied, apply these over the top.
+
+#### `single_param_to_questionary(param_id, param_obj, answers=None, print_help=True){:python}`
+
+Convert a JSONSchema param to a Questionary question
+
+- **Parameters:**
+ - **param_id** – Parameter ID (string)
+ - **param_obj** – JSON Schema keys (dict)
+ - **answers** – Optional preexisting answers (dict)
+ - **print_help** – If description and help_text should be printed (bool)
+- **Returns:**
+ Single Questionary dict, to be appended to questions list
+
+#### `strip_default_params(){:python}`
+
+Strip parameters if they have not changed from the default
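+
+A brief sketch of driving the launcher from Python instead of the `nf-core launch` command (the pipeline name and output file are placeholders):
+
+```python
+import nf_core.launch
+
+# Interactively collect params for a pipeline and write them to a JSON file.
+launcher = nf_core.launch.Launch(
+    pipeline="nf-core/hlatyping",   # example pipeline
+    params_out="nf-params.json",    # where the collected params are saved
+    show_hidden=False,
+)
+launcher.launch_pipeline()
+```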
diff --git a/src/content/tools/docs/1.12.1/licences.md b/src/content/tools/docs/1.12.1/licences.md
new file mode 100644
index 0000000000..ca4d304135
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/licences.md
@@ -0,0 +1,49 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
diff --git a/src/content/tools/docs/1.12.1/lint.md b/src/content/tools/docs/1.12.1/lint.md
new file mode 100644
index 0000000000..9d1e55bc56
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/lint.md
@@ -0,0 +1,379 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Bases: `object`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test_id>, <message>)`
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test_id>, <message>)`
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test_id>, <message>)`
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+ {
+  <package>: <API JSON response object>
+ }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat `. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.single_end = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `_strip_ansi_codes(string, replace_with=''){:python}`
+
+#### `_wrap_quotes(files){:python}`
+
+#### `check_actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+Makes sure it is triggered only on `release` and `workflow_dispatch`.
+
+#### `check_actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+Makes sure it is triggered only on `push` to `master`.
+
+#### `check_actions_branch_protection(){:python}`
+
+Checks that the GitHub Actions branch protection workflow is valid.
+
+Makes sure PRs can only come from nf-core dev or ‘patch’ of a fork.
+
+#### `check_actions_ci(){:python}`
+
+Checks that the GitHub Actions CI workflow is valid
+
+Makes sure tests run with the required nextflow version.
+
+#### `check_actions_lint(){:python}`
+
+Checks that the GitHub Actions lint workflow is valid
+
+Makes sure `nf-core lint` and `markdownlint` run.
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends an HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_cookiecutter_strings(){:python}`
+
+Look for the string ‘cookiecutter’ in all pipeline files.
+Finding it probably means that there has been a copy+paste error from the template.
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks files
+for presence.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'nextflow_schema.json',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md',
+'.github/workflows/branch.yml',
+'.github/workflows/ci.yml',
+'.github/workflows/linting.yml'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'Dockerfile',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity',
+'parameters.settings.json',
+'bin/markdown_to_html.r',
+'.github/workflows/push_dockerhub.yml'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+At least one string in each list must be present for fail and warn.
+Any config in config_fail_ifdefined results in a failure.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends an HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved or the connection timed out.
+
+#### `check_pipeline_name(){:python}`
+
+Check whether pipeline name adheres to lower case/no hyphen naming convention
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_schema_lint(){:python}`
+
+Lint the pipeline schema
+
+#### `check_schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline
+
+#### `check_version_consistency(){:python}`
+
+Checks container tags versions.
+
+Runs on `process.container` (if set) and `$GITHUB_REF` (if a GitHub Actions release).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `get_results_md(){:python}`
+
+Function to create a markdown file suitable for posting in a GitHub comment
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns a summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (e.g. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **AssertionError** – if a critical problem is found.
+
+#### `print_results(show_passed=False){:python}`
+
+#### `save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, show_passed=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
diff --git a/src/content/tools/docs/1.12.1/list.md b/src/content/tools/docs/1.12.1/list.md
new file mode 100644
index 0000000000..2fc6448de0
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/list.md
@@ -0,0 +1,99 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Bases: `object`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+Bases: `object`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Bases: `object`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
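+
+For example, a short sketch of listing pipelines from Python (the filter keyword is an arbitrary example):
+
+```python
+import nf_core.list
+
+# Print a summary of remote workflows matching "rna", sorted by GitHub stars
+nf_core.list.list_workflows(filter_by=["rna"], sort_by="stars")
+
+# Or drive the Workflows container directly
+wfs = nf_core.list.Workflows(filter_by=["rna"], sort_by="stars")
+wfs.get_remote_workflows()     # fetch pipelines from nf-co.re
+wfs.get_local_nf_workflows()   # find locally cached copies
+wfs.compare_remote_local()     # flag which local copies are up to date
+wfs.print_summary()
+```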
diff --git a/src/content/tools/docs/1.12.1/modules.md b/src/content/tools/docs/1.12.1/modules.md
new file mode 100644
index 0000000000..74fa1142ac
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/modules.md
@@ -0,0 +1,73 @@
+# nf_core.modules
+
+
+
+Code to handle DSL2 module imports from a GitHub repository
+
+### _`class{:python}`_`nf_core.modules.ModulesRepo(repo='nf-core/modules', branch='master'){:python}`
+
+Bases: `object`
+
+An object to store details about the repository being used for modules.
+
+Used by the nf-core modules top-level command with -r and -b flags,
+so that this can be used in the same way by all subcommands.
+
+### _`class{:python}`_`nf_core.modules.PipelineModules{:python}`
+
+Bases: `object`
+
+#### `check_modules(){:python}`
+
+#### `download_gh_file(dl_filename, api_url){:python}`
+
+Download a file from GitHub using the GitHub API
+
+- **Parameters:**
+ - **dl_filename** (_string_) – Path to save file to
+ - **api_url** (_string_) – GitHub API URL for file
+- **Raises:**
+  **An error is raised if there is a problem with the download.**
+
+#### `get_module_file_urls(module){:python}`
+
+Fetch list of URLs for a specific module
+
+Takes the name of a module and iterates over the GitHub repo file tree.
+Loops over items that are prefixed with the path ‘software/’ and ignores
+anything that’s not a blob. Also ignores the test/ subfolder.
+
+Returns a dictionary with keys as filenames and values as GitHub API URIs.
+These can be used to then download file contents.
+
+- **Parameters:**
+ **module** (_string_) – Name of module for which to fetch a set of URLs
+- **Returns:**
+  Set of files and associated URLs as follows:
+  {
+    ‘software/fastqc/main.nf’: ‘’,
+    ‘software/fastqc/meta.yml’: ‘’
+  }
+
+- **Return type:**
+ dict
+
+#### `get_modules_file_tree(){:python}`
+
+Fetch the file list from the repo, using the GitHub API
+
+Sets self.modules_file_tree, self.modules_current_hash and self.modules_avail_module_names.
+
+#### `install(module){:python}`
+
+#### `list_modules(){:python}`
+
+Get available module names from GitHub tree for repo
+and print as list to stdout
+
+#### `remove(module){:python}`
+
+#### `update(module, force=False){:python}`
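+
+For example, a rough sketch of how the `ModulesRepo` and `PipelineModules` classes fit together (the `modules_repo` and `pipeline_dir` attributes and the path are assumptions for illustration):
+
+```python
+import nf_core.modules
+
+# Point at the central nf-core/modules repository (defaults shown explicitly)
+repo = nf_core.modules.ModulesRepo(repo="nf-core/modules", branch="master")
+
+mods = nf_core.modules.PipelineModules()
+mods.modules_repo = repo                     # assumed attribute for the repo object
+mods.pipeline_dir = "/path/to/my-pipeline"   # assumed attribute for the target pipeline
+
+mods.get_modules_file_tree()   # fetch the file list via the GitHub API
+mods.list_modules()            # print the available module names
+mods.install("fastqc")         # copy the module files into the pipeline
+```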
diff --git a/src/content/tools/docs/1.12.1/schema.md b/src/content/tools/docs/1.12.1/schema.md
new file mode 100644
index 0000000000..6e74442246
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/schema.md
@@ -0,0 +1,106 @@
+# nf_core.schema
+
+
+
+Code to deal with pipeline JSON Schema
+
+### _`class{:python}`_`nf_core.schema.PipelineSchema{:python}`
+
+Bases: `object`
+
+Class to generate a schema object with
+functions to handle pipeline JSON Schema
+
+#### `add_schema_found_configs(){:python}`
+
+Add anything that’s found in the Nextflow params that’s missing in the pipeline schema
+
+#### `build_schema(pipeline_dir, no_prompts, web_only, url){:python}`
+
+Interactively build a new pipeline schema for a pipeline
+
+#### `build_schema_param(p_val){:python}`
+
+Build a pipeline schema dictionary for a param interactively
+
+#### `get_schema_defaults(){:python}`
+
+Generate set of default input parameters from schema.
+
+Saves defaults to self.schema_defaults
+Returns count of how many parameters were found (with or without a default value)
+
+#### `get_schema_path(path, local_only=False, revision=None){:python}`
+
+Given a pipeline name, directory, or path, set self.schema_filename
+
+#### `get_web_builder_response(){:python}`
+
+Given a URL for a Schema build response, recursively query it until results are ready.
+Once ready, validate Schema and write to disk.
+
+#### `get_wf_params(){:python}`
+
+Load the pipeline parameter defaults using nextflow config
+Strip out only the params. values and ignore anything that is not a flat variable
+
+#### `launch_web_builder(){:python}`
+
+Send pipeline schema to web builder and wait for response
+
+#### `load_input_params(params_path){:python}`
+
+Load parameters from a given path to a parameters file (JSON/YAML)
+
+These should be input parameters used to run a pipeline with
+the Nextflow -params-file option.
+
+#### `load_lint_schema(){:python}`
+
+Load and lint a given schema to see if it looks valid
+
+#### `load_schema(){:python}`
+
+Load a pipeline schema from a file
+
+#### `make_skeleton_schema(){:python}`
+
+Make a new pipeline schema from the template
+
+#### `prompt_remove_schema_notfound_config(p_key){:python}`
+
+Check if a given key is found in the nextflow config params and prompt to remove it if not
+
+Returns True if it should be removed, False if not.
+
+#### `remove_schema_notfound_configs(){:python}`
+
+Go through top-level schema and all definitions sub-schemas to remove
+anything that’s not in the nextflow config.
+
+#### `remove_schema_notfound_configs_single_schema(schema){:python}`
+
+Go through a single schema / set of properties and strip out
+anything that’s not in the nextflow config.
+
+Takes: Schema or sub-schema with properties key
+Returns: Cleaned schema / sub-schema
+
+#### `save_schema(){:python}`
+
+Save a pipeline schema to a file
+
+#### `validate_params(){:python}`
+
+Check given parameters against a schema and validate
+
+#### `validate_schema(schema=None){:python}`
+
+Check that the Schema is valid
+
+Returns: Number of parameters found
+
+#### `validate_schema_title_description(schema=None){:python}`
+
+Extra validation command for linting.
+Checks that the schema “$id”, “title” and “description” attributes match the pipeline config.
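+
+For example, a sketch of validating a `-params-file` document against a pipeline schema (paths are placeholders):
+
+```python
+import nf_core.schema
+
+schema_obj = nf_core.schema.PipelineSchema()
+
+# Resolve and lint the schema for a pipeline name, directory or file path
+schema_obj.get_schema_path("/path/to/my-pipeline")
+schema_obj.load_lint_schema()
+
+# Check a JSON/YAML parameters file against the schema
+schema_obj.load_input_params("nf-params.json")
+schema_obj.validate_params()
+```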
diff --git a/src/content/tools/docs/1.12.1/sync.md b/src/content/tools/docs/1.12.1/sync.md
new file mode 100644
index 0000000000..c12523d92b
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/sync.md
@@ -0,0 +1 @@
+# nf_core.sync
diff --git a/src/content/tools/docs/1.12.1/utils.md b/src/content/tools/docs/1.12.1/utils.md
new file mode 100644
index 0000000000..cc83aab208
--- /dev/null
+++ b/src/content/tools/docs/1.12.1/utils.md
@@ -0,0 +1,50 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes argument api_url for URL
+
+Expects the API response to be valid JSON and contain a top-level ‘status’ key.
+
+### `nf_core.utils.rich_force_colors(){:python}`
+
+Check if any environment variables are set to force Rich to use coloured output
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
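+
+For example, a short sketch of these helpers in use (the pipeline path is a placeholder and the polled function is a toy example):
+
+```python
+import nf_core.utils
+
+# Cache remote HTTP requests under ~/.nfcore_cache
+nf_core.utils.setup_requests_cachedir()
+
+# Pull the flattened Nextflow config for a local workflow checkout
+config = nf_core.utils.fetch_wf_config("/path/to/my-pipeline")
+print(config.get("params.outdir"))   # keys follow the flat `nextflow config` form
+
+# Show a spinner until the polled function returns True (checked every 2 seconds)
+state = {"calls": 0}
+
+def done_yet():
+    state["calls"] += 1
+    return state["calls"] >= 5
+
+nf_core.utils.wait_cli_function(done_yet, poll_every=20)
+```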
diff --git a/src/content/tools/docs/1.12/bump_version.md b/src/content/tools/docs/1.12/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.12/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – If the version number cannot be found.
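+
+For example, a rough sketch of bumping versions from Python; it assumes a `PipelineLint` object whose files and config have been loaded first (the path and version strings are example values):
+
+```python
+import nf_core.lint
+import nf_core.bump_version
+
+# Build a lint object so the bump functions know about the pipeline files and config
+lint_obj = nf_core.lint.PipelineLint("/path/to/my-pipeline")
+lint_obj.check_files_exist()
+lint_obj.check_nextflow_config()
+
+# Bump the pipeline version and the required Nextflow version
+nf_core.bump_version.bump_pipeline_version(lint_obj, "1.1.0")
+nf_core.bump_version.bump_nextflow_version(lint_obj, "20.04.0")
+```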
diff --git a/src/content/tools/docs/1.12/create.md b/src/content/tools/docs/1.12/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.12/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.12/download.md b/src/content/tools/docs/1.12/download.md
new file mode 100644
index 0000000000..9911aabbd3
--- /dev/null
+++ b/src/content/tools/docs/1.12/download.md
@@ -0,0 +1,70 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – If the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  **Various exceptions possible from subprocess execution of Singularity.**
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – If the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
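+
+For example, a sketch of downloading a released pipeline together with its Singularity image (the pipeline name and release are example values):
+
+```python
+import nf_core.download
+
+dl = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/rnaseq",
+    release="1.0",
+    singularity=True,                # also pull the container image
+    outdir="nf-core-rnaseq-1.0",
+    compress_type="tar.gz",
+)
+dl.download_workflow()
+```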
diff --git a/src/content/tools/docs/1.12/index.md b/src/content/tools/docs/1.12/index.md
new file mode 100644
index 0000000000..c73153a0b1
--- /dev/null
+++ b/src/content/tools/docs/1.12/index.md
@@ -0,0 +1,48 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+ - [`ModulesRepo`](modules#nf_core.modules.ModulesRepo)
+ - [`PipelineModules`](modules#nf_core.modules.PipelineModules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+- [nf_core.utils](utils)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.12/launch.md b/src/content/tools/docs/1.12/launch.md
new file mode 100644
index 0000000000..0874b30c29
--- /dev/null
+++ b/src/content/tools/docs/1.12/launch.md
@@ -0,0 +1 @@
+# nf_core.launch
diff --git a/src/content/tools/docs/1.12/licences.md b/src/content/tools/docs/1.12/licences.md
new file mode 100644
index 0000000000..ca4d304135
--- /dev/null
+++ b/src/content/tools/docs/1.12/licences.md
@@ -0,0 +1,49 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
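+
+For example, a short sketch of listing licences for a pipeline:
+
+```python
+import nf_core.licences
+
+lic = nf_core.licences.WorkflowLicences("hlatyping")
+
+# Roughly equivalent to calling get_environment_file(),
+# fetch_conda_licences() and print_licences() in turn
+lic.run_licences()
+```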
diff --git a/src/content/tools/docs/1.12/lint.md b/src/content/tools/docs/1.12/lint.md
new file mode 100644
index 0000000000..9d1e55bc56
--- /dev/null
+++ b/src/content/tools/docs/1.12/lint.md
@@ -0,0 +1,379 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Bases: `object`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: (, )
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: (, )
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: (, )
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+ {
+ :
+ }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat `. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.single_end = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `_strip_ansi_codes(string, replace_with=''){:python}`
+
+#### `_wrap_quotes(files){:python}`
+
+#### `check_actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+Makes sure it is triggered only on `release` and workflow_dispatch.
+
+#### `check_actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+Makes sure it is triggered only on `push` to `master`.
+
+#### `check_actions_branch_protection(){:python}`
+
+Checks that the GitHub Actions branch protection workflow is valid.
+
+Makes sure PRs can only come from nf-core dev or ‘patch’ of a fork.
+
+#### `check_actions_ci(){:python}`
+
+Checks that the GitHub Actions CI workflow is valid
+
+Makes sure tests run with the required nextflow version.
+
+#### `check_actions_lint(){:python}`
+
+Checks that the GitHub Actions lint workflow is valid
+
+Makes sure `nf-core lint` and `markdownlint` runs.
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – If the package name cannot be resolved.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_cookiecutter_strings(){:python}`
+
+Look for the string ‘cookiecutter’ in all pipeline files.
+Finding it probably means that there has been a copy+paste error from the template.
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checkmarks files
+for presence.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'nextflow_schema.json',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md',
+'.github/workflows/branch.yml',
+'.github/workflows/ci.yml',
+'.github/workflows/linting.yml'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'Dockerfile',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity',
+'parameters.settings.json',
+'bin/markdown_to_html.r',
+'.github/workflows/push_dockerhub.yml'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
+
+- **Raises:**
+  **AssertionError** – If neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+At least one string in each list must be present for fail and warn.
+Any config in config_fail_ifdefined results in a failure.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends a HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – If the package name cannot be resolved or the connection timed out.
+
+#### `check_pipeline_name(){:python}`
+
+Check whether pipeline name adheres to lower case/no hyphen naming convention
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_schema_lint(){:python}`
+
+Lint the pipeline schema
+
+#### `check_schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline
+
+#### `check_version_consistency(){:python}`
+
+Checks container tag versions.
+
+Runs on `process.container` (if set) and `$GITHUB_REF` (if a GitHub Actions release).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `get_results_md(){:python}`
+
+Function to create a markdown file suitable for posting in a GitHub comment
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (eg. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **AssertionError** – If a critical problem is found.
+
+#### `print_results(show_passed=False){:python}`
+
+#### `save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, show_passed=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
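+
+For example, a sketch of a release-mode lint run that also writes markdown and JSON reports (the path and file names are placeholders):
+
+```python
+import nf_core.lint
+
+lint_obj = nf_core.lint.run_linting(
+    pipeline_dir="/path/to/my-pipeline",
+    release_mode=True,            # also check container version tags
+    show_passed=False,
+    md_fn="lint_results.md",
+    json_fn="lint_results.json",
+)
+
+if lint_obj.failed:
+    raise SystemExit(f"{len(lint_obj.failed)} lint tests failed")
+```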
diff --git a/src/content/tools/docs/1.12/list.md b/src/content/tools/docs/1.12/list.md
new file mode 100644
index 0000000000..2fc6448de0
--- /dev/null
+++ b/src/content/tools/docs/1.12/list.md
@@ -0,0 +1,99 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Bases: `object`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+Bases: `object`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Bases: `object`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
diff --git a/src/content/tools/docs/1.12/modules.md b/src/content/tools/docs/1.12/modules.md
new file mode 100644
index 0000000000..74fa1142ac
--- /dev/null
+++ b/src/content/tools/docs/1.12/modules.md
@@ -0,0 +1,73 @@
+# nf_core.modules
+
+
+
+Code to handle DSL2 module imports from a GitHub repository
+
+### _`class{:python}`_`nf_core.modules.ModulesRepo(repo='nf-core/modules', branch='master'){:python}`
+
+Bases: `object`
+
+An object to store details about the repository being used for modules.
+
+Used by the nf-core modules top-level command with -r and -b flags,
+so that this can be used in the same way by all subcommands.
+
+### _`class{:python}`_`nf_core.modules.PipelineModules{:python}`
+
+Bases: `object`
+
+#### `check_modules(){:python}`
+
+#### `download_gh_file(dl_filename, api_url){:python}`
+
+Download a file from GitHub using the GitHub API
+
+- **Parameters:**
+ - **dl_filename** (_string_) – Path to save file to
+ - **api_url** (_string_) – GitHub API URL for file
+- **Raises:**
+  **An error is raised if there is a problem with the download.**
+
+#### `get_module_file_urls(module){:python}`
+
+Fetch list of URLs for a specific module
+
+Takes the name of a module and iterates over the GitHub repo file tree.
+Loops over items that are prefixed with the path ‘software/’ and ignores
+anything that’s not a blob. Also ignores the test/ subfolder.
+
+Returns a dictionary with keys as filenames and values as GitHub API URIs.
+These can be used to then download file contents.
+
+- **Parameters:**
+ **module** (_string_) – Name of module for which to fetch a set of URLs
+- **Returns:**
+  Set of files and associated URLs as follows:
+  {
+    ‘software/fastqc/main.nf’: ‘’,
+    ‘software/fastqc/meta.yml’: ‘’
+  }
+
+- **Return type:**
+ dict
+
+#### `get_modules_file_tree(){:python}`
+
+Fetch the file list from the repo, using the GitHub API
+
+Sets self.modules_file_tree, self.modules_current_hash and self.modules_avail_module_names.
+
+#### `install(module){:python}`
+
+#### `list_modules(){:python}`
+
+Get available module names from GitHub tree for repo
+and print as list to stdout
+
+#### `remove(module){:python}`
+
+#### `update(module, force=False){:python}`
diff --git a/src/content/tools/docs/1.12/schema.md b/src/content/tools/docs/1.12/schema.md
new file mode 100644
index 0000000000..6e74442246
--- /dev/null
+++ b/src/content/tools/docs/1.12/schema.md
@@ -0,0 +1,106 @@
+# nf_core.schema
+
+
+
+Code to deal with pipeline JSON Schema
+
+### _`class{:python}`_`nf_core.schema.PipelineSchema{:python}`
+
+Bases: `object`
+
+Class to generate a schema object with
+functions to handle pipeline JSON Schema
+
+#### `add_schema_found_configs(){:python}`
+
+Add anything that’s found in the Nextflow params that’s missing in the pipeline schema
+
+#### `build_schema(pipeline_dir, no_prompts, web_only, url){:python}`
+
+Interactively build a new pipeline schema for a pipeline
+
+#### `build_schema_param(p_val){:python}`
+
+Build a pipeline schema dictionary for a param interactively
+
+#### `get_schema_defaults(){:python}`
+
+Generate set of default input parameters from schema.
+
+Saves defaults to self.schema_defaults
+Returns count of how many parameters were found (with or without a default value)
+
+#### `get_schema_path(path, local_only=False, revision=None){:python}`
+
+Given a pipeline name, directory, or path, set self.schema_filename
+
+#### `get_web_builder_response(){:python}`
+
+Given a URL for a Schema build response, recursively query it until results are ready.
+Once ready, validate Schema and write to disk.
+
+#### `get_wf_params(){:python}`
+
+Load the pipeline parameter defaults using nextflow config
+Strip out only the params. values and ignore anything that is not a flat variable
+
+#### `launch_web_builder(){:python}`
+
+Send pipeline schema to web builder and wait for response
+
+#### `load_input_params(params_path){:python}`
+
+Load parameters from a given path to a parameters file (JSON/YAML)
+
+These should be input parameters used to run a pipeline with
+the Nextflow -params-file option.
+
+#### `load_lint_schema(){:python}`
+
+Load and lint a given schema to see if it looks valid
+
+#### `load_schema(){:python}`
+
+Load a pipeline schema from a file
+
+#### `make_skeleton_schema(){:python}`
+
+Make a new pipeline schema from the template
+
+#### `prompt_remove_schema_notfound_config(p_key){:python}`
+
+Check if a given key is found in the nextflow config params and prompt to remove it if not
+
+Returns True if it should be removed, False if not.
+
+#### `remove_schema_notfound_configs(){:python}`
+
+Go through top-level schema and all definitions sub-schemas to remove
+anything that’s not in the nextflow config.
+
+#### `remove_schema_notfound_configs_single_schema(schema){:python}`
+
+Go through a single schema / set of properties and strip out
+anything that’s not in the nextflow config.
+
+Takes: Schema or sub-schema with properties key
+Returns: Cleaned schema / sub-schema
+
+#### `save_schema(){:python}`
+
+Save a pipeline schema to a file
+
+#### `validate_params(){:python}`
+
+Check given parameters against a schema and validate
+
+#### `validate_schema(schema=None){:python}`
+
+Check that the Schema is valid
+
+Returns: Number of parameters found
+
+#### `validate_schema_title_description(schema=None){:python}`
+
+Extra validation command for linting.
+Checks that the schema “$id”, “title” and “description” attributes match the pipeline config.
diff --git a/src/content/tools/docs/1.12/sync.md b/src/content/tools/docs/1.12/sync.md
new file mode 100644
index 0000000000..c12523d92b
--- /dev/null
+++ b/src/content/tools/docs/1.12/sync.md
@@ -0,0 +1 @@
+# nf_core.sync
diff --git a/src/content/tools/docs/1.12/utils.md b/src/content/tools/docs/1.12/utils.md
new file mode 100644
index 0000000000..cc83aab208
--- /dev/null
+++ b/src/content/tools/docs/1.12/utils.md
@@ -0,0 +1,50 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes argument api_url for URL
+
+Expects the API response to be valid JSON and contain a top-level ‘status’ key.
+
+### `nf_core.utils.rich_force_colors(){:python}`
+
+Check if any environment variables are set to force Rich to use coloured output
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
diff --git a/src/content/tools/docs/1.13.1/api/bump_version.md b/src/content/tools/docs/1.13.1/api/bump_version.md
new file mode 100644
index 0000000000..3f45f9fefb
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/bump_version.md
@@ -0,0 +1,35 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, pipeline_obj, patterns){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **pipeline_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+- **Raises:**
+  **ValueError** – If the version number cannot be found.
diff --git a/src/content/tools/docs/1.13.1/api/create.md b/src/content/tools/docs/1.13.1/api/create.md
new file mode 100644
index 0000000000..92dcb9fb5e
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/create.md
@@ -0,0 +1,36 @@
+# nf_core.create
+
+Creates a nf-core pipeline matching the current
+organization’s specification based on a template.
+
+### _`class{:python}`_`nf_core.create.PipelineCreate(name, description, author, version='1.0dev', no_git=False, force=False, outdir=None){:python}`
+
+Bases: `object`
+
+Creates a nf-core pipeline a la carte from the nf-core best-practise template.
+
+- **Parameters:**
+ - **name** (_str_) – Name for the pipeline.
+ - **description** (_str_) – Description for the pipeline.
+  - **author** (_str_) – Author’s name for the pipeline.
+ - **version** (_str_) – Version flag. Semantic versioning only. Defaults to 1.0dev.
+ - **no_git** (_bool_) – Prevents the creation of a local Git repository for the pipeline. Defaults to False.
+ - **force** (_bool_) – Overwrites a given workflow directory with the same name. Defaults to False.
+ May the force be with you.
+ - **outdir** (_str_) – Path to the local output directory.
+
+#### `git_init_pipeline(){:python}`
+
+Initialises the new pipeline as a Git repository and submits first commit.
+
+#### `init_pipeline(){:python}`
+
+Creates the nf-core pipeline.
+
+#### `make_pipeline_logo(){:python}`
+
+Fetch a logo for the new pipeline from the nf-core website
+
+#### `render_template(){:python}`
+
+Runs Jinja to create a new nf-core pipeline.
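+
+For example, a sketch of creating a new pipeline skeleton (the name, description and author are placeholder values):
+
+```python
+import nf_core.create
+
+create_obj = nf_core.create.PipelineCreate(
+    name="exampleflow",
+    description="An example analysis pipeline",
+    author="Jane Doe",
+    version="1.0dev",
+    no_git=False,    # set True to skip creating the local git repository
+    force=False,     # set True to overwrite an existing directory
+    outdir=None,     # None: let the tool pick the output directory
+)
+create_obj.init_pipeline()   # renders the template, fetches a logo and initialises git
+```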
diff --git a/src/content/tools/docs/1.13.1/api/download.md b/src/content/tools/docs/1.13.1/api/download.md
new file mode 100644
index 0000000000..92c33262e9
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/download.md
@@ -0,0 +1,123 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadProgress(*columns: str | ProgressColumn, console: Console | None = None, auto_refresh: bool = True, refresh_per_second: float = 10, speed_estimate_period: float = 30.0, transient: bool = False, redirect_stdout: bool = True, redirect_stderr: bool = True, get_time: Callable[[], float] | None = None, disable: bool = False, expand: bool = False){:python}`
+
+Bases: `Progress`
+
+Custom Progress bar class, allowing us to have two progress
+bars with different columns / layouts.
+
+#### `get_renderables(){:python}`
+
+Get a number of renderables for the progress display.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, outdir=None, compress_type='tar.gz', force=False, singularity=False, singularity_cache_only=False, parallel_downloads=4){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – If the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow.
+
+Starts by using nextflow config to pull out any process.container
+declarations. This works for DSL1.
+
+Second, we look for DSL2 containers. These can’t be found with
+nextflow config at the time of writing, so we scrape the pipeline files.
+
+#### `get_singularity_images(){:python}`
+
+Loop through container names and download Singularity images
+
+#### `singularity_copy_cache_image(container, out_path, cache_path){:python}`
+
+Copy Singularity image from NXF_SINGULARITY_CACHEDIR to target folder.
+
+#### `singularity_download_image(container, out_path, cache_path, progress){:python}`
+
+Download a singularity image from the web.
+
+Use native Python to download the file.
+
+- **Parameters:**
+ - **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `https://depot.galaxyproject.org/singularity/name:version`
+ - **out_path** (_str_) – The final target output path
+  - **cache_path** (_str_ or _None_) – The NXF_SINGULARITY_CACHEDIR path if set, None if not
+ - **progress** (_Progress_) – Rich progress bar instance to add tasks to.
+
+#### `singularity_image_filenames(container){:python}`
+
+Check Singularity cache for image, copy to destination folder if found.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Can be direct download URL
+ or a Docker Hub repository ID.
+- **Returns:**
+  Returns True if we have the image in the target location.
+  Returns a download path if not.
+- **Return type:**
+ results (bool, str)
+
+#### `singularity_pull_image(container, out_path, cache_path, progress){:python}`
+
+Pull a singularity image using `singularity pull`
+
+Attempt to use a local installation of singularity to pull the image.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `nfcore/name:version`.
+- **Raises:**
+  **Various exceptions possible from subprocess execution of Singularity.**
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – If the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
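+
+For example, a sketch of a download that also fetches Singularity images in parallel (the pipeline name and release are example values):
+
+```python
+import nf_core.download
+
+dl = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/rnaseq",
+    release="3.0",
+    outdir="nf-core-rnaseq-3.0",
+    compress_type="tar.gz",
+    singularity=True,
+    singularity_cache_only=False,   # True: keep images only in NXF_SINGULARITY_CACHEDIR
+    parallel_downloads=4,
+)
+dl.download_workflow()
+```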
diff --git a/src/content/tools/docs/1.13.1/api/index.md b/src/content/tools/docs/1.13.1/api/index.md
new file mode 100644
index 0000000000..8fcbeab99d
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/index.md
@@ -0,0 +1,49 @@
+# API Reference
+
+# Tests:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+ - [`PipelineCreate`](create#nf_core.create.PipelineCreate)
+- [nf_core.download](download)
+ - [`DownloadProgress`](download#nf_core.download.DownloadProgress)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+ - [`Launch`](launch#nf_core.launch.Launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+ - [`PipelineSync`](sync#nf_core.sync.PipelineSync)
+ - [`PullRequestException`](sync#nf_core.sync.PullRequestException)
+ - [`SyncException`](sync#nf_core.sync.SyncException)
+- [nf_core.utils](utils)
+ - [`Pipeline`](utils#nf_core.utils.Pipeline)
+ - [`anaconda_package()`](utils#nf_core.utils.anaconda_package)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`custom_yaml_dumper()`](utils#nf_core.utils.custom_yaml_dumper)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`get_biocontainer_tag()`](utils#nf_core.utils.get_biocontainer_tag)
+ - [`github_api_auto_auth()`](utils#nf_core.utils.github_api_auto_auth)
+ - [`nextflow_cmd()`](utils#nf_core.utils.nextflow_cmd)
+ - [`parse_anaconda_licence()`](utils#nf_core.utils.parse_anaconda_licence)
+ - [`pip_package()`](utils#nf_core.utils.pip_package)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
diff --git a/src/content/tools/docs/1.13.1/api/launch.md b/src/content/tools/docs/1.13.1/api/launch.md
new file mode 100644
index 0000000000..8affe0fa4d
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/launch.md
@@ -0,0 +1,87 @@
+# nf_core.launch
+
+Launch a pipeline, interactively collecting params
+
+### _`class{:python}`_`nf_core.launch.Launch(pipeline=None, revision=None, command_only=False, params_in=None, params_out=None, save_all=False, show_hidden=False, url=None, web_id=None){:python}`
+
+Bases: `object`
+
+Class to hold config option to launch a pipeline
+
+#### `build_command(){:python}`
+
+Build the nextflow run command based on what we know
+
+#### `get_pipeline_schema(){:python}`
+
+Load and validate the schema from the supplied pipeline
+
+#### `get_web_launch_response(){:python}`
+
+Given a URL for a web-gui launch response, recursively query it until results are ready.
+
+#### `launch_pipeline(){:python}`
+
+#### `launch_web_gui(){:python}`
+
+Send schema to nf-core website and launch input GUI
+
+#### `launch_workflow(){:python}`
+
+Launch nextflow if required
+
+#### `merge_nxf_flag_schema(){:python}`
+
+Take the Nextflow flag schema and merge it with the pipeline schema
+
+#### `print_param_header(param_id, param_obj, is_group=False){:python}`
+
+#### `prompt_group(group_id, group_obj){:python}`
+
+Prompt for edits to a group of parameters (subschema in ‘definitions’)
+
+- **Parameters:**
+  - **group_id** – Parameter ID (string)
+ - **group_obj** – JSON Schema keys (dict)
+- **Returns:**
+  Dict of param_id: val answers
+
+#### `prompt_param(param_id, param_obj, is_required, answers){:python}`
+
+Prompt for a single parameter
+
+#### `prompt_schema(){:python}`
+
+Go through the pipeline schema and prompt user to change defaults
+
+#### `prompt_web_gui(){:python}`
+
+Ask whether to use the web-based or cli wizard to collect params
+
+#### `sanitise_web_response(){:python}`
+
+The web builder returns everything as strings.
+Use the functions defined in the cli wizard to convert to the correct types.
+
+#### `set_schema_inputs(){:python}`
+
+Take the loaded schema and set the defaults as the input parameters
+If a nf_params.json file is supplied, apply these over the top
+
+#### `single_param_to_questionary(param_id, param_obj, answers=None, print_help=True){:python}`
+
+Convert a JSONSchema param to a Questionary question
+
+- **Parameters:**
+ - **param_id** – Parameter ID (string)
+ - **param_obj** – JSON Schema keys (dict)
+ - **answers** – Optional preexisting answers (dict)
+ - **print_help** – If description and help_text should be printed (bool)
+- **Returns:**
+ Single Questionary dict, to be appended to questions list
+
+#### `strip_default_params(){:python}`
+
+Strip parameters if they have not changed from the default
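+
+For example, a sketch of launching the interactive wizard from Python (the pipeline name, revision and output file are example values):
+
+```python
+import nf_core.launch
+
+launcher = nf_core.launch.Launch(
+    pipeline="nf-core/rnaseq",
+    revision="1.4.2",
+    params_out="nf-params.json",   # where the collected parameters are written
+    show_hidden=False,
+)
+launcher.launch_pipeline()   # prompt for parameters and launch Nextflow if requested
+```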
diff --git a/src/content/tools/docs/1.13.1/api/licences.md b/src/content/tools/docs/1.13.1/api/licences.md
new file mode 100644
index 0000000000..578ddaae1d
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/licences.md
@@ -0,0 +1,37 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
diff --git a/src/content/tools/docs/1.13.1/api/lint.md b/src/content/tools/docs/1.13.1/api/lint.md
new file mode 100644
index 0000000000..97f22f4911
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/lint.md
@@ -0,0 +1,138 @@
+# nf_core.lint
+
+**See also:**
+
+See the [Lint Tests](../lint_tests/index.html) docs for information about specific linting functions.
+
+
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, fix=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(wf_path, release_mode=False, fix=(), fail_ignored=False){:python}`
+
+Bases: [`Pipeline`](utils#nf_core.utils.Pipeline)
+
+Object to hold linting information and results.
+
+Inherits [`nf_core.utils.Pipeline`](utils#nf_core.utils.Pipeline) class.
+
+Use the [`PipelineLint._lint_pipeline()`](#nf_core.lint.PipelineLint._lint_pipeline) function to run lint tests.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(, )`
+
+- **Type:**
+ list
+
+#### `ignored{:python}`
+
+A list of tuples of the form: `(, )`
+
+- **Type:**
+ list
+
+#### `lint_config{:python}`
+
+The parsed nf-core linting config for this pipeline
+
+- **Type:**
+ dict
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(, )`
+
+- **Type:**
+ list
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(, )`
+
+- **Type:**
+ list
+
+#### `_get_results_md(){:python}`
+
+Create a markdown file suitable for posting in a GitHub comment.
+
+- **Returns:**
+  Formatted markdown content
+- **Return type:**
+ markdown (str)
+
+#### `_lint_pipeline(){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+into object attributes: `passed`, `ignored`, `warned` and `failed`.
+
+#### `_print_results(show_passed=False){:python}`
+
+Print linting results to the command line.
+
+Uses the `rich` library to print a set of formatted tables to the command line
+summarising the linting results.
+
+#### `_save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+- **Parameters:**
+ **json_fn** (_str_) – File path to write JSON to.
+
+#### `_strip_ansi_codes(string, replace_with=''){:python}`
+
+Strip ANSI colouring codes from a string to return plain text.
+
+Solution found on Stack Overflow:
+
+#### `_wrap_quotes(files){:python}`
+
+Helper function to take a list of filenames and format with markdown.
+
+- **Parameters:**
+ **files** (_list_) –
+
+ List of filenames, eg:
+
+ ```default
+ ['foo', 'bar', 'baz']
+ ```
+
+- **Returns:**
+ Formatted string of paths separated by word `or`, eg:
+ ```default
+  `foo` or `bar` or `baz`
+ ```
+- **Return type:**
+ markdown (str)
diff --git a/src/content/tools/docs/1.13.1/api/list.md b/src/content/tools/docs/1.13.1/api/list.md
new file mode 100644
index 0000000000..2fc6448de0
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/list.md
@@ -0,0 +1,99 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Bases: `object`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+Bases: `object`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Bases: `object`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
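+A minimal usage sketch (the `"rna"` filter keyword is purely illustrative):
+
+```python
+import nf_core.list
+
+# Collect remote and local workflows, match them up and print a summary table
+wfs = nf_core.list.Workflows(filter_by=["rna"], sort_by="stars")
+wfs.get_remote_workflows()
+wfs.get_local_nf_workflows()
+wfs.compare_remote_local()
+wfs.print_summary()
+```
+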
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc.
+
+Based on a Stack Overflow solution, adapted by sven1103.
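+
+For illustration (the returned strings are indicative only):
+
+```python
+import datetime
+import nf_core.list
+
+print(nf_core.list.pretty_date(datetime.datetime.now()))  # e.g. 'just now'
+print(nf_core.list.pretty_date(1609459200))               # Epoch timestamp, e.g. '3 years ago'
+```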
diff --git a/src/content/tools/docs/1.13.1/api/modules.md b/src/content/tools/docs/1.13.1/api/modules.md
new file mode 100644
index 0000000000..2dd47b6359
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/modules.md
@@ -0,0 +1 @@
+# nf_core.modules
diff --git a/src/content/tools/docs/1.13.1/api/schema.md b/src/content/tools/docs/1.13.1/api/schema.md
new file mode 100644
index 0000000000..162788eb1a
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/schema.md
@@ -0,0 +1,109 @@
+# nf_core.schema
+
+Code to deal with pipeline JSON Schema
+
+### _`class{:python}`_`nf_core.schema.PipelineSchema{:python}`
+
+Bases: `object`
+
+Class to generate a schema object with
+functions to handle pipeline JSON Schema
+
+#### `add_schema_found_configs(){:python}`
+
+Add anything that’s found in the Nextflow params that’s missing in the pipeline schema
+
+#### `build_schema(pipeline_dir, no_prompts, web_only, url){:python}`
+
+Interactively build a new pipeline schema for a pipeline
+
+#### `build_schema_param(p_val){:python}`
+
+Build a pipeline schema dictionary for a param interactively
+
+#### `get_schema_defaults(){:python}`
+
+Generate set of default input parameters from schema.
+
+Saves defaults to self.schema_defaults
+Returns count of how many parameters were found (with or without a default value)
+
+#### `get_schema_path(path, local_only=False, revision=None){:python}`
+
+Given a pipeline name, directory, or path, set self.schema_filename
+
+#### `get_web_builder_response(){:python}`
+
+Given a URL for a Schema build response, recursively query it until results are ready.
+Once ready, validate Schema and write to disk.
+
+#### `get_wf_params(){:python}`
+
+Load the pipeline parameter defaults using `nextflow config`.
+Strip out only the `params.` values and ignore anything that is not a flat variable.
+
+#### `launch_web_builder(){:python}`
+
+Send pipeline schema to web builder and wait for response
+
+#### `load_input_params(params_path){:python}`
+
+Load a parameters file (JSON/YAML) from a given path
+
+These should be input parameters used to run a pipeline with
+the Nextflow -params-file option.
+
+#### `load_lint_schema(){:python}`
+
+Load and lint a given schema to see if it looks valid
+
+#### `load_schema(){:python}`
+
+Load a pipeline schema from a file
+
+#### `make_skeleton_schema(){:python}`
+
+Make a new pipeline schema from the template
+
+#### `prompt_remove_schema_notfound_config(p_key){:python}`
+
+Check if a given key is found in the nextflow config params and prompt to remove it if not
+
+Returns True if it should be removed, False if not.
+
+#### `remove_schema_notfound_configs(){:python}`
+
+Go through top-level schema and all definitions sub-schemas to remove
+anything that’s not in the nextflow config.
+
+#### `remove_schema_notfound_configs_single_schema(schema){:python}`
+
+Go through a single schema / set of properties and strip out
+anything that’s not in the nextflow config.
+
+Takes: Schema or sub-schema with properties key
+Returns: Cleaned schema / sub-schema
+
+#### `save_schema(){:python}`
+
+Save a pipeline schema to a file
+
+#### `validate_default_params(){:python}`
+
+Check that all default parameters in the schema are valid
+Ignores ‘required’ flag, as required parameters might have no defaults
+
+#### `validate_params(){:python}`
+
+Check given parameters against a schema and validate
+
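+As an illustrative sketch (paths and filenames are hypothetical), a typical validation flow might be:
+
+```python
+import nf_core.schema
+
+schema_obj = nf_core.schema.PipelineSchema()
+schema_obj.get_schema_path("./my-pipeline")      # locate the pipeline's nextflow_schema.json
+schema_obj.load_lint_schema()                    # load the schema and check that it looks valid
+schema_obj.load_input_params("my-params.json")   # params as used with `nextflow -params-file`
+schema_obj.validate_params()                     # check the given params against the schema
+```
+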
+#### `validate_schema(schema=None){:python}`
+
+Check that the Schema is valid
+
+Returns: Number of parameters found
+
+#### `validate_schema_title_description(schema=None){:python}`
+
+Extra validation command for linting.
+Checks that the schema “$id”, “title” and “description” attributes match the pipeline config.
diff --git a/src/content/tools/docs/1.13.1/api/sync.md b/src/content/tools/docs/1.13.1/api/sync.md
new file mode 100644
index 0000000000..e327553c3f
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/sync.md
@@ -0,0 +1,151 @@
+# nf_core.sync
+
+Synchronise a pipeline TEMPLATE branch with the template.
+
+### _`class{:python}`_`nf_core.sync.PipelineSync(pipeline_dir, from_branch=None, make_pr=False, gh_repo=None, gh_username=None){:python}`
+
+Bases: `object`
+
+Object to hold syncing information and results.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **from_branch** (_str_) – The branch to use to fetch config vars. If not set, will use current active branch
+ - **make_pr** (_bool_) – Set this to True to create a GitHub pull-request with the changes
+ - **gh_username** (_str_) – GitHub username
+ - **gh_repo** (_str_) – GitHub repository name
+
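+A minimal usage sketch (repository names and paths are hypothetical):
+
+```python
+import nf_core.sync
+
+# Sync the TEMPLATE branch of a local pipeline and try to open a pull-request with the changes
+sync_obj = nf_core.sync.PipelineSync(
+    "./my-pipeline",
+    make_pr=True,
+    gh_username="my-github-user",
+    gh_repo="my-pipeline",
+)
+sync_obj.sync()
+```
+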
+#### `pipeline_dir{:python}`
+
+Path to target pipeline directory
+
+- **Type:**
+ str
+
+#### `from_branch{:python}`
+
+Repo branch to use when collecting workflow variables. Default: active branch.
+
+- **Type:**
+ str
+
+#### `original_branch{:python}`
+
+Repo branch that was checked out before we started.
+
+- **Type:**
+ str
+
+#### `made_changes{:python}`
+
+Whether making the new template pipeline introduced any changes
+
+- **Type:**
+ bool
+
+#### `make_pr{:python}`
+
+Whether to try to automatically make a PR on GitHub.com
+
+- **Type:**
+ bool
+
+#### `required_config_vars{:python}`
+
+List of nextflow variables required to make template pipeline
+
+- **Type:**
+ list
+
+#### `gh_username{:python}`
+
+GitHub username
+
+- **Type:**
+ str
+
+#### `gh_repo{:python}`
+
+GitHub repository name
+
+- **Type:**
+ str
+
+#### `checkout_template_branch(){:python}`
+
+Try to check out the origin/TEMPLATE in a new TEMPLATE branch.
+If this fails, try to check out an existing local TEMPLATE branch.
+
+#### `close_open_pr(pr){:python}`
+
+Given a PR API response, add a comment and close.
+
+#### `close_open_template_merge_prs(){:python}`
+
+Get all template merging branches (starting with ‘nf-core-template-merge-‘)
+and check for any open PRs from these branches to the self.from_branch
+If open PRs are found, add a comment and close them
+
+#### `commit_template_changes(){:python}`
+
+If we have any changes with the new template files, make a git commit
+
+#### `create_merge_base_branch(){:python}`
+
+Create a new branch from the updated TEMPLATE branch
+This branch will then be used to create the PR
+
+#### `delete_template_branch_files(){:python}`
+
+Delete all files in the TEMPLATE branch
+
+#### `get_wf_config(){:python}`
+
+Check out the target branch if requested and fetch the nextflow config.
+Check that we have the required config variables.
+
+#### `inspect_sync_dir(){:python}`
+
+Takes a look at the target directory for syncing. Checks that it’s a git repo
+and makes sure that there are no uncommitted changes.
+
+#### `make_pull_request(){:python}`
+
+Create a pull request to a base branch (default: dev),
+from a head branch (default: TEMPLATE)
+
+Returns: An instance of class requests.Response
+
+#### `make_template_pipeline(){:python}`
+
+Delete all files and make a fresh template using the workflow variables
+
+#### `push_merge_branch(){:python}`
+
+Push the newly created merge branch to the remote repository
+
+#### `push_template_branch(){:python}`
+
+If we made any changes, push the TEMPLATE branch to the default remote
+and try to make a PR. If we don’t have the auth token, try to figure out a URL
+for the PR and print this to the console.
+
+#### `reset_target_dir(){:python}`
+
+Reset the target pipeline directory. Check out the original branch.
+
+#### `sync(){:python}`
+
+Find workflow attributes, create a new template pipeline on TEMPLATE
+
+### _`exception{:python}`_`nf_core.sync.PullRequestException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error creating a Pull-Request on GitHub.com
+
+### _`exception{:python}`_`nf_core.sync.SyncException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error with TEMPLATE branch synchronisation
diff --git a/src/content/tools/docs/1.13.1/api/utils.md b/src/content/tools/docs/1.13.1/api/utils.md
new file mode 100644
index 0000000000..782b715f4a
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/api/utils.md
@@ -0,0 +1,200 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### _`class{:python}`_`nf_core.utils.Pipeline(wf_path){:python}`
+
+Bases: `object`
+
+Object to hold information about a local pipeline.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (`environment.yml`).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `nf_config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `git_sha{:python}`
+
+The git sha for the repo commit / current GitHub pull-request ($GITHUB_PR_COMMIT)
+
+- **Type:**
+ str
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `wf_path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `schema_obj{:python}`
+
+A `PipelineSchema` object
+
+- **Type:**
+ obj
+
+#### `_fp(fn){:python}`
+
+Convenience function to get full path to a file in the pipeline
+
+#### `_list_files(){:python}`
+
+Get a list of all files in the pipeline
+
+#### `_load(){:python}`
+
+Run core load functions
+
+#### `_load_conda_environment(){:python}`
+
+Try to load the pipeline environment.yml file, if it exists
+
+#### `_load_pipeline_config(){:python}`
+
+Get the nextflow config for this pipeline
+
+Once loaded, set a few convenience reference class attributes
+
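+A minimal usage sketch (assuming a pipeline checkout at the hypothetical path `./my-pipeline`):
+
+```python
+import nf_core.utils
+
+# Create a Pipeline object and populate its attributes (nf_config, conda_config, files, ...)
+pipeline = nf_core.utils.Pipeline("./my-pipeline")
+pipeline._load()
+print(pipeline.pipeline_name, pipeline.minNextflowVersion)
+```
+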
+### `nf_core.utils.anaconda_package(dep, dep_channels=['conda-forge', 'bioconda', 'defaults']){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ - **dep** (_str_) – A conda package name.
+ - **dep_channels** (_list_) – list of conda channels to use
+- **Raises:**
+  - **LookupError** – if the connection fails, times out, or gives an unexpected status code.
+  - **ValueError** – if the package name cannot be found (404).
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.custom_yaml_dumper(){:python}`
+
+Overwrite default PyYAML output to make Prettier YAML linting happy
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
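+For illustration (the config keys shown are typical but depend on the pipeline):
+
+```python
+import nf_core.utils
+
+# Flattened Nextflow config as a dict, e.g. {'manifest.name': 'nf-core/example', ...}
+config = nf_core.utils.fetch_wf_config("./my-pipeline")
+print(config.get("manifest.version"))
+```
+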
+### `nf_core.utils.get_biocontainer_tag(package, version){:python}`
+
+Given a bioconda package and version, looks for a container
+at quay.io and returns the tag of the most recent image
+that matches the package version.
+
+Sends a HTTP GET request to the quay.io API.
+
+- **Parameters:**
+  - **package** (_str_) – A bioconda package name.
+  - **version** (_str_) – Version of the bioconda package.
+- **Raises:**
+  - **LookupError** – if the connection fails, times out, or gives an unexpected status code.
+  - **ValueError** – if the package name cannot be found (404).
+
+### `nf_core.utils.github_api_auto_auth(){:python}`
+
+### `nf_core.utils.nextflow_cmd(cmd){:python}`
+
+Run a Nextflow command and capture the output. Handle errors nicely
+
+### `nf_core.utils.parse_anaconda_licence(anaconda_response, version=None){:python}`
+
+Given a response from the anaconda API using anaconda_package, parse the software licences.
+
+Returns: Set of licence types
+
+### `nf_core.utils.pip_package(dep){:python}`
+
+Query PyPI package information.
+
+Sends a HTTP GET request to the PyPI remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPI package name.
+- **Raises:**
+  - **LookupError** – if the connection fails or times out.
+  - **ValueError** – if the package name cannot be found.
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes argument api_url for URL
+
+Expects the API response to be valid JSON and contain a top-level ‘status’ key.
+
+### `nf_core.utils.rich_force_colors(){:python}`
+
+Check if any environment variables are set to force Rich to use coloured output
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
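+
+A minimal sketch (the polled condition here is purely illustrative):
+
+```python
+import time
+import nf_core.utils
+
+deadline = time.time() + 10
+
+# Shows a spinner and calls the lambda every 2 seconds (20 tenths of a second)
+nf_core.utils.wait_cli_function(lambda: time.time() > deadline, poll_every=20)
+```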
diff --git a/src/content/tools/docs/1.13.1/index.md b/src/content/tools/docs/1.13.1/index.md
new file mode 100644
index 0000000000..c5988c4254
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/index.md
@@ -0,0 +1,18 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+This documentation is for the `nf-core/tools` package.
+
+Primarily, it describes the different [code lint tests](lint_tests/index.html)
+run by `nf-core lint` (typically visited by a developer when their pipeline fails a given
+test), and also serves as a reference for the `nf_core` [Python package API](api/index.html).
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.13.1/lint_tests/actions_awsfulltest.md b/src/content/tools/docs/1.13.1/lint_tests/actions_awsfulltest.md
new file mode 100644
index 0000000000..fd32fa4a32
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/actions_awsfulltest.md
@@ -0,0 +1,30 @@
+# actions_awsfulltest
+
+#### `PipelineLint.actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS.
+This should ensure that the pipeline runs as expected on AWS and provide a resource estimation.
+
+The GitHub Actions workflow is called `awsfulltest.yml`, and it can be found in the `.github/workflows/` directory.
+
+:::warning
+This workflow incurs AWS costs, therefore it should only be triggered for pipeline releases:
+`workflow_run` (after the docker hub release workflow) and `workflow_dispatch`.
+:::
+
+:::note
+You can manually trigger the AWS tests by going to the Actions tab on the pipeline GitHub repository and selecting the
+nf-core AWS full size tests workflow on the left.
+:::
+
+:::note
+For tests on full data prior to release, the [Nextflow Tower](https://tower.nf) launch feature can be used.
+:::
+
+The `.github/workflows/awsfulltest.yml` file is tested for the following:
+
+- Must be turned on for `workflow_dispatch`.
+- Must be turned on for `workflow_run` with `workflows: ["nf-core Docker push (release)"]` and `types: [completed]`.
+- Should run the profile `test_full` that should be edited to provide the links to full-size datasets. If it runs the profile `test`, a warning is given.
diff --git a/src/content/tools/docs/1.13.1/lint_tests/actions_awstest.md b/src/content/tools/docs/1.13.1/lint_tests/actions_awstest.md
new file mode 100644
index 0000000000..4e4698c4e9
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/actions_awstest.md
@@ -0,0 +1,24 @@
+# actions_awstest
+
+#### `PipelineLint.actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on AWS.
+This should ensure that the pipeline runs as expected on AWS (which often has its own unique edge cases).
+
+:::warning
+Running tests on AWS incurs costs, so these tests are not triggered automatically.
+Instead, they use the `workflow_dispatch` trigger, which allows for manual triggering
+of the workflow when testing on AWS is desired.
+:::
+
+:::note
+You can trigger the tests by going to the Actions tab on the pipeline GitHub repository
+and selecting the nf-core AWS test workflow on the left.
+:::
+
+The `.github/workflows/awstest.yml` file is tested for the following:
+
+- Must _not_ be turned on for `push` or `pull_request`.
+- Must be turned on for `workflow_dispatch`.
diff --git a/src/content/tools/docs/1.13.1/lint_tests/actions_ci.md b/src/content/tools/docs/1.13.1/lint_tests/actions_ci.md
new file mode 100644
index 0000000000..34f2096391
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/actions_ci.md
@@ -0,0 +1,63 @@
+# actions_ci
+
+#### `PipelineLint.actions_ci(){:python}`
+
+Checks that the GitHub Actions pipeline CI (Continuous Integration) workflow is valid.
+
+The `.github/workflows/ci.yml` GitHub Actions workflow runs the pipeline on a minimal test
+dataset using `-profile test` to check that no breaking changes have been introduced.
+Final result files are not checked, just that the pipeline exits successfully.
+
+This lint test checks this GitHub Actions workflow file for the following:
+
+- Workflow must be triggered on the following events:
+ ```yaml
+ on:
+ push:
+ branches:
+ - dev
+ pull_request:
+ release:
+ types: [published]
+ ```
+- The minimum Nextflow version specified in the pipeline’s `nextflow.config` matches that defined by `nxf_ver` in the test matrix:
+
+ ```yaml
+ strategy:
+ matrix:
+ # Nextflow versions: check pipeline minimum and current latest
+ nxf_ver: ['19.10.0', '']
+ ```
+
+ :::note
+ These `matrix` variables run the test workflow twice, varying the `nxf_ver` variable each time.
+ This is used in the `nextflow run` commands to test the pipeline with both the latest available version
+ of the pipeline (`''`) and the stated minimum required version.
+ :::
+
+- The Docker container for the pipeline must use the correct pipeline version number:
+
+  > - Development pipelines:
+  >   ```bash
+  >   docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:dev
+  >   ```
+  > - Released pipelines:
+  >   ```bash
+  >   docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:<pipeline_version>
+  >   ```
+  > - Complete example for a released pipeline called _nf-core/example_ with version number `1.0.0`:
+  >   ```yaml
+  >   - name: Build new docker image
+  >     if: env.GIT_DIFF
+  >     run: docker build --no-cache . -t nfcore/example:1.0.0
+  >
+  >   - name: Pull docker image
+  >     if: ${{ !env.GIT_DIFF }}
+  >     run: |
+  >       docker pull nfcore/example:dev
+  >       docker tag nfcore/example:dev nfcore/example:1.0.0
+  >   ```
diff --git a/src/content/tools/docs/1.13.1/lint_tests/actions_schema_validation.md b/src/content/tools/docs/1.13.1/lint_tests/actions_schema_validation.md
new file mode 100644
index 0000000000..caa60db6c4
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/actions_schema_validation.md
@@ -0,0 +1,18 @@
+# actions_schema_validation
+
+#### `PipelineLint.actions_schema_validation(){:python}`
+
+Checks that the GitHub Action workflow yml/yaml files adhere to the correct schema
+
+nf-core pipelines use GitHub actions workflows to run CI tests, check formatting and also linting, among others.
+These workflows are defined by `yml` scripts in `.github/workflows/`. This lint test verifies that these scripts are valid
+by comparing them against the JSON schema for GitHub workflows.
+
+To pass this test, make sure that all your workflows contain the required properties `on` and `jobs` and that
+all other properties are of the correct type, as specified in the schema.
diff --git a/src/content/tools/docs/1.13.1/lint_tests/conda_dockerfile.md b/src/content/tools/docs/1.13.1/lint_tests/conda_dockerfile.md
new file mode 100644
index 0000000000..388e51dd9a
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/conda_dockerfile.md
@@ -0,0 +1,33 @@
+# conda_dockerfile
+
+#### `PipelineLint.conda_dockerfile(){:python}`
+
+Checks the Dockerfile for use with Conda environments
+
+:::note
+This test only runs if there is both an `environment.yml`
+and `Dockerfile` present in the pipeline root directory.
+:::
+
+If a workflow has a conda `environment.yml` file, the `Dockerfile` should use this
+to create the docker image. These files are typically very short, just creating the conda
+environment inside the container.
+
+This linting test checks for the following:
+
+- All of the following lines are present in the file (where `PIPELINE` is your pipeline name):
+ > ```Dockerfile
+ > FROM nfcore/base:VERSION
+ > COPY environment.yml /
+ > RUN conda env create --quiet -f /environment.yml && conda clean -a
+ > RUN conda env export --name PIPELINE > PIPELINE.yml
+ > ENV PATH /opt/conda/envs/PIPELINE/bin:$PATH
+ > ```
+- That the `FROM nfcore/base:VERSION` is tagged to the most recent release of nf-core/tools
+ > - The linting tool compares the tag against the currently installed version of tools.
+ > - This line is not checked if running a development version of nf-core/tools.
+
+:::note
+Additional lines and different metadata can be added to the `Dockerfile`
+without causing this lint test to fail.
+:::
diff --git a/src/content/tools/docs/1.13.1/lint_tests/conda_env_yaml.md b/src/content/tools/docs/1.13.1/lint_tests/conda_env_yaml.md
new file mode 100644
index 0000000000..f2dab4fba1
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/conda_env_yaml.md
@@ -0,0 +1,36 @@
+# conda_env_yaml
+
+#### `PipelineLint.conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+:::note
+This test is ignored if there is not an `environment.yml`
+file present in the pipeline root directory.
+:::
+
+DSL1 nf-core pipelines use a single Conda environment to manage all software
+dependencies for a workflow. This can be used directly with `-profile conda`
+and is also used in the `Dockerfile` to build a docker image.
+
+This test checks the conda `environment.yml` file to ensure that it follows nf-core guidelines.
+Each dependency is checked using the [Anaconda API service](https://api.anaconda.org/docs).
+Dependency sublists are ignored with the exception of `- pip`: these packages are also checked
+for pinned version numbers and checked using the [PyPI JSON API](https://wiki.python.org/moin/PyPIJSON).
+
+Specifically, this lint test makes sure that:
+
+- The environment `name` must match the pipeline name and version
+ > - The pipeline name is defined in the config variable `manifest.name`
+ > - Replace the slash with a hyphen as environment names shouldn’t contain that character
+ > - Example: For `nf-core/test` version 1.4, the conda environment name should be `nf-core-test-1.4`
+- All package dependencies have a specific version number pinned
+ > :::warning
+ > Remember that Conda package versions should be pinned with one equals sign (`toolname=1.1`),
+ > but pip uses two (`toolname==1.2`)
+ > :::
+- That package versions can be found and are the latest available
+ > - Test will go through all conda channels listed in the file, or check PyPI if `pip`
+ > - Conda dependencies with pinned channels (eg. `conda-forge::openjdk`) are ok too
+ > - In addition to the package name, the pinned version is checked
+ > - If a newer version is available, a warning will be reported
diff --git a/src/content/tools/docs/1.13.1/lint_tests/files_exist.md b/src/content/tools/docs/1.13.1/lint_tests/files_exist.md
new file mode 100644
index 0000000000..927fbadce4
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/files_exist.md
@@ -0,0 +1,77 @@
+# files_exist
+
+#### `PipelineLint.files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that specified
+files are either present or absent, as required.
+
+:::note
+This test raises an `AssertionError` if neither `nextflow.config` nor `main.nf` is found.
+If these files are not found then this cannot be a Nextflow pipeline and something has gone badly wrong.
+All lint tests are stopped immediately with a critical error message.
+:::
+
+Files that **must** be present:
+
+```default
+.gitattributes
+.github/.dockstore.yml
+.github/CONTRIBUTING.md
+.github/ISSUE_TEMPLATE/bug_report.md
+.github/ISSUE_TEMPLATE/config.yml
+.github/ISSUE_TEMPLATE/feature_request.md
+.github/markdownlint.yml
+.github/PULL_REQUEST_TEMPLATE.md
+.github/workflows/branch.yml
+.github/workflows/ci.yml
+.github/workflows/linting_comment.yml
+.github/workflows/linting.yml
+[LICENSE, LICENSE.md, LICENCE, LICENCE.md] # NB: British / American spelling
+assets/email_template.html
+assets/email_template.txt
+assets/nf-core-PIPELINE_logo.png
+assets/sendmail_template.txt
+bin/markdown_to_html.py
+CHANGELOG.md
+CODE_OF_CONDUCT.md
+docs/images/nf-core-PIPELINE_logo.png
+docs/output.md
+docs/README.md
+docs/usage.md
+lib/nfcore_external_java_deps.jar
+lib/NfcoreSchema.groovy
+nextflow_schema.json
+nextflow.config
+README.md
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'Dockerfile',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity',
+'parameters.settings.json',
+'bin/markdown_to_html.r',
+'conf/aws.config',
+'.github/workflows/push_dockerhub.yml'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
diff --git a/src/content/tools/docs/1.13.1/lint_tests/files_unchanged.md b/src/content/tools/docs/1.13.1/lint_tests/files_unchanged.md
new file mode 100644
index 0000000000..ea06ada29f
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/files_unchanged.md
@@ -0,0 +1,57 @@
+# files_unchanged
+
+#### `PipelineLint.files_unchanged(){:python}`
+
+Checks that certain pipeline files are not modified from template output.
+
+Iterates through the pipeline’s directory content and compares specified files
+against output from the template using the pipeline’s metadata. File content
+should not be modified / missing.
+
+Files that must be unchanged:
+
+```default
+'.gitattributes',
+'.github/.dockstore.yml',
+'.github/CONTRIBUTING.md',
+'.github/ISSUE_TEMPLATE/bug_report.md',
+'.github/ISSUE_TEMPLATE/config.yml',
+'.github/ISSUE_TEMPLATE/feature_request.md',
+'.github/markdownlint.yml',
+'.github/PULL_REQUEST_TEMPLATE.md',
+'.github/workflows/branch.yml',
+'.github/workflows/linting_comment.yml',
+'.github/workflows/linting.yml',
+'assets/email_template.html',
+'assets/email_template.txt',
+'assets/nf-core-PIPELINE_logo.png',
+'assets/sendmail_template.txt',
+'bin/markdown_to_html.py',
+'CODE_OF_CONDUCT.md',
+'docs/images/nf-core-PIPELINE_logo.png',
+'docs/README.md',
+'lib/nfcore_external_java_deps.jar',
+'lib/NfcoreSchema.groovy',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+```
+
+Files that can have additional content but must include the template contents:
+
+```default
+'.github/workflows/push_dockerhub_dev.yml',
+'.github/workflows/push_dockerhub_release.yml',
+'.gitignore',
+'assets/multiqc_config.yaml',
+```
+
+:::note
+You can configure the `nf-core lint` tests to ignore any of these checks by setting
+the `files_unchanged` key as follows in your linting config file. For example:
+
+```yaml
+files_unchanged:
+ - .github/workflows/branch.yml
+ - assets/multiqc_config.yaml
+```
+
+:::
diff --git a/src/content/tools/docs/1.13.1/lint_tests/index.md b/src/content/tools/docs/1.13.1/lint_tests/index.md
new file mode 100644
index 0000000000..f0e9f7e6a1
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/index.md
@@ -0,0 +1,38 @@
+# Lint tests
+
+# Tests:
+
+- [actions_awsfulltest](actions_awsfulltest)
+ - [`PipelineLint.actions_awsfulltest()`](actions_awsfulltest#nf_core.lint.PipelineLint.actions_awsfulltest)
+- [actions_awstest](actions_awstest)
+ - [`PipelineLint.actions_awstest()`](actions_awstest#nf_core.lint.PipelineLint.actions_awstest)
+- [actions_ci](actions_ci)
+ - [`PipelineLint.actions_ci()`](actions_ci#nf_core.lint.PipelineLint.actions_ci)
+- [actions_schema_validation](actions_schema_validation)
+ - [`PipelineLint.actions_schema_validation()`](actions_schema_validation#nf_core.lint.PipelineLint.actions_schema_validation)
+- [conda_dockerfile](conda_dockerfile)
+ - [`PipelineLint.conda_dockerfile()`](conda_dockerfile#nf_core.lint.PipelineLint.conda_dockerfile)
+- [conda_env_yaml](conda_env_yaml)
+ - [`PipelineLint.conda_env_yaml()`](conda_env_yaml#nf_core.lint.PipelineLint.conda_env_yaml)
+- [files_exist](files_exist)
+ - [`PipelineLint.files_exist()`](files_exist#nf_core.lint.PipelineLint.files_exist)
+- [files_unchanged](files_unchanged)
+ - [`PipelineLint.files_unchanged()`](files_unchanged#nf_core.lint.PipelineLint.files_unchanged)
+- [merge_markers](merge_markers)
+ - [`PipelineLint.merge_markers()`](merge_markers#nf_core.lint.PipelineLint.merge_markers)
+- [nextflow_config](nextflow_config)
+ - [`PipelineLint.nextflow_config()`](nextflow_config#nf_core.lint.PipelineLint.nextflow_config)
+- [pipeline_name_conventions](pipeline_name_conventions)
+ - [`PipelineLint.pipeline_name_conventions()`](pipeline_name_conventions#nf_core.lint.PipelineLint.pipeline_name_conventions)
+- [pipeline_todos](pipeline_todos)
+ - [`PipelineLint.pipeline_todos()`](pipeline_todos#nf_core.lint.PipelineLint.pipeline_todos)
+- [readme](readme)
+ - [`PipelineLint.readme()`](readme#nf_core.lint.PipelineLint.readme)
+- [schema_lint](schema_lint)
+ - [`PipelineLint.schema_lint()`](schema_lint#nf_core.lint.PipelineLint.schema_lint)
+- [schema_params](schema_params)
+ - [`PipelineLint.schema_params()`](schema_params#nf_core.lint.PipelineLint.schema_params)
+- [template_strings](template_strings)
+ - [`PipelineLint.template_strings()`](template_strings#nf_core.lint.PipelineLint.template_strings)
+- [version_consistency](version_consistency)
+ - [`PipelineLint.version_consistency()`](version_consistency#nf_core.lint.PipelineLint.version_consistency)
diff --git a/src/content/tools/docs/1.13.1/lint_tests/merge_markers.md b/src/content/tools/docs/1.13.1/lint_tests/merge_markers.md
new file mode 100644
index 0000000000..38076814b1
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/merge_markers.md
@@ -0,0 +1,8 @@
+# merge_markers
+
+#### `PipelineLint.merge_markers(){:python}`
+
+Check for remaining merge markers.
+
+This test looks for remaining merge markers in the code, e.g.:
+`>>>>>>>` or `<<<<<<<`
diff --git a/src/content/tools/docs/1.13.1/lint_tests/nextflow_config.md b/src/content/tools/docs/1.13.1/lint_tests/nextflow_config.md
new file mode 100644
index 0000000000..7f77f40c66
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/nextflow_config.md
@@ -0,0 +1,65 @@
+# nextflow_config
+
+#### `PipelineLint.nextflow_config(){:python}`
+
+Checks the pipeline configuration for required variables.
+
+All nf-core pipelines are required to be configured with a minimal set of variable
+names. This test fails or throws warnings if required variables are not set.
+
+:::note
+These config variables must be set in `nextflow.config` or another config
+file imported from there. Any variables set in nextflow script files (eg. `main.nf`)
+are not checked and will be assumed to be missing.
+:::
+
+**The following variables fail the test if missing:**
+
+- `params.outdir`: A directory in which all pipeline results should be saved
+- `manifest.name`: The pipeline name. Should begin with `nf-core/`
+- `manifest.description`: A description of the pipeline
+- `manifest.version`
+ - The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/).
+ - If `--release` is set when running `nf-core lint`, the version number must not contain the string `dev`
+  - If `--release` is _not_ set, the version should end in `dev` (warning triggered if not)
+- `manifest.nextflowVersion`
+ - The minimum version of Nextflow required to run the pipeline.
+ - Should be `>=` or `!>=` and a version number, eg. `manifest.nextflowVersion = '>=0.31.0'` (see [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html#scope-manifest))
+ - `>=` warns about old versions but tries to run anyway, `!>=` fails for old versions. Only use the latter if you _know_ that the pipeline will certainly fail before this version.
+ - This should correspond to the `NXF_VER` version tested by GitHub Actions.
+- `manifest.homePage`
+ - The homepage for the pipeline. Should be the nf-core GitHub repository URL,
+ so beginning with `https://github.com/nf-core/`
+- `timeline.enabled`, `trace.enabled`, `report.enabled`, `dag.enabled`
+ - The nextflow timeline, trace, report and DAG should be enabled by default (set to `true`)
+- `process.cpus`, `process.memory`, `process.time`
+ - Default CPUs, memory and time limits for tasks
+- `params.input`
+ - Input parameter to specify input data, specify this to avoid a warning
+ - Typical usage:
+ - `params.input`: Input data that is not NGS sequencing data
+
+**The following variables throw warnings if missing:**
+
+- `manifest.mainScript`: The filename of the main pipeline script (should be `main.nf`)
+- `timeline.file`, `trace.file`, `report.file`, `dag.file`
+ - Default filenames for the timeline, trace and report
+ - The DAG file path should end with `.svg` (If Graphviz is not installed, Nextflow will generate a `.dot` file instead)
+- `process.container`
+ - Docker Hub handle for a single default container for use by all processes.
+ - Must specify a tag that matches the pipeline version number if set.
+ - If the pipeline version number contains the string `dev`, the DockerHub tag must be `:dev`
+
+**The following variables are deprecated and fail the test if they are still present:**
+
+- `params.version`: The old method for specifying the pipeline version. Replaced by `manifest.version`
+- `params.nf_required_version`: The old method for specifying the minimum Nextflow version. Replaced by `manifest.nextflowVersion`
+- `params.container`: The old method for specifying the dockerhub container address. Replaced by `process.container`
+- `igenomesIgnore`: Changed to `igenomes_ignore`
+ > :::note
+ > The `snake_case` convention should now be used when defining pipeline parameters
+ > :::
+
+**The following Nextflow syntax is deprecated and fails the test if present:**
+
+- Process-level configuration syntax still using the old Nextflow syntax, for example: `process.$fastqc` instead of `process withName:'fastqc'`.
diff --git a/src/content/tools/docs/1.13.1/lint_tests/pipeline_name_conventions.md b/src/content/tools/docs/1.13.1/lint_tests/pipeline_name_conventions.md
new file mode 100644
index 0000000000..788d77e41a
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/pipeline_name_conventions.md
@@ -0,0 +1,13 @@
+# pipeline_name_conventions
+
+#### `PipelineLint.pipeline_name_conventions(){:python}`
+
+Checks that the pipeline name adheres to nf-core conventions.
+
+In order to ensure consistent naming, pipeline names should contain only lower case, alphanumeric characters.
+Otherwise a warning is displayed.
+
+:::warning
+DockerHub is very picky about image names and doesn’t even allow hyphens (we are `nfcore`).
+This is a large part of why we set this rule.
+:::
diff --git a/src/content/tools/docs/1.13.1/lint_tests/pipeline_todos.md b/src/content/tools/docs/1.13.1/lint_tests/pipeline_todos.md
new file mode 100644
index 0000000000..e8a73d1663
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/pipeline_todos.md
@@ -0,0 +1,28 @@
+# pipeline_todos
+
+#### `PipelineLint.pipeline_todos(){:python}`
+
+Check for nf-core _TODO_ lines.
+
+The nf-core workflow template contains a number of comment lines to help developers
+of new pipelines know where they need to edit files and add content.
+They typically have the following format:
+
+```groovy
+// TODO nf-core: Make some kind of change to the workflow here
+```
+
+...or in markdown:
+
+```html
+<!-- TODO nf-core: Make some kind of change to the documentation here -->
+```
+
+This lint test runs through all files in the pipeline and searches for these lines.
+If any are found they will throw a warning.
+
+:::note
+Note that many GUI code editors have plugins to list all instances of _TODO_
+in a given project directory. This is a very quick and convenient way to get
+started on your pipeline!
+:::
diff --git a/src/content/tools/docs/1.13.1/lint_tests/readme.md b/src/content/tools/docs/1.13.1/lint_tests/readme.md
new file mode 100644
index 0000000000..51b8040c97
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/readme.md
@@ -0,0 +1,25 @@
+# readme
+
+#### `PipelineLint.readme(){:python}`
+
+Repository `README.md` tests
+
+The `README.md` files for a project are very important and must meet some requirements:
+
+- Nextflow badge
+ - If no Nextflow badge is found, a warning is given
+ - If a badge is found but the version doesn’t match the minimum version in the config file, the test fails
+ - Example badge code:
+ ```md
+ [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.27.6-brightgreen.svg)](https://www.nextflow.io/)
+ ```
+- Bioconda badge
+ - If your pipeline contains a file called `environment.yml` in the root directory, a bioconda badge is required
+ - Required badge code:
+ ```md
+ [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)
+ ```
+
+:::note
+These badges are a markdown image `![alt-text]()` _inside_ a markdown link `[markdown image]()`, so a bit fiddly to write.
+:::
diff --git a/src/content/tools/docs/1.13.1/lint_tests/schema_lint.md b/src/content/tools/docs/1.13.1/lint_tests/schema_lint.md
new file mode 100644
index 0000000000..b5e29febba
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/schema_lint.md
@@ -0,0 +1,59 @@
+# schema_lint
+
+#### `PipelineLint.schema_lint(){:python}`
+
+Pipeline schema syntax
+
+Pipelines should have a `nextflow_schema.json` file that describes the different
+pipeline parameters (eg. `params.something`, `--something`).
+
+:::note
+Reminder: you should generally never need to edit this JSON file by hand.
+The `nf-core schema build` command can create _and edit_ the file for you
+to keep it up to date, with a friendly user-interface for customisation.
+:::
+
+The lint test checks the schema for the following:
+
+- Schema should be a valid JSON file
+- Schema should adhere to [JSONSchema](https://json-schema.org/), Draft 7.
+- Parameters can be described in two places:
+ > - As `properties` in the top-level schema object
+  > - As `properties` within subschemas listed in a top-level `definitions` object
+- The schema must describe at least one parameter
+- There must be no duplicate parameter IDs across the schema and definition subschema
+- All subschema in `definitions` must be referenced in the top-level `allOf` key
+- The top-level `allOf` key must not describe any non-existent definitions
+- Default parameters in the schema must be valid
+- Core top-level schema attributes should exist and be set as follows:
+ > - `$schema`: `https://json-schema.org/draft-07/schema`
+ > - `$id`: URL to the raw schema file, eg. `https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`
+ > - `title`: `YOURPIPELINE pipeline parameters`
+ > - `description`: The pipeline config `manifest.description`
+
+For example, an _extremely_ minimal schema could look like this:
+
+```json
+{
+ "$schema": "https://json-schema.org/draft-07/schema",
+ "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json",
+ "title": "YOURPIPELINE pipeline parameters",
+ "description": "This pipeline is for testing",
+ "properties": {
+ "first_param": { "type": "string" }
+ },
+ "definitions": {
+ "my_first_group": {
+ "properties": {
+ "second_param": { "type": "string" }
+ }
+ }
+ },
+ "allOf": [{ "$ref": "#/definitions/my_first_group" }]
+}
+```
+
+:::note
+You can check your pipeline schema without having to run the entire pipeline lint
+by running `nf-core schema lint` instead of `nf-core lint`
+:::
diff --git a/src/content/tools/docs/1.13.1/lint_tests/schema_params.md b/src/content/tools/docs/1.13.1/lint_tests/schema_params.md
new file mode 100644
index 0000000000..7d9440fa15
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/schema_params.md
@@ -0,0 +1,11 @@
+# schema_params
+
+#### `PipelineLint.schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline.
+
+The `nextflow_schema.json` pipeline schema should describe every flat parameter
+returned from the `nextflow config` command (params that are objects or more complex structures are ignored).
+
+- Failure: If parameters are returned by `nextflow config` but are not described in `nextflow_schema.json`
+- Warning: If parameters are described in `nextflow_schema.json` but are not returned by `nextflow config`
diff --git a/src/content/tools/docs/1.13.1/lint_tests/template_strings.md b/src/content/tools/docs/1.13.1/lint_tests/template_strings.md
new file mode 100644
index 0000000000..23c06bb63d
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/template_strings.md
@@ -0,0 +1,17 @@
+# template_strings
+
+#### `PipelineLint.template_strings(){:python}`
+
+Check for template placeholders.
+
+The `nf-core create` pipeline template uses
+[Jinja](https://jinja.palletsprojects.com/en/2.11.x/) behind the scenes.
+
+This lint test fails if any Jinja template variables such as
+`{{ pipeline_name }}` are found in your pipeline code.
+
+Finding a placeholder like this means that something was probably copied and pasted
+from the template without being properly rendered for your pipeline.
+
+This test ignores any double-brackets prefixed with a dollar sign, such as
+`${{ secrets.AWS_ACCESS_KEY_ID }}` as these placeholders are used in GitHub Actions workflows.
diff --git a/src/content/tools/docs/1.13.1/lint_tests/version_consistency.md b/src/content/tools/docs/1.13.1/lint_tests/version_consistency.md
new file mode 100644
index 0000000000..040f5e57fc
--- /dev/null
+++ b/src/content/tools/docs/1.13.1/lint_tests/version_consistency.md
@@ -0,0 +1,23 @@
+# version_consistency
+
+#### `PipelineLint.version_consistency(){:python}`
+
+Pipeline and container version number consistency.
+
+:::note
+This test only runs when the `--release` flag is set for `nf-core lint`,
+or `$GITHUB_REF` is equal to `master`.
+:::
+
+This lint test fetches the pipeline version number from three possible locations:
+
+- The pipeline config, `manifest.version`
+- The docker container in the pipeline config, `process.container`
+ > - Some pipelines may not have this set on a pipeline level. If it is not found, it is ignored.
+- `$GITHUB_REF`, if it looks like a release tag (`refs/tags/`)
+
+The test then checks that:
+
+- The container name has a tag specified (eg. `nfcore/pipeline:version`)
+- The pipeline version number is numeric (contains only numbers and dots)
+- That the version numbers all match one another
diff --git a/src/content/tools/docs/1.13.2/api/bump_version.md b/src/content/tools/docs/1.13.2/api/bump_version.md
new file mode 100644
index 0000000000..3f45f9fefb
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/bump_version.md
@@ -0,0 +1,35 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
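+A minimal usage sketch (assuming a pipeline checkout at the hypothetical path `./my-pipeline`):
+
+```python
+import nf_core.bump_version
+import nf_core.utils
+
+# Load the pipeline, then bump its version number in all relevant files
+pipeline_obj = nf_core.utils.Pipeline("./my-pipeline")
+pipeline_obj._load()
+nf_core.bump_version.bump_pipeline_version(pipeline_obj, "1.1.0")
+```
+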
+### `nf_core.bump_version.update_file_version(filename, pipeline_obj, patterns){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+  - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+    about the pipeline contents and build files.
+  - **patterns** (_list_) – List of regex patterns to apply, each paired with its replacement string.
+- **Raises:**
+  **ValueError** – if the version number cannot be found.
diff --git a/src/content/tools/docs/1.13.2/api/create.md b/src/content/tools/docs/1.13.2/api/create.md
new file mode 100644
index 0000000000..92dcb9fb5e
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/create.md
@@ -0,0 +1,36 @@
+# nf_core.create
+
+Creates a nf-core pipeline matching the current
+organization’s specification based on a template.
+
+### _`class{:python}`_`nf_core.create.PipelineCreate(name, description, author, version='1.0dev', no_git=False, force=False, outdir=None){:python}`
+
+Bases: `object`
+
+Creates a nf-core pipeline a la carte from the nf-core best-practise template.
+
+- **Parameters:**
+ - **name** (_str_) – Name for the pipeline.
+ - **description** (_str_) – Description for the pipeline.
+  - **author** (_str_) – Author's name of the pipeline.
+ - **version** (_str_) – Version flag. Semantic versioning only. Defaults to 1.0dev.
+ - **no_git** (_bool_) – Prevents the creation of a local Git repository for the pipeline. Defaults to False.
+ - **force** (_bool_) – Overwrites a given workflow directory with the same name. Defaults to False.
+ May the force be with you.
+ - **outdir** (_str_) – Path to the local output directory.
+
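+A minimal usage sketch (names and paths are hypothetical):
+
+```python
+import nf_core.create
+
+creator = nf_core.create.PipelineCreate(
+    name="example",
+    description="A test pipeline",
+    author="Jane Doe",
+    outdir="./nf-core-example",
+)
+creator.init_pipeline()  # creates the new pipeline from the template
+```
+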
+#### `git_init_pipeline(){:python}`
+
+Initialises the new pipeline as a Git repository and submits first commit.
+
+#### `init_pipeline(){:python}`
+
+Creates the nf-core pipeline.
+
+#### `make_pipeline_logo(){:python}`
+
+Fetch a logo for the new pipeline from the nf-core website
+
+#### `render_template(){:python}`
+
+Runs Jinja to create a new nf-core pipeline.
diff --git a/src/content/tools/docs/1.13.2/api/download.md b/src/content/tools/docs/1.13.2/api/download.md
new file mode 100644
index 0000000000..92c33262e9
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/download.md
@@ -0,0 +1,123 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadProgress(*columns: str | ProgressColumn, console: Console | None = None, auto_refresh: bool = True, refresh_per_second: float = 10, speed_estimate_period: float = 30.0, transient: bool = False, redirect_stdout: bool = True, redirect_stderr: bool = True, get_time: Callable[[], float] | None = None, disable: bool = False, expand: bool = False){:python}`
+
+Bases: `Progress`
+
+Custom Progress bar class, allowing us to have two progress
+bars with different columns / layouts.
+
+#### `get_renderables(){:python}`
+
+Get a number of renderables for the progress display.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, outdir=None, compress_type='tar.gz', force=False, singularity=False, singularity_cache_only=False, parallel_downloads=4){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
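+A minimal usage sketch (pipeline name and release are illustrative):
+
+```python
+import nf_core.download
+
+# Download a released pipeline, including its Singularity container image
+dl = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/rnaseq",
+    release="3.0",
+    outdir="nf-core-rnaseq-3.0",
+    singularity=True,
+)
+dl.download_workflow()
+```
+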
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow.
+
+Starts by using nextflow config to pull out any process.container
+declarations. This works for DSL1.
+
+Second, we look for DSL2 containers. These can’t be found with
+nextflow config at the time of writing, so we scrape the pipeline files.
+
+#### `get_singularity_images(){:python}`
+
+Loop through container names and download Singularity images
+
+#### `singularity_copy_cache_image(container, out_path, cache_path){:python}`
+
+Copy Singularity image from NXF_SINGULARITY_CACHEDIR to target folder.
+
+#### `singularity_download_image(container, out_path, cache_path, progress){:python}`
+
+Download a singularity image from the web.
+
+Use native Python to download the file.
+
+- **Parameters:**
+ - **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `https://depot.galaxyproject.org/singularity/name:version`
+ - **out_path** (_str_) – The final target output path
+  - **cache_path** (_str_ or _None_) – The NXF_SINGULARITY_CACHEDIR path if set, None if not
+ - **progress** (_Progress_) – Rich progress bar instance to add tasks to.
+
+#### `singularity_image_filenames(container){:python}`
+
+Check Singularity cache for image, copy to destination folder if found.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Can be direct download URL
+ or a Docker Hub repository ID.
+- **Returns:**
+ Returns True if we have the image in the target location.
+  Returns a download path if not.
+- **Return type:**
+ results (bool, str)
+
+#### `singularity_pull_image(container, out_path, cache_path, progress){:python}`
+
+Pull a singularity image using `singularity pull`
+
+Attempt to use a local installation of singularity to pull the image.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `nfcore/name:version`.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the expected value.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
diff --git a/src/content/tools/docs/1.13.2/api/index.md b/src/content/tools/docs/1.13.2/api/index.md
new file mode 100644
index 0000000000..8fcbeab99d
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/index.md
@@ -0,0 +1,49 @@
+# API Reference
+
+# Tests:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+ - [`PipelineCreate`](create#nf_core.create.PipelineCreate)
+- [nf_core.download](download)
+ - [`DownloadProgress`](download#nf_core.download.DownloadProgress)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+ - [`Launch`](launch#nf_core.launch.Launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+ - [`PipelineSync`](sync#nf_core.sync.PipelineSync)
+ - [`PullRequestException`](sync#nf_core.sync.PullRequestException)
+ - [`SyncException`](sync#nf_core.sync.SyncException)
+- [nf_core.utils](utils)
+ - [`Pipeline`](utils#nf_core.utils.Pipeline)
+ - [`anaconda_package()`](utils#nf_core.utils.anaconda_package)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`custom_yaml_dumper()`](utils#nf_core.utils.custom_yaml_dumper)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`get_biocontainer_tag()`](utils#nf_core.utils.get_biocontainer_tag)
+ - [`github_api_auto_auth()`](utils#nf_core.utils.github_api_auto_auth)
+ - [`nextflow_cmd()`](utils#nf_core.utils.nextflow_cmd)
+ - [`parse_anaconda_licence()`](utils#nf_core.utils.parse_anaconda_licence)
+ - [`pip_package()`](utils#nf_core.utils.pip_package)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
diff --git a/src/content/tools/docs/1.13.2/api/launch.md b/src/content/tools/docs/1.13.2/api/launch.md
new file mode 100644
index 0000000000..8affe0fa4d
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/launch.md
@@ -0,0 +1,87 @@
+# nf_core.launch
+
+Launch a pipeline, interactively collecting params
+
+### _`class{:python}`_`nf_core.launch.Launch(pipeline=None, revision=None, command_only=False, params_in=None, params_out=None, save_all=False, show_hidden=False, url=None, web_id=None){:python}`
+
+Bases: `object`
+
+Class to hold config options to launch a pipeline
+
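+A minimal usage sketch, assuming nf-core/tools is installed; the pipeline name is a placeholder:
+
+```python
+from nf_core.launch import Launch
+
+# Collect params interactively and build the `nextflow run` command
+# without executing it (command_only=True).
+launcher = Launch(pipeline="nf-core/rnaseq", command_only=True)
+launcher.launch_pipeline()
+```
+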
+#### `build_command(){:python}`
+
+Build the nextflow run command based on what we know
+
+#### `get_pipeline_schema(){:python}`
+
+Load and validate the schema from the supplied pipeline
+
+#### `get_web_launch_response(){:python}`
+
+Given a URL for a web-gui launch response, recursively query it until results are ready.
+
+#### `launch_pipeline(){:python}`
+
+#### `launch_web_gui(){:python}`
+
+Send schema to nf-core website and launch input GUI
+
+#### `launch_workflow(){:python}`
+
+Launch nextflow if required
+
+#### `merge_nxf_flag_schema(){:python}`
+
+Take the Nextflow flag schema and merge it with the pipeline schema
+
+#### `print_param_header(param_id, param_obj, is_group=False){:python}`
+
+#### `prompt_group(group_id, group_obj){:python}`
+
+Prompt for edits to a group of parameters (subschema in ‘definitions’)
+
+- **Parameters:**
+  - **group_id** – Parameter ID (string)
+ - **group_obj** – JSON Schema keys (dict)
+- **Returns:**
+  Dict of param_id: val answers
+
+#### `prompt_param(param_id, param_obj, is_required, answers){:python}`
+
+Prompt for a single parameter
+
+#### `prompt_schema(){:python}`
+
+Go through the pipeline schema and prompt user to change defaults
+
+#### `prompt_web_gui(){:python}`
+
+Ask whether to use the web-based or cli wizard to collect params
+
+#### `sanitise_web_response(){:python}`
+
+The web builder returns everything as strings.
+Use the functions defined in the cli wizard to convert to the correct types.
+
+#### `set_schema_inputs(){:python}`
+
+Take the loaded schema and set the defaults as the input parameters.
+If a nf_params.json file is supplied, apply these over the top.
+
+#### `single_param_to_questionary(param_id, param_obj, answers=None, print_help=True){:python}`
+
+Convert a JSONSchema param to a Questionary question
+
+- **Parameters:**
+ - **param_id** – Parameter ID (string)
+ - **param_obj** – JSON Schema keys (dict)
+ - **answers** – Optional preexisting answers (dict)
+ - **print_help** – If description and help_text should be printed (bool)
+- **Returns:**
+ Single Questionary dict, to be appended to questions list
+
+#### `strip_default_params(){:python}`
+
+Strip parameters if they have not changed from the default
diff --git a/src/content/tools/docs/1.13.2/api/licences.md b/src/content/tools/docs/1.13.2/api/licences.md
new file mode 100644
index 0000000000..578ddaae1d
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/licences.md
@@ -0,0 +1,37 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
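+
+A minimal usage sketch, assuming nf-core/tools is installed; the pipeline name is just an example:
+
+```python
+from nf_core.licences import WorkflowLicences
+
+# Fetch dependency licence information and print the summary table.
+licences = WorkflowLicences("hlatyping")
+licences.run_licences()
+```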
diff --git a/src/content/tools/docs/1.13.2/api/lint.md b/src/content/tools/docs/1.13.2/api/lint.md
new file mode 100644
index 0000000000..97f22f4911
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/lint.md
@@ -0,0 +1,138 @@
+# nf_core.lint
+
+:::note
+See the [Lint Tests](../lint_tests/index.html) docs for information about specific linting functions.
+:::
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, fix=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
+
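+A minimal usage sketch, assuming nf-core/tools is installed; the pipeline path is a placeholder:
+
+```python
+from nf_core.lint import run_linting
+
+# Run all lint tests and inspect the collected results.
+lint_obj = run_linting("/path/to/my-pipeline", release_mode=False, show_passed=True)
+for test_name, reason in lint_obj.failed:  # each entry is a (test name, reason) tuple
+    print(f"FAILED {test_name}: {reason}")
+```
+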
+### _`class{:python}`_`nf_core.lint.PipelineLint(wf_path, release_mode=False, fix=(), fail_ignored=False){:python}`
+
+Bases: [`Pipeline`](utils#nf_core.utils.Pipeline)
+
+Object to hold linting information and results.
+
+Inherits [`nf_core.utils.Pipeline`](utils#nf_core.utils.Pipeline) class.
+
+Use the [`PipelineLint._lint_pipeline()`](#nf_core.lint.PipelineLint._lint_pipeline) function to run lint tests.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `ignored{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `lint_config{:python}`
+
+The parsed nf-core linting config for this pipeline
+
+- **Type:**
+ dict
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `_get_results_md(){:python}`
+
+Create a markdown file suitable for posting in a GitHub comment.
+
+- **Returns:**
+  Formatted markdown content
+- **Return type:**
+ markdown (str)
+
+#### `_lint_pipeline(){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+into object attributes: `passed`, `ignored`, `warned` and `failed`.
+
+#### `_print_results(show_passed=False){:python}`
+
+Print linting results to the command line.
+
+Uses the `rich` library to print a set of formatted tables to the command line
+summarising the linting results.
+
+#### `_save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+- **Parameters:**
+ **json_fn** (_str_) – File path to write JSON to.
+
+#### `_strip_ansi_codes(string, replace_with=''){:python}`
+
+Strip ANSI colouring codes from a string to return plain text.
+
+Solution found on Stack Overflow.
+
+#### `_wrap_quotes(files){:python}`
+
+Helper function to take a list of filenames and format with markdown.
+
+- **Parameters:**
+ **files** (_list_) –
+
+ List of filenames, eg:
+
+ ```default
+ ['foo', 'bar', 'baz']
+ ```
+
+- **Returns:**
+ Formatted string of paths separated by word `or`, eg:
+ ```default
+  `foo` or `bar` or `baz`
+ ```
+- **Return type:**
+ markdown (str)
diff --git a/src/content/tools/docs/1.13.2/api/list.md b/src/content/tools/docs/1.13.2/api/list.md
new file mode 100644
index 0000000000..2fc6448de0
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/list.md
@@ -0,0 +1,99 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Bases: `object`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+Bases: `object`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Bases: `object`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
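+A minimal usage sketch combining the methods documented below; assumes network access to nf-co.re:
+
+```python
+from nf_core.list import Workflows
+
+wfs = Workflows(filter_by=["rna"], sort_by="stars")
+wfs.get_remote_workflows()    # fetch the remote pipeline list from nf-co.re
+wfs.get_local_nf_workflows()  # find pipelines already pulled by Nextflow
+wfs.compare_remote_local()    # flag whether local copies are the latest release
+wfs.print_summary()
+```
+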
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on an external snippet, adapted by sven1103.
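+
+A minimal usage sketch:
+
+```python
+import time
+
+from nf_core.list import pretty_date
+
+print(pretty_date(int(time.time()) - 3600))  # e.g. 'an hour ago'
+```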
diff --git a/src/content/tools/docs/1.13.2/api/modules.md b/src/content/tools/docs/1.13.2/api/modules.md
new file mode 100644
index 0000000000..2dd47b6359
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/modules.md
@@ -0,0 +1 @@
+# nf_core.modules
diff --git a/src/content/tools/docs/1.13.2/api/schema.md b/src/content/tools/docs/1.13.2/api/schema.md
new file mode 100644
index 0000000000..162788eb1a
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/schema.md
@@ -0,0 +1,109 @@
+# nf_core.schema
+
+Code to deal with pipeline JSON Schema
+
+### _`class{:python}`_`nf_core.schema.PipelineSchema{:python}`
+
+Bases: `object`
+
+Class to generate a schema object with
+functions to handle pipeline JSON Schema
+
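+A minimal usage sketch, assuming nf-core/tools is installed; the path is a placeholder:
+
+```python
+from nf_core.schema import PipelineSchema
+
+schema_obj = PipelineSchema()
+schema_obj.get_schema_path("/path/to/my-pipeline")  # locate nextflow_schema.json
+schema_obj.load_lint_schema()                       # load the file and check it looks valid
+```
+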
+#### `add_schema_found_configs(){:python}`
+
+Add anything that’s found in the Nextflow params that’s missing in the pipeline schema
+
+#### `build_schema(pipeline_dir, no_prompts, web_only, url){:python}`
+
+Interactively build a new pipeline schema for a pipeline
+
+#### `build_schema_param(p_val){:python}`
+
+Build a pipeline schema dictionary for a param interactively
+
+#### `get_schema_defaults(){:python}`
+
+Generate set of default input parameters from schema.
+
+Saves defaults to self.schema_defaults
+Returns count of how many parameters were found (with or without a default value)
+
+#### `get_schema_path(path, local_only=False, revision=None){:python}`
+
+Given a pipeline name, directory, or path, set self.schema_filename
+
+#### `get_web_builder_response(){:python}`
+
+Given a URL for a Schema build response, recursively query it until results are ready.
+Once ready, validate Schema and write to disk.
+
+#### `get_wf_params(){:python}`
+
+Load the pipeline parameter defaults using nextflow config
+Strip out only the `params.` values and ignore anything that is not a flat variable
+
+#### `launch_web_builder(){:python}`
+
+Send pipeline schema to web builder and wait for response
+
+#### `load_input_params(params_path){:python}`
+
+Load a given path to a parameters file (JSON/YAML)
+
+These should be input parameters used to run a pipeline with
+the Nextflow -params-file option.
+
+#### `load_lint_schema(){:python}`
+
+Load and lint a given schema to see if it looks valid
+
+#### `load_schema(){:python}`
+
+Load a pipeline schema from a file
+
+#### `make_skeleton_schema(){:python}`
+
+Make a new pipeline schema from the template
+
+#### `prompt_remove_schema_notfound_config(p_key){:python}`
+
+Check if a given key is found in the nextflow config params and prompt to remove it if not.
+
+Returns True if it should be removed, False if not.
+
+#### `remove_schema_notfound_configs(){:python}`
+
+Go through top-level schema and all definitions sub-schemas to remove
+anything that’s not in the nextflow config.
+
+#### `remove_schema_notfound_configs_single_schema(schema){:python}`
+
+Go through a single schema / set of properties and strip out
+anything that’s not in the nextflow config.
+
+Takes: Schema or sub-schema with properties key
+Returns: Cleaned schema / sub-schema
+
+#### `save_schema(){:python}`
+
+Save a pipeline schema to a file
+
+#### `validate_default_params(){:python}`
+
+Check that all default parameters in the schema are valid
+Ignores ‘required’ flag, as required parameters might have no defaults
+
+#### `validate_params(){:python}`
+
+Check given parameters against a schema and validate
+
+#### `validate_schema(schema=None){:python}`
+
+Check that the Schema is valid
+
+Returns: Number of parameters found
+
+#### `validate_schema_title_description(schema=None){:python}`
+
+Extra validation command for linting.
+Checks that the schema “$id”, “title” and “description” attributes match the pipeline config.
diff --git a/src/content/tools/docs/1.13.2/api/sync.md b/src/content/tools/docs/1.13.2/api/sync.md
new file mode 100644
index 0000000000..e327553c3f
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/sync.md
@@ -0,0 +1,151 @@
+# nf_core.sync
+
+Synchronise a pipeline TEMPLATE branch with the template.
+
+### _`class{:python}`_`nf_core.sync.PipelineSync(pipeline_dir, from_branch=None, make_pr=False, gh_repo=None, gh_username=None){:python}`
+
+Bases: `object`
+
+Object to hold syncing information and results.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **from_branch** (_str_) – The branch to use to fetch config vars. If not set, will use current active branch
+ - **make_pr** (_bool_) – Set this to True to create a GitHub pull-request with the changes
+ - **gh_username** (_str_) – GitHub username
+ - **gh_repo** (_str_) – GitHub repository name
+
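+A minimal usage sketch; the path is a placeholder and no pull-request is opened:
+
+```python
+from nf_core.sync import PipelineSync, PullRequestException, SyncException
+
+try:
+    PipelineSync("/path/to/my-pipeline", make_pr=False).sync()
+except (SyncException, PullRequestException) as e:
+    print(f"Template sync failed: {e}")
+```
+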
+#### `pipeline_dir{:python}`
+
+Path to target pipeline directory
+
+- **Type:**
+ str
+
+#### `from_branch{:python}`
+
+Repo branch to use when collecting workflow variables. Default: active branch.
+
+- **Type:**
+ str
+
+#### `original_branch{:python}`
+
+Repo branch that was checked out before we started.
+
+- **Type:**
+ str
+
+#### `made_changes{:python}`
+
+Whether making the new template pipeline introduced any changes
+
+- **Type:**
+ bool
+
+#### `make_pr{:python}`
+
+Whether to try to automatically make a PR on GitHub.com
+
+- **Type:**
+ bool
+
+#### `required_config_vars{:python}`
+
+List of nextflow variables required to make template pipeline
+
+- **Type:**
+ list
+
+#### `gh_username{:python}`
+
+GitHub username
+
+- **Type:**
+ str
+
+#### `gh_repo{:python}`
+
+GitHub repository name
+
+- **Type:**
+ str
+
+#### `checkout_template_branch(){:python}`
+
+Try to check out the origin/TEMPLATE in a new TEMPLATE branch.
+If this fails, try to check out an existing local TEMPLATE branch.
+
+#### `close_open_pr(pr){:python}`
+
+Given a PR API response, add a comment and close.
+
+#### `close_open_template_merge_prs(){:python}`
+
+Get all template merging branches (starting with ‘nf-core-template-merge-‘)
+and check for any open PRs from these branches to the self.from_branch
+If open PRs are found, add a comment and close them
+
+#### `commit_template_changes(){:python}`
+
+If we have any changes with the new template files, make a git commit
+
+#### `create_merge_base_branch(){:python}`
+
+Create a new branch from the updated TEMPLATE branch
+This branch will then be used to create the PR
+
+#### `delete_template_branch_files(){:python}`
+
+Delete all files in the TEMPLATE branch
+
+#### `get_wf_config(){:python}`
+
+Check out the target branch if requested and fetch the nextflow config.
+Check that we have the required config variables.
+
+#### `inspect_sync_dir(){:python}`
+
+Takes a look at the target directory for syncing. Checks that it’s a git repo
+and makes sure that there are no uncommitted changes.
+
+#### `make_pull_request(){:python}`
+
+Create a pull request to a base branch (default: dev),
+from a head branch (default: TEMPLATE)
+
+Returns: An instance of class requests.Response
+
+#### `make_template_pipeline(){:python}`
+
+Delete all files and make a fresh template using the workflow variables
+
+#### `push_merge_branch(){:python}`
+
+Push the newly created merge branch to the remote repository
+
+#### `push_template_branch(){:python}`
+
+If we made any changes, push the TEMPLATE branch to the default remote
+and try to make a PR. If we don’t have the auth token, try to figure out a URL
+for the PR and print this to the console.
+
+#### `reset_target_dir(){:python}`
+
+Reset the target pipeline directory. Check out the original branch.
+
+#### `sync(){:python}`
+
+Find workflow attributes, create a new template pipeline on TEMPLATE
+
+### _`exception{:python}`_`nf_core.sync.PullRequestException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error creating a Pull-Request on GitHub.com
+
+### _`exception{:python}`_`nf_core.sync.SyncException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error with TEMPLATE branch synchronisation
diff --git a/src/content/tools/docs/1.13.2/api/utils.md b/src/content/tools/docs/1.13.2/api/utils.md
new file mode 100644
index 0000000000..782b715f4a
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/api/utils.md
@@ -0,0 +1,200 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### _`class{:python}`_`nf_core.utils.Pipeline(wf_path){:python}`
+
+Bases: `object`
+
+Object to hold information about a local pipeline.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (`environment.yml`).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `nf_config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `git_sha{:python}`
+
+The git sha for the repo commit / current GitHub pull-request ($GITHUB_PR_COMMIT)
+
+- **Type:**
+ str
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `wf_path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `schema_obj{:python}`
+
+A `PipelineSchema` object
+
+- **Type:**
+ obj
+
+#### `_fp(fn){:python}`
+
+Convenience function to get full path to a file in the pipeline
+
+#### `_list_files(){:python}`
+
+Get a list of all files in the pipeline
+
+#### `_load(){:python}`
+
+Run core load functions
+
+#### `_load_conda_environment(){:python}`
+
+Try to load the pipeline environment.yml file, if it exists
+
+#### `_load_pipeline_config(){:python}`
+
+Get the nextflow config for this pipeline
+
+Once loaded, set a few convenience reference class attributes
+
+### `nf_core.utils.anaconda_package(dep, dep_channels=['conda-forge', 'bioconda', 'defaults']){:python}`
+
+Query conda package information.
+
+Sends an HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ - **dep** (_str_) – A conda package name.
+ - **dep_channels** (_list_) – list of conda channels to use
+- **Raises:**
+  - **A LookupError**, if the connection fails or times out or gives an unexpected status code.
+  - **A ValueError**, if the package name can not be found (404).
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.custom_yaml_dumper(){:python}`
+
+Overwrite default PyYAML output to make Prettier YAML linting happy
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
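+A minimal usage sketch, assuming Nextflow is installed; the path and the config key shown are placeholders:
+
+```python
+from nf_core.utils import fetch_wf_config
+
+config = fetch_wf_config("/path/to/my-pipeline")
+# Keys are flat config names, e.g. 'manifest.name' (assumed here for illustration).
+print(config.get("manifest.name"))
+```
+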
+### `nf_core.utils.get_biocontainer_tag(package, version){:python}`
+
+Given a bioconda package and version, looks for a container
+at quay.io and returns the tag of the most recent image
+that matches the package version.
+
+Sends an HTTP GET request to the quay.io API.
+
+- **Parameters:**
+  - **package** (_str_) – A bioconda package name.
+  - **version** (_str_) – Version of the bioconda package.
+- **Raises:**
+  - **A LookupError**, if the connection fails or times out or gives an unexpected status code.
+  - **A ValueError**, if the package name can not be found (404).
+
+### `nf_core.utils.github_api_auto_auth(){:python}`
+
+### `nf_core.utils.nextflow_cmd(cmd){:python}`
+
+Run a Nextflow command and capture the output. Handle errors nicely
+
+### `nf_core.utils.parse_anaconda_licence(anaconda_response, version=None){:python}`
+
+Given a response from the anaconda API using anaconda_package, parse the software licences.
+
+Returns: Set of licence types
+
+### `nf_core.utils.pip_package(dep){:python}`
+
+Query PyPI package information.
+
+Sends an HTTP GET request to the PyPI remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPI package name.
+- **Raises:**
+  - **A LookupError**, if the connection fails or times out.
+  - **A ValueError**, if the package name can not be found.
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes argument api_url for URL
+
+Expects the API response to be valid JSON and to contain a top-level ‘status’ key.
+
+### `nf_core.utils.rich_force_colors(){:python}`
+
+Check if any environment variables are set to force Rich to use coloured output
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
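+
+A minimal usage sketch: poll a condition every two seconds (20 tenths of a second) until it becomes true:
+
+```python
+import time
+
+from nf_core.utils import wait_cli_function
+
+start = time.time()
+wait_cli_function(lambda: time.time() - start > 10, poll_every=20)
+```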
diff --git a/src/content/tools/docs/1.13.2/index.md b/src/content/tools/docs/1.13.2/index.md
new file mode 100644
index 0000000000..c5988c4254
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/index.md
@@ -0,0 +1,18 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+This documentation is for the `nf-core/tools` package.
+
+Primarily, it describes the different [code lint tests](lint_tests/index.html)
+run by `nf-core lint` (typically visited by a developer when their pipeline fails a given
+test), and also provides a reference for the `nf_core` [Python package API](api/index.html).
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.13.2/lint_tests/actions_awsfulltest.md b/src/content/tools/docs/1.13.2/lint_tests/actions_awsfulltest.md
new file mode 100644
index 0000000000..fd32fa4a32
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/actions_awsfulltest.md
@@ -0,0 +1,30 @@
+# actions_awsfulltest
+
+#### `PipelineLint.actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS.
+This should ensure that the pipeline runs as expected on AWS and provide a resource estimation.
+
+The GitHub Actions workflow is called `awsfulltest.yml`, and it can be found in the `.github/workflows/` directory.
+
+:::warning
+This workflow incurs AWS costs, therefore it should only be triggered for pipeline releases:
+`workflow_run` (after the docker hub release workflow) and `workflow_dispatch`.
+:::
+
+:::note
+You can manually trigger the AWS tests by going to the Actions tab on the pipeline GitHub repository and selecting the
+nf-core AWS full size tests workflow on the left.
+:::
+
+:::note
+For tests on full data prior to release, the [Nextflow Tower](https://tower.nf) launch feature can be employed.
+:::
+
+The `.github/workflows/awsfulltest.yml` file is tested for the following:
+
+- Must be turned on for `workflow_dispatch`.
+- Must be turned on for `workflow_run` with `workflows: ["nf-core Docker push (release)"]` and `types: [completed]`.
+- Should run the profile `test_full` that should be edited to provide the links to full-size datasets. If it runs the profile `test`, a warning is given.
diff --git a/src/content/tools/docs/1.13.2/lint_tests/actions_awstest.md b/src/content/tools/docs/1.13.2/lint_tests/actions_awstest.md
new file mode 100644
index 0000000000..4e4698c4e9
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/actions_awstest.md
@@ -0,0 +1,24 @@
+# actions_awstest
+
+#### `PipelineLint.actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on AWS.
+This should ensure that the pipeline runs as expected on AWS (which often has its own unique edge cases).
+
+:::warning
+Running tests on AWS incurs costs, so these tests are not triggered automatically.
+Instead, they use the `workflow_dispatch` trigger, which allows for manual triggering
+of the workflow when testing on AWS is desired.
+:::
+
+:::note
+You can trigger the tests by going to the Actions tab on the pipeline GitHub repository
+and selecting the nf-core AWS test workflow on the left.
+:::
+
+The `.github/workflows/awstest.yml` file is tested for the following:
+
+- Must _not_ be turned on for `push` or `pull_request`.
+- Must be turned on for `workflow_dispatch`.
diff --git a/src/content/tools/docs/1.13.2/lint_tests/actions_ci.md b/src/content/tools/docs/1.13.2/lint_tests/actions_ci.md
new file mode 100644
index 0000000000..34f2096391
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/actions_ci.md
@@ -0,0 +1,63 @@
+# actions_ci
+
+#### `PipelineLint.actions_ci(){:python}`
+
+Checks that the GitHub Actions pipeline CI (Continuous Integration) workflow is valid.
+
+The `.github/workflows/ci.yml` GitHub Actions workflow runs the pipeline on a minimal test
+dataset using `-profile test` to check that no breaking changes have been introduced.
+Final result files are not checked, just that the pipeline exits successfully.
+
+This lint test checks this GitHub Actions workflow file for the following:
+
+- Workflow must be triggered on the following events:
+ ```yaml
+ on:
+ push:
+ branches:
+ - dev
+ pull_request:
+ release:
+ types: [published]
+ ```
+- The minimum Nextflow version specified in the pipeline’s `nextflow.config` matches that defined by `nxf_ver` in the test matrix:
+
+ ```yaml
+ strategy:
+ matrix:
+ # Nextflow versions: check pipeline minimum and current latest
+ nxf_ver: ['19.10.0', '']
+ ```
+
+ :::note
+ These `matrix` variables run the test workflow twice, varying the `nxf_ver` variable each time.
+ This is used in the `nextflow run` commands to test the pipeline with both the latest available version
+ of the pipeline (`''`) and the stated minimum required version.
+ :::
+
+- The Docker container for the pipeline must use the correct pipeline version number:
+
+ > - Development pipelines:
+ > ```bash
+ > docker tag nfcore/:dev nfcore/:dev
+ > ```
+ > - Released pipelines:
+ > ```bash
+ > docker tag nfcore/:dev nfcore/:
+ > ```
+ > - Complete example for a released pipeline called _nf-core/example_ with version number `1.0.0`:
+ > ```yaml
+ > - name: Build new docker image
+ > if: env.GIT_DIFF
+ > run: docker build --no-cache . -t nfcore/example:1.0.0
+ > ```
+
+ > ```yaml
+ > - name: Pull docker image
+ >   if: ${{ !env.GIT_DIFF }}
+ >   run: |
+ >     docker pull nfcore/example:dev
+ >     docker tag nfcore/example:dev nfcore/example:1.0.0
+ > ```
diff --git a/src/content/tools/docs/1.13.2/lint_tests/actions_schema_validation.md b/src/content/tools/docs/1.13.2/lint_tests/actions_schema_validation.md
new file mode 100644
index 0000000000..caa60db6c4
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/actions_schema_validation.md
@@ -0,0 +1,18 @@
+# actions_schema_validation
+
+#### `PipelineLint.actions_schema_validation(){:python}`
+
+Checks that the GitHub Action workflow yml/yaml files adhere to the correct schema
+
+nf-core pipelines use GitHub actions workflows to run CI tests, check formatting and also linting, among others.
+These workflows are defined by `yml` scripts in `.github/workflows/`. This lint test verifies that these scripts are valid
+by comparing them against the JSON schema for GitHub workflows.
+
+To pass this test, make sure that all your workflows contain the required properties `on` and `jobs` and that
+all other properties are of the correct type, as specified in the schema.
diff --git a/src/content/tools/docs/1.13.2/lint_tests/conda_dockerfile.md b/src/content/tools/docs/1.13.2/lint_tests/conda_dockerfile.md
new file mode 100644
index 0000000000..388e51dd9a
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/conda_dockerfile.md
@@ -0,0 +1,33 @@
+# conda_dockerfile
+
+#### `PipelineLint.conda_dockerfile(){:python}`
+
+Checks the Dockerfile for use with Conda environments
+
+:::note
+This test only runs if there is both an `environment.yml`
+and `Dockerfile` present in the pipeline root directory.
+:::
+
+If a workflow has a conda `environment.yml` file, the `Dockerfile` should use this
+to create the docker image. These files are typically very short, just creating the conda
+environment inside the container.
+
+This linting test checks for the following:
+
+- All of the following lines are present in the file (where `PIPELINE` is your pipeline name):
+ > ```Dockerfile
+ > FROM nfcore/base:VERSION
+ > COPY environment.yml /
+ > RUN conda env create --quiet -f /environment.yml && conda clean -a
+ > RUN conda env export --name PIPELINE > PIPELINE.yml
+ > ENV PATH /opt/conda/envs/PIPELINE/bin:$PATH
+ > ```
+- That the `FROM nfcore/base:VERSION` is tagged to the most recent release of nf-core/tools
+ > - The linting tool compares the tag against the currently installed version of tools.
+ > - This line is not checked if running a development version of nf-core/tools.
+
+:::note
+Additional lines and different metadata can be added to the `Dockerfile`
+without causing this lint test to fail.
+:::
diff --git a/src/content/tools/docs/1.13.2/lint_tests/conda_env_yaml.md b/src/content/tools/docs/1.13.2/lint_tests/conda_env_yaml.md
new file mode 100644
index 0000000000..f2dab4fba1
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/conda_env_yaml.md
@@ -0,0 +1,36 @@
+# conda_env_yaml
+
+#### `PipelineLint.conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+:::note
+This test is ignored if there is not an `environment.yml`
+file present in the pipeline root directory.
+:::
+
+DSL1 nf-core pipelines use a single Conda environment to manage all software
+dependencies for a workflow. This can be used directly with `-profile conda`
+and is also used in the `Dockerfile` to build a docker image.
+
+This test checks the conda `environment.yml` file to ensure that it follows nf-core guidelines.
+Each dependency is checked using the [Anaconda API service](https://api.anaconda.org/docs).
+Dependency sublists are ignored with the exception of `- pip`: these packages are also checked
+for pinned version numbers, using the [PyPI JSON API](https://wiki.python.org/moin/PyPIJSON).
+
+Specifically, this lint test makes sure that:
+
+- The environment `name` must match the pipeline name and version
+ > - The pipeline name is defined in the config variable `manifest.name`
+ > - Replace the slash with a hyphen as environment names shouldn’t contain that character
+ > - Example: For `nf-core/test` version 1.4, the conda environment name should be `nf-core-test-1.4`
+- All package dependencies have a specific version number pinned
+ > :::warning
+ > Remember that Conda package versions should be pinned with one equals sign (`toolname=1.1`),
+ > but pip uses two (`toolname==1.2`)
+ > :::
+- That package versions can be found and are the latest available
+ > - Test will go through all conda channels listed in the file, or check PyPI if `pip`
+ > - Conda dependencies with pinned channels (eg. `conda-forge::openjdk`) are ok too
+ > - In addition to the package name, the pinned version is checked
+ > - If a newer version is available, a warning will be reported
diff --git a/src/content/tools/docs/1.13.2/lint_tests/files_exist.md b/src/content/tools/docs/1.13.2/lint_tests/files_exist.md
new file mode 100644
index 0000000000..927fbadce4
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/files_exist.md
@@ -0,0 +1,77 @@
+# files_exist
+
+#### `PipelineLint.files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that specified
+files are either present or absent, as required.
+
+:::note
+This test raises an `AssertionError` if neither `nextflow.config` nor `main.nf` is found.
+If these files are not found then this cannot be a Nextflow pipeline and something has gone badly wrong.
+All lint tests are stopped immediately with a critical error message.
+:::
+
+Files that **must** be present:
+
+```default
+.gitattributes
+.github/.dockstore.yml
+.github/CONTRIBUTING.md
+.github/ISSUE_TEMPLATE/bug_report.md
+.github/ISSUE_TEMPLATE/config.yml
+.github/ISSUE_TEMPLATE/feature_request.md
+.github/markdownlint.yml
+.github/PULL_REQUEST_TEMPLATE.md
+.github/workflows/branch.yml
+.github/workflows/ci.yml
+.github/workflows/linting_comment.yml
+.github/workflows/linting.yml
+[LICENSE, LICENSE.md, LICENCE, LICENCE.md] # NB: British / American spelling
+assets/email_template.html
+assets/email_template.txt
+assets/nf-core-PIPELINE_logo.png
+assets/sendmail_template.txt
+bin/markdown_to_html.py
+CHANGELOG.md
+CODE_OF_CONDUCT.md
+docs/images/nf-core-PIPELINE_logo.png
+docs/output.md
+docs/README.md
+docs/usage.md
+lib/nfcore_external_java_deps.jar
+lib/NfcoreSchema.groovy
+nextflow_schema.json
+nextflow.config
+README.md
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'Dockerfile',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity',
+'parameters.settings.json',
+'bin/markdown_to_html.r',
+'conf/aws.config',
+'.github/workflows/push_dockerhub.yml'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
diff --git a/src/content/tools/docs/1.13.2/lint_tests/files_unchanged.md b/src/content/tools/docs/1.13.2/lint_tests/files_unchanged.md
new file mode 100644
index 0000000000..ea06ada29f
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/files_unchanged.md
@@ -0,0 +1,57 @@
+# files_unchanged
+
+#### `PipelineLint.files_unchanged(){:python}`
+
+Checks that certain pipeline files are not modified from template output.
+
+Iterates through the pipeline’s directory content and compares specified files
+against output from the template using the pipeline’s metadata. File content
+should not be modified / missing.
+
+Files that must be unchanged:
+
+```default
+'.gitattributes',
+'.github/.dockstore.yml',
+'.github/CONTRIBUTING.md',
+'.github/ISSUE_TEMPLATE/bug_report.md',
+'.github/ISSUE_TEMPLATE/config.yml',
+'.github/ISSUE_TEMPLATE/feature_request.md',
+'.github/markdownlint.yml',
+'.github/PULL_REQUEST_TEMPLATE.md',
+'.github/workflows/branch.yml',
+'.github/workflows/linting_comment.yml',
+'.github/workflows/linting.yml',
+'assets/email_template.html',
+'assets/email_template.txt',
+'assets/nf-core-PIPELINE_logo.png',
+'assets/sendmail_template.txt',
+'bin/markdown_to_html.py',
+'CODE_OF_CONDUCT.md',
+'docs/images/nf-core-PIPELINE_logo.png',
+'docs/README.md',
+'lib/nfcore_external_java_deps.jar',
+'lib/NfcoreSchema.groovy',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+```
+
+Files that can have additional content but must include the template contents:
+
+```default
+'.github/workflows/push_dockerhub_dev.yml',
+'.github/workflows/push_dockerhub_release.yml',
+'.gitignore',
+'assets/multiqc_config.yaml',
+```
+
+:::note
+You can configure the `nf-core lint` tests to ignore any of these checks by setting
+the `files_unchanged` key as follows in your linting config file. For example:
+
+```yaml
+files_unchanged:
+ - .github/workflows/branch.yml
+ - assets/multiqc_config.yaml
+```
+
+:::
diff --git a/src/content/tools/docs/1.13.2/lint_tests/index.md b/src/content/tools/docs/1.13.2/lint_tests/index.md
new file mode 100644
index 0000000000..f0e9f7e6a1
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/index.md
@@ -0,0 +1,38 @@
+# Lint tests
+
+# Tests:
+
+- [actions_awsfulltest](actions_awsfulltest)
+ - [`PipelineLint.actions_awsfulltest()`](actions_awsfulltest#nf_core.lint.PipelineLint.actions_awsfulltest)
+- [actions_awstest](actions_awstest)
+ - [`PipelineLint.actions_awstest()`](actions_awstest#nf_core.lint.PipelineLint.actions_awstest)
+- [actions_ci](actions_ci)
+ - [`PipelineLint.actions_ci()`](actions_ci#nf_core.lint.PipelineLint.actions_ci)
+- [actions_schema_validation](actions_schema_validation)
+ - [`PipelineLint.actions_schema_validation()`](actions_schema_validation#nf_core.lint.PipelineLint.actions_schema_validation)
+- [conda_dockerfile](conda_dockerfile)
+ - [`PipelineLint.conda_dockerfile()`](conda_dockerfile#nf_core.lint.PipelineLint.conda_dockerfile)
+- [conda_env_yaml](conda_env_yaml)
+ - [`PipelineLint.conda_env_yaml()`](conda_env_yaml#nf_core.lint.PipelineLint.conda_env_yaml)
+- [files_exist](files_exist)
+ - [`PipelineLint.files_exist()`](files_exist#nf_core.lint.PipelineLint.files_exist)
+- [files_unchanged](files_unchanged)
+ - [`PipelineLint.files_unchanged()`](files_unchanged#nf_core.lint.PipelineLint.files_unchanged)
+- [merge_markers](merge_markers)
+ - [`PipelineLint.merge_markers()`](merge_markers#nf_core.lint.PipelineLint.merge_markers)
+- [nextflow_config](nextflow_config)
+ - [`PipelineLint.nextflow_config()`](nextflow_config#nf_core.lint.PipelineLint.nextflow_config)
+- [pipeline_name_conventions](pipeline_name_conventions)
+ - [`PipelineLint.pipeline_name_conventions()`](pipeline_name_conventions#nf_core.lint.PipelineLint.pipeline_name_conventions)
+- [pipeline_todos](pipeline_todos)
+ - [`PipelineLint.pipeline_todos()`](pipeline_todos#nf_core.lint.PipelineLint.pipeline_todos)
+- [readme](readme)
+ - [`PipelineLint.readme()`](readme#nf_core.lint.PipelineLint.readme)
+- [schema_lint](schema_lint)
+ - [`PipelineLint.schema_lint()`](schema_lint#nf_core.lint.PipelineLint.schema_lint)
+- [schema_params](schema_params)
+ - [`PipelineLint.schema_params()`](schema_params#nf_core.lint.PipelineLint.schema_params)
+- [template_strings](template_strings)
+ - [`PipelineLint.template_strings()`](template_strings#nf_core.lint.PipelineLint.template_strings)
+- [version_consistency](version_consistency)
+ - [`PipelineLint.version_consistency()`](version_consistency#nf_core.lint.PipelineLint.version_consistency)
diff --git a/src/content/tools/docs/1.13.2/lint_tests/merge_markers.md b/src/content/tools/docs/1.13.2/lint_tests/merge_markers.md
new file mode 100644
index 0000000000..38076814b1
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/merge_markers.md
@@ -0,0 +1,8 @@
+# merge_markers
+
+#### `PipelineLint.merge_markers(){:python}`
+
+Check for remaining merge markers.
+
+This test looks for remaining merge markers in the code, e.g.:
+`>>>>>>>` or `<<<<<<<`
diff --git a/src/content/tools/docs/1.13.2/lint_tests/nextflow_config.md b/src/content/tools/docs/1.13.2/lint_tests/nextflow_config.md
new file mode 100644
index 0000000000..7f77f40c66
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/nextflow_config.md
@@ -0,0 +1,65 @@
+# nextflow_config
+
+#### `PipelineLint.nextflow_config(){:python}`
+
+Checks the pipeline configuration for required variables.
+
+All nf-core pipelines are required to be configured with a minimal set of variable
+names. This test fails or throws warnings if required variables are not set.
+
+:::note
+These config variables must be set in `nextflow.config` or another config
+file imported from there. Any variables set in nextflow script files (eg. `main.nf`)
+are not checked and will be assumed to be missing.
+:::
+
+**The following variables fail the test if missing:**
+
+- `params.outdir`: A directory in which all pipeline results should be saved
+- `manifest.name`: The pipeline name. Should begin with `nf-core/`
+- `manifest.description`: A description of the pipeline
+- `manifest.version`
+ - The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/).
+ - If `--release` is set when running `nf-core lint`, the version number must not contain the string `dev`
+  - If `--release` is _not_ set, the version should end in `dev` (warning triggered if not)
+- `manifest.nextflowVersion`
+ - The minimum version of Nextflow required to run the pipeline.
+ - Should be `>=` or `!>=` and a version number, eg. `manifest.nextflowVersion = '>=0.31.0'` (see [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html#scope-manifest))
+ - `>=` warns about old versions but tries to run anyway, `!>=` fails for old versions. Only use the latter if you _know_ that the pipeline will certainly fail before this version.
+ - This should correspond to the `NXF_VER` version tested by GitHub Actions.
+- `manifest.homePage`
+ - The homepage for the pipeline. Should be the nf-core GitHub repository URL,
+ so beginning with `https://github.com/nf-core/`
+- `timeline.enabled`, `trace.enabled`, `report.enabled`, `dag.enabled`
+ - The nextflow timeline, trace, report and DAG should be enabled by default (set to `true`)
+- `process.cpus`, `process.memory`, `process.time`
+ - Default CPUs, memory and time limits for tasks
+- `params.input`
+ - Input parameter to specify input data, specify this to avoid a warning
+ - Typical usage:
+ - `params.input`: Input data that is not NGS sequencing data
+
+**The following variables throw warnings if missing:**
+
+- `manifest.mainScript`: The filename of the main pipeline script (should be `main.nf`)
+- `timeline.file`, `trace.file`, `report.file`, `dag.file`
+ - Default filenames for the timeline, trace and report
+ - The DAG file path should end with `.svg` (If Graphviz is not installed, Nextflow will generate a `.dot` file instead)
+- `process.container`
+ - Docker Hub handle for a single default container for use by all processes.
+ - Must specify a tag that matches the pipeline version number if set.
+ - If the pipeline version number contains the string `dev`, the DockerHub tag must be `:dev`
+
+**The following variables are deprecated and fail the test if they are still present:**
+
+- `params.version`: The old method for specifying the pipeline version. Replaced by `manifest.version`
+- `params.nf_required_version`: The old method for specifying the minimum Nextflow version. Replaced by `manifest.nextflowVersion`
+- `params.container`: The old method for specifying the dockerhub container address. Replaced by `process.container`
+- `igenomesIgnore`: Changed to `igenomes_ignore`
+ > :::note
+ > The `snake_case` convention should now be used when defining pipeline parameters
+ > :::
+
+**The following Nextflow syntax is deprecated and fails the test if present:**
+
+- Process-level configuration syntax still using the old Nextflow syntax, for example: `process.$fastqc` instead of `process withName:'fastqc'`.
diff --git a/src/content/tools/docs/1.13.2/lint_tests/pipeline_name_conventions.md b/src/content/tools/docs/1.13.2/lint_tests/pipeline_name_conventions.md
new file mode 100644
index 0000000000..788d77e41a
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/pipeline_name_conventions.md
@@ -0,0 +1,13 @@
+# pipeline_name_conventions
+
+#### `PipelineLint.pipeline_name_conventions(){:python}`
+
+Checks that the pipeline name adheres to nf-core conventions.
+
+In order to ensure consistent naming, pipeline names should contain only lower case, alphanumeric characters.
+Otherwise a warning is displayed.
+
+:::warning
+DockerHub is very picky about image names and doesn’t even allow hyphens (we are `nfcore`).
+This is a large part of why we set this rule.
+:::
diff --git a/src/content/tools/docs/1.13.2/lint_tests/pipeline_todos.md b/src/content/tools/docs/1.13.2/lint_tests/pipeline_todos.md
new file mode 100644
index 0000000000..e8a73d1663
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/pipeline_todos.md
@@ -0,0 +1,28 @@
+# pipeline_todos
+
+#### `PipelineLint.pipeline_todos(){:python}`
+
+Check for nf-core _TODO_ lines.
+
+The nf-core workflow template contains a number of comment lines to help developers
+of new pipelines know where they need to edit files and add content.
+They typically have the following format:
+
+```groovy
+// TODO nf-core: Make some kind of change to the workflow here
+```
+
+...or in markdown:
+
+```html
+<!-- TODO nf-core: Make some kind of change to the workflow here -->
+```
+
+This lint test runs through all files in the pipeline and searches for these lines.
+If any are found they will throw a warning.
+
+:::note
+Note that many GUI code editors have plugins to list all instances of _TODO_
+in a given project directory. This is a very quick and convenient way to get
+started on your pipeline!
+:::
diff --git a/src/content/tools/docs/1.13.2/lint_tests/readme.md b/src/content/tools/docs/1.13.2/lint_tests/readme.md
new file mode 100644
index 0000000000..51b8040c97
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/readme.md
@@ -0,0 +1,25 @@
+# readme
+
+#### `PipelineLint.readme(){:python}`
+
+Repository `README.md` tests
+
+The `README.md` files for a project are very important and must meet some requirements:
+
+- Nextflow badge
+ - If no Nextflow badge is found, a warning is given
+ - If a badge is found but the version doesn’t match the minimum version in the config file, the test fails
+ - Example badge code:
+ ```md
+ [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.27.6-brightgreen.svg)](https://www.nextflow.io/)
+ ```
+- Bioconda badge
+ - If your pipeline contains a file called `environment.yml` in the root directory, a bioconda badge is required
+ - Required badge code:
+ ```md
+ [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)
+ ```
+
+:::note
+These badges are a markdown image `![alt-text]()` _inside_ a markdown link `[markdown image]()`, so a bit fiddly to write.
+:::
diff --git a/src/content/tools/docs/1.13.2/lint_tests/schema_lint.md b/src/content/tools/docs/1.13.2/lint_tests/schema_lint.md
new file mode 100644
index 0000000000..b5e29febba
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/schema_lint.md
@@ -0,0 +1,59 @@
+# schema_lint
+
+#### `PipelineLint.schema_lint(){:python}`
+
+Pipeline schema syntax
+
+Pipelines should have a `nextflow_schema.json` file that describes the different
+pipeline parameters (eg. `params.something`, `--something`).
+
+:::note
+Reminder: you should generally never need to edit this JSON file by hand.
+The `nf-core schema build` command can create _and edit_ the file for you
+to keep it up to date, with a friendly user-interface for customisation.
+:::
+
+The lint test checks the schema for the following:
+
+- Schema should be a valid JSON file
+- Schema should adhere to [JSONSchema](https://json-schema.org/), Draft 7.
+- Parameters can be described in two places:
+ > - As `properties` in the top-level schema object
+  > - As `properties` within subschemas listed in a top-level `definitions` object
+- The schema must describe at least one parameter
+- There must be no duplicate parameter IDs across the schema and definition subschema
+- All subschema in `definitions` must be referenced in the top-level `allOf` key
+- The top-level `allOf` key must not describe any non-existent definitions
+- Default parameters in the schema must be valid
+- Core top-level schema attributes should exist and be set as follows:
+ > - `$schema`: `https://json-schema.org/draft-07/schema`
+ > - `$id`: URL to the raw schema file, eg. `https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`
+ > - `title`: `YOURPIPELINE pipeline parameters`
+ > - `description`: The pipeline config `manifest.description`
+
+For example, an _extremely_ minimal schema could look like this:
+
+```json
+{
+ "$schema": "https://json-schema.org/draft-07/schema",
+ "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json",
+ "title": "YOURPIPELINE pipeline parameters",
+ "description": "This pipeline is for testing",
+ "properties": {
+ "first_param": { "type": "string" }
+ },
+ "definitions": {
+ "my_first_group": {
+ "properties": {
+ "second_param": { "type": "string" }
+ }
+ }
+ },
+ "allOf": [{ "$ref": "#/definitions/my_first_group" }]
+}
+```
+
+:::note
+You can check your pipeline schema without having to run the entire pipeline lint
+by running `nf-core schema lint` instead of `nf-core lint`
+:::
diff --git a/src/content/tools/docs/1.13.2/lint_tests/schema_params.md b/src/content/tools/docs/1.13.2/lint_tests/schema_params.md
new file mode 100644
index 0000000000..7d9440fa15
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/schema_params.md
@@ -0,0 +1,11 @@
+# schema_params
+
+#### `PipelineLint.schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline.
+
+The `nextflow_schema.json` pipeline schema should describe every flat parameter
+returned from the `nextflow config` command (params that are objects or more complex structures are ignored).
+
+- Failure: If parameters are found in the pipeline config that are not described in `nextflow_schema.json`
+- Warning: If parameters are found in `nextflow_schema.json` that are not in the pipeline config
diff --git a/src/content/tools/docs/1.13.2/lint_tests/template_strings.md b/src/content/tools/docs/1.13.2/lint_tests/template_strings.md
new file mode 100644
index 0000000000..23c06bb63d
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/template_strings.md
@@ -0,0 +1,17 @@
+# template_strings
+
+#### `PipelineLint.template_strings(){:python}`
+
+Check for template placeholders.
+
+The `nf-core create` pipeline template uses
+[Jinja](https://jinja.palletsprojects.com/en/2.11.x/) behind the scenes.
+
+This lint test fails if any Jinja template variables such as
+`{{ pipeline_name }}` are found in your pipeline code.
+
+Finding a placeholder like this means that something was probably copied and pasted
+from the template without being properly rendered for your pipeline.
+
+This test ignores any double-brackets prefixed with a dollar sign, such as
+`${{ secrets.AWS_ACCESS_KEY_ID }}` as these placeholders are used in GitHub Actions workflows.
diff --git a/src/content/tools/docs/1.13.2/lint_tests/version_consistency.md b/src/content/tools/docs/1.13.2/lint_tests/version_consistency.md
new file mode 100644
index 0000000000..040f5e57fc
--- /dev/null
+++ b/src/content/tools/docs/1.13.2/lint_tests/version_consistency.md
@@ -0,0 +1,23 @@
+# version_consistency
+
+#### `PipelineLint.version_consistency(){:python}`
+
+Pipeline and container version number consistency.
+
+:::note
+This test only runs when the `--release` flag is set for `nf-core lint`,
+or `$GITHUB_REF` is equal to `master`.
+:::
+
+This lint fetches the pipeline version number from three possible locations:
+
+- The pipeline config, `manifest.version`
+- The docker container in the pipeline config, `process.container`
+ > - Some pipelines may not have this set on a pipeline level. If it is not found, it is ignored.
+- `$GITHUB_REF`, if it looks like a release tag (`refs/tags/`)
+
+The test then checks that:
+
+- The container name has a tag specified (eg. `nfcore/pipeline:version`)
+- The pipeline version number is numeric (contains only numbers and dots)
+- That the version numbers all match one another
diff --git a/src/content/tools/docs/1.13.3/api/bump_version.md b/src/content/tools/docs/1.13.3/api/bump_version.md
new file mode 100644
index 0000000000..3f45f9fefb
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/bump_version.md
@@ -0,0 +1,35 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
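+A minimal usage sketch for the two bump functions above; the path and version strings are placeholders, and the loading step assumes the `Pipeline` object behaves as documented in `nf_core.utils`:
+
+```python
+from nf_core.bump_version import bump_nextflow_version, bump_pipeline_version
+from nf_core.utils import Pipeline
+
+pipeline_obj = Pipeline("/path/to/my-pipeline")
+pipeline_obj._load()  # parse nextflow.config and the pipeline file list
+
+bump_pipeline_version(pipeline_obj, "1.1.0")     # new pipeline version (placeholder)
+bump_nextflow_version(pipeline_obj, "20.04.0")   # new minimum Nextflow version (placeholder)
+```
+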
+### `nf_core.bump_version.update_file_version(filename, pipeline_obj, patterns){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **pipeline_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+- **Raises:**
+  **ValueError**, if the version number cannot be found.
diff --git a/src/content/tools/docs/1.13.3/api/create.md b/src/content/tools/docs/1.13.3/api/create.md
new file mode 100644
index 0000000000..92dcb9fb5e
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/create.md
@@ -0,0 +1,36 @@
+# nf_core.create
+
+Creates a nf-core pipeline matching the current
+organization’s specification based on a template.
+
+### _`class{:python}`_`nf_core.create.PipelineCreate(name, description, author, version='1.0dev', no_git=False, force=False, outdir=None){:python}`
+
+Bases: `object`
+
+Creates a nf-core pipeline a la carte from the nf-core best-practise template.
+
+- **Parameters:**
+ - **name** (_str_) – Name for the pipeline.
+ - **description** (_str_) – Description for the pipeline.
+  - **author** (_str_) – Author name(s) of the pipeline.
+ - **version** (_str_) – Version flag. Semantic versioning only. Defaults to 1.0dev.
+ - **no_git** (_bool_) – Prevents the creation of a local Git repository for the pipeline. Defaults to False.
+ - **force** (_bool_) – Overwrites a given workflow directory with the same name. Defaults to False.
+ May the force be with you.
+ - **outdir** (_str_) – Path to the local output directory.
+
+#### `git_init_pipeline(){:python}`
+
+Initialises the new pipeline as a Git repository and submits first commit.
+
+#### `init_pipeline(){:python}`
+
+Creates the nf-core pipeline.
+
+#### `make_pipeline_logo(){:python}`
+
+Fetch a logo for the new pipeline from the nf-core website
+
+#### `render_template(){:python}`
+
+Runs Jinja to create a new nf-core pipeline.
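+
+A minimal usage sketch, assuming placeholder name, description and author values:
+
+```python
+# Hedged sketch: create a new pipeline skeleton from the nf-core template.
+from nf_core.create import PipelineCreate
+
+creator = PipelineCreate(
+    name="examplepipe",              # placeholder pipeline name
+    description="A demo pipeline",   # placeholder description
+    author="Jane Doe",               # placeholder author
+    version="1.0dev",
+    no_git=False,
+    force=False,
+    outdir=None,
+)
+creator.init_pipeline()  # renders the template, fetches a logo and initialises git
+```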
diff --git a/src/content/tools/docs/1.13.3/api/download.md b/src/content/tools/docs/1.13.3/api/download.md
new file mode 100644
index 0000000000..92c33262e9
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/download.md
@@ -0,0 +1,123 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadProgress(*columns: str | ProgressColumn, console: Console | None = None, auto_refresh: bool = True, refresh_per_second: float = 10, speed_estimate_period: float = 30.0, transient: bool = False, redirect_stdout: bool = True, redirect_stderr: bool = True, get_time: Callable[[], float] | None = None, disable: bool = False, expand: bool = False){:python}`
+
+Bases: `Progress`
+
+Custom Progress bar class, allowing us to have two progress
+bars with different columns / layouts.
+
+#### `get_renderables(){:python}`
+
+Get a number of renderables for the progress display.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, outdir=None, compress_type='tar.gz', force=False, singularity=False, singularity_cache_only=False, parallel_downloads=4){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline can not be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow.
+
+Starts by using nextflow config to pull out any process.container
+declarations. This works for DSL1.
+
+Second, we look for DSL2 containers. These can’t be found with
+nextflow config at the time of writing, so we scrape the pipeline files.
+
+#### `get_singularity_images(){:python}`
+
+Loop through container names and download Singularity images
+
+#### `singularity_copy_cache_image(container, out_path, cache_path){:python}`
+
+Copy Singularity image from NXF_SINGULARITY_CACHEDIR to target folder.
+
+#### `singularity_download_image(container, out_path, cache_path, progress){:python}`
+
+Download a singularity image from the web.
+
+Use native Python to download the file.
+
+- **Parameters:**
+ - **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `https://depot.galaxyproject.org/singularity/name:version`
+ - **out_path** (_str_) – The final target output path
+  - **cache_path** (_str_ or _None_) – The NXF_SINGULARITY_CACHEDIR path if set, None if not
+ - **progress** (_Progress_) – Rich progress bar instance to add tasks to.
+
+#### `singularity_image_filenames(container){:python}`
+
+Check Singularity cache for image, copy to destination folder if found.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Can be direct download URL
+ or a Docker Hub repository ID.
+- **Returns:**
+ Returns True if we have the image in the target location.
+  Returns a download path if not.
+- **Return type:**
+ results (bool, str)
+
+#### `singularity_pull_image(container, out_path, cache_path, progress){:python}`
+
+Pull a singularity image using `singularity pull`
+
+Attempt to use a local installation of singularity to pull the image.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `nfcore/name:version`.
+- **Raises:**
+  **Various exceptions possible from subprocess execution of Singularity.**
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
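+
+A minimal usage sketch, with a placeholder pipeline name and release:
+
+```python
+# Hedged sketch: download a released workflow (without Singularity images).
+from nf_core.download import DownloadWorkflow
+
+dl = DownloadWorkflow(
+    pipeline="nf-core/rnaseq",    # placeholder pipeline
+    release="3.0",                # placeholder release tag
+    outdir="nf-core-rnaseq-3.0",  # placeholder output directory
+    compress_type="tar.gz",
+    singularity=False,
+)
+dl.download_workflow()  # fetches workflow files and centralised configs
+```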
diff --git a/src/content/tools/docs/1.13.3/api/index.md b/src/content/tools/docs/1.13.3/api/index.md
new file mode 100644
index 0000000000..8fcbeab99d
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/index.md
@@ -0,0 +1,49 @@
+# API Reference
+
+# Tests:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+ - [`PipelineCreate`](create#nf_core.create.PipelineCreate)
+- [nf_core.download](download)
+ - [`DownloadProgress`](download#nf_core.download.DownloadProgress)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+ - [`Launch`](launch#nf_core.launch.Launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+ - [`PipelineSync`](sync#nf_core.sync.PipelineSync)
+ - [`PullRequestException`](sync#nf_core.sync.PullRequestException)
+ - [`SyncException`](sync#nf_core.sync.SyncException)
+- [nf_core.utils](utils)
+ - [`Pipeline`](utils#nf_core.utils.Pipeline)
+ - [`anaconda_package()`](utils#nf_core.utils.anaconda_package)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`custom_yaml_dumper()`](utils#nf_core.utils.custom_yaml_dumper)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`get_biocontainer_tag()`](utils#nf_core.utils.get_biocontainer_tag)
+ - [`github_api_auto_auth()`](utils#nf_core.utils.github_api_auto_auth)
+ - [`nextflow_cmd()`](utils#nf_core.utils.nextflow_cmd)
+ - [`parse_anaconda_licence()`](utils#nf_core.utils.parse_anaconda_licence)
+ - [`pip_package()`](utils#nf_core.utils.pip_package)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
diff --git a/src/content/tools/docs/1.13.3/api/launch.md b/src/content/tools/docs/1.13.3/api/launch.md
new file mode 100644
index 0000000000..8affe0fa4d
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/launch.md
@@ -0,0 +1,87 @@
+# nf_core.launch
+
+Launch a pipeline, interactively collecting params
+
+### _`class{:python}`_`nf_core.launch.Launch(pipeline=None, revision=None, command_only=False, params_in=None, params_out=None, save_all=False, show_hidden=False, url=None, web_id=None){:python}`
+
+Bases: `object`
+
+Class to hold config option to launch a pipeline
+
+#### `build_command(){:python}`
+
+Build the nextflow run command based on what we know
+
+#### `get_pipeline_schema(){:python}`
+
+Load and validate the schema from the supplied pipeline
+
+#### `get_web_launch_response(){:python}`
+
+Given a URL for a web-gui launch response, recursively query it until results are ready.
+
+#### `launch_pipeline(){:python}`
+
+#### `launch_web_gui(){:python}`
+
+Send schema to nf-core website and launch input GUI
+
+#### `launch_workflow(){:python}`
+
+Launch nextflow if required
+
+#### `merge_nxf_flag_schema(){:python}`
+
+Take the Nextflow flag schema and merge it with the pipeline schema
+
+#### `print_param_header(param_id, param_obj, is_group=False){:python}`
+
+#### `prompt_group(group_id, group_obj){:python}`
+
+Prompt for edits to a group of parameters (subschema in ‘definitions’)
+
+- **Parameters:**
+  - **group_id** – Parameter ID (string)
+ - **group_obj** – JSON Schema keys (dict)
+- **Returns:**
+  Dict of param_id: val answers
+
+#### `prompt_param(param_id, param_obj, is_required, answers){:python}`
+
+Prompt for a single parameter
+
+#### `prompt_schema(){:python}`
+
+Go through the pipeline schema and prompt user to change defaults
+
+#### `prompt_web_gui(){:python}`
+
+Ask whether to use the web-based or cli wizard to collect params
+
+#### `sanitise_web_response(){:python}`
+
+The web builder returns everything as strings.
+Use the functions defined in the cli wizard to convert to the correct types.
+
+#### `set_schema_inputs(){:python}`
+
+Take the loaded schema and set the defaults as the input parameters
+If a nf_params.json file is supplied, apply these over the top
+
+#### `single_param_to_questionary(param_id, param_obj, answers=None, print_help=True){:python}`
+
+Convert a JSONSchema param to a Questionary question
+
+- **Parameters:**
+ - **param_id** – Parameter ID (string)
+ - **param_obj** – JSON Schema keys (dict)
+ - **answers** – Optional preexisting answers (dict)
+ - **print_help** – If description and help_text should be printed (bool)
+- **Returns:**
+ Single Questionary dict, to be appended to questions list
+
+#### `strip_default_params(){:python}`
+
+Strip parameters if they have not changed from the default
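+
+A minimal usage sketch, with a placeholder pipeline name and revision:
+
+```python
+# Hedged sketch: collect parameters interactively and launch a pipeline.
+from nf_core.launch import Launch
+
+launcher = Launch(pipeline="nf-core/rnaseq", revision="3.0")  # placeholders
+launcher.launch_pipeline()  # prompts for params, then builds and runs nextflow
+```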
diff --git a/src/content/tools/docs/1.13.3/api/licences.md b/src/content/tools/docs/1.13.3/api/licences.md
new file mode 100644
index 0000000000..578ddaae1d
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/licences.md
@@ -0,0 +1,37 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+  or the short form hlatyping.
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
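+
+A minimal usage sketch, with a placeholder pipeline name:
+
+```python
+# Hedged sketch: fetch and print dependency licences for a pipeline.
+from nf_core.licences import WorkflowLicences
+
+licences = WorkflowLicences("hlatyping")  # placeholder pipeline name
+licences.run_licences()  # fetches the environment file and prints licence info
+```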
diff --git a/src/content/tools/docs/1.13.3/api/lint.md b/src/content/tools/docs/1.13.3/api/lint.md
new file mode 100644
index 0000000000..97f22f4911
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/lint.md
@@ -0,0 +1,138 @@
+# nf_core.lint
+
+:::note
+See the [Lint Tests](../lint_tests/index.html) docs for information about specific linting functions.
+:::
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, fix=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(wf_path, release_mode=False, fix=(), fail_ignored=False){:python}`
+
+Bases: [`Pipeline`](utils#nf_core.utils.Pipeline)
+
+Object to hold linting information and results.
+
+Inherits [`nf_core.utils.Pipeline`](utils#nf_core.utils.Pipeline) class.
+
+Use the [`PipelineLint._lint_pipeline()`](#nf_core.lint.PipelineLint._lint_pipeline) function to run lint tests.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `ignored{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `lint_config{:python}`
+
+The parsed nf-core linting config for this pipeline
+
+- **Type:**
+ dict
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `_get_results_md(){:python}`
+
+Create a markdown file suitable for posting in a GitHub comment.
+
+- **Returns:**
+  Formatted markdown content
+- **Return type:**
+ markdown (str)
+
+#### `_lint_pipeline(){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+into object attributes: `passed`, `ignored`, `warned` and `failed`.
+
+#### `_print_results(show_passed=False){:python}`
+
+Print linting results to the command line.
+
+Uses the `rich` library to print a set of formatted tables to the command line
+summarising the linting results.
+
+#### `_save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+- **Parameters:**
+ **json_fn** (_str_) – File path to write JSON to.
+
+#### `_strip_ansi_codes(string, replace_with=''){:python}`
+
+Strip ANSI colouring codes from a string to return plain text.
+
+Solution found on Stack Overflow.
+
+#### `_wrap_quotes(files){:python}`
+
+Helper function to take a list of filenames and format with markdown.
+
+- **Parameters:**
+ **files** (_list_) –
+
+ List of filenames, eg:
+
+ ```default
+ ['foo', 'bar', 'baz']
+ ```
+
+- **Returns:**
+ Formatted string of paths separated by word `or`, eg:
+ ```default
+  `foo` or `bar` or `baz`
+ ```
+- **Return type:**
+ markdown (str)
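+
+A minimal usage sketch of `run_linting()`, with a placeholder pipeline directory:
+
+```python
+# Hedged sketch: lint a local pipeline and summarise the results.
+import nf_core.lint
+
+lint_obj = nf_core.lint.run_linting("./my-pipeline", release_mode=False, show_passed=False)
+print(f"Passed: {len(lint_obj.passed)}")
+print(f"Warned: {len(lint_obj.warned)}")
+print(f"Failed: {len(lint_obj.failed)}")
+```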
diff --git a/src/content/tools/docs/1.13.3/api/list.md b/src/content/tools/docs/1.13.3/api/list.md
new file mode 100644
index 0000000000..2fc6448de0
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/list.md
@@ -0,0 +1,99 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Bases: `object`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+Bases: `object`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Bases: `object`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on a Stack Overflow answer, adapted by sven1103.
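+
+A minimal usage sketch of `list_workflows()`, with a placeholder keyword filter:
+
+```python
+# Hedged sketch: print a summary of remote workflows matching a keyword.
+import nf_core.list
+
+nf_core.list.list_workflows(filter_by=["rna"], sort_by="stars", as_json=False)
+```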
diff --git a/src/content/tools/docs/1.13.3/api/modules.md b/src/content/tools/docs/1.13.3/api/modules.md
new file mode 100644
index 0000000000..2dd47b6359
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/modules.md
@@ -0,0 +1 @@
+# nf_core.modules
diff --git a/src/content/tools/docs/1.13.3/api/schema.md b/src/content/tools/docs/1.13.3/api/schema.md
new file mode 100644
index 0000000000..162788eb1a
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/schema.md
@@ -0,0 +1,109 @@
+# nf_core.schema
+
+Code to deal with pipeline JSON Schema
+
+### _`class{:python}`_`nf_core.schema.PipelineSchema{:python}`
+
+Bases: `object`
+
+Class to generate a schema object with
+functions to handle pipeline JSON Schema
+
+#### `add_schema_found_configs(){:python}`
+
+Add anything that’s found in the Nextflow params that’s missing in the pipeline schema
+
+#### `build_schema(pipeline_dir, no_prompts, web_only, url){:python}`
+
+Interactively build a new pipeline schema for a pipeline
+
+#### `build_schema_param(p_val){:python}`
+
+Build a pipeline schema dictionary for a param interactively
+
+#### `get_schema_defaults(){:python}`
+
+Generate set of default input parameters from schema.
+
+Saves defaults to self.schema_defaults
+Returns count of how many parameters were found (with or without a default value)
+
+#### `get_schema_path(path, local_only=False, revision=None){:python}`
+
+Given a pipeline name, directory, or path, set self.schema_filename
+
+#### `get_web_builder_response(){:python}`
+
+Given a URL for a Schema build response, recursively query it until results are ready.
+Once ready, validate Schema and write to disk.
+
+#### `get_wf_params(){:python}`
+
+Load the pipeline parameter defaults using nextflow config
+Strip out only the params. values and ignore anything that is not a flat variable
+
+#### `launch_web_builder(){:python}`
+
+Send pipeline schema to web builder and wait for response
+
+#### `load_input_params(params_path){:python}`
+
+Load a given a path to a parameters file (JSON/YAML)
+
+These should be input parameters used to run a pipeline with
+the Nextflow -params-file option.
+
+#### `load_lint_schema(){:python}`
+
+Load and lint a given schema to see if it looks valid
+
+#### `load_schema(){:python}`
+
+Load a pipeline schema from a file
+
+#### `make_skeleton_schema(){:python}`
+
+Make a new pipeline schema from the template
+
+#### `prompt_remove_schema_notfound_config(p_key){:python}`
+
+Check if a given key is found in the nextflow config params and prompt to remove it if not.
+
+Returns True if it should be removed, False if not.
+
+#### `remove_schema_notfound_configs(){:python}`
+
+Go through top-level schema and all definitions sub-schemas to remove
+anything that’s not in the nextflow config.
+
+#### `remove_schema_notfound_configs_single_schema(schema){:python}`
+
+Go through a single schema / set of properties and strip out
+anything that’s not in the nextflow config.
+
+Takes: Schema or sub-schema with properties key
+Returns: Cleaned schema / sub-schema
+
+#### `save_schema(){:python}`
+
+Save a pipeline schema to a file
+
+#### `validate_default_params(){:python}`
+
+Check that all default parameters in the schema are valid
+Ignores ‘required’ flag, as required parameters might have no defaults
+
+#### `validate_params(){:python}`
+
+Check given parameters against a schema and validate
+
+#### `validate_schema(schema=None){:python}`
+
+Check that the Schema is valid
+
+Returns: Number of parameters found
+
+#### `validate_schema_title_description(schema=None){:python}`
+
+Extra validation command for linting.
+Checks that the schema “$id”, “title” and “description” attributes match the pipeline config.
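+
+A minimal usage sketch, with placeholder pipeline and parameter file paths:
+
+```python
+# Hedged sketch: lint a pipeline schema and validate a params file against it.
+from nf_core.schema import PipelineSchema
+
+schema_obj = PipelineSchema()
+schema_obj.get_schema_path("./my-pipeline")     # placeholder: sets the schema filename
+schema_obj.load_lint_schema()                   # load and sanity-check the schema
+schema_obj.load_input_params("nf-params.json")  # placeholder -params-file style input
+schema_obj.validate_params()                    # check the params against the schema
+```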
diff --git a/src/content/tools/docs/1.13.3/api/sync.md b/src/content/tools/docs/1.13.3/api/sync.md
new file mode 100644
index 0000000000..e327553c3f
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/sync.md
@@ -0,0 +1,151 @@
+# nf_core.sync
+
+Synchronise a pipeline TEMPLATE branch with the template.
+
+### _`class{:python}`_`nf_core.sync.PipelineSync(pipeline_dir, from_branch=None, make_pr=False, gh_repo=None, gh_username=None){:python}`
+
+Bases: `object`
+
+Object to hold syncing information and results.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **from_branch** (_str_) – The branch to use to fetch config vars. If not set, will use current active branch
+ - **make_pr** (_bool_) – Set this to True to create a GitHub pull-request with the changes
+ - **gh_username** (_str_) – GitHub username
+ - **gh_repo** (_str_) – GitHub repository name
+
+#### `pipeline_dir{:python}`
+
+Path to target pipeline directory
+
+- **Type:**
+ str
+
+#### `from_branch{:python}`
+
+Repo branch to use when collecting workflow variables. Default: active branch.
+
+- **Type:**
+ str
+
+#### `original_branch{:python}`
+
+Repo branch that was checked out before we started.
+
+- **Type:**
+ str
+
+#### `made_changes{:python}`
+
+Whether making the new template pipeline introduced any changes
+
+- **Type:**
+ bool
+
+#### `make_pr{:python}`
+
+Whether to try to automatically make a PR on GitHub.com
+
+- **Type:**
+ bool
+
+#### `required_config_vars{:python}`
+
+List of nextflow variables required to make template pipeline
+
+- **Type:**
+ list
+
+#### `gh_username{:python}`
+
+GitHub username
+
+- **Type:**
+ str
+
+#### `gh_repo{:python}`
+
+GitHub repository name
+
+- **Type:**
+ str
+
+#### `checkout_template_branch(){:python}`
+
+Try to check out the origin/TEMPLATE in a new TEMPLATE branch.
+If this fails, try to check out an existing local TEMPLATE branch.
+
+#### `close_open_pr(pr){:python}`
+
+Given a PR API response, add a comment and close.
+
+#### `close_open_template_merge_prs(){:python}`
+
+Get all template merging branches (starting with ‘nf-core-template-merge-‘)
+and check for any open PRs from these branches to the self.from_branch
+If open PRs are found, add a comment and close them
+
+#### `commit_template_changes(){:python}`
+
+If we have any changes with the new template files, make a git commit
+
+#### `create_merge_base_branch(){:python}`
+
+Create a new branch from the updated TEMPLATE branch
+This branch will then be used to create the PR
+
+#### `delete_template_branch_files(){:python}`
+
+Delete all files in the TEMPLATE branch
+
+#### `get_wf_config(){:python}`
+
+Check out the target branch if requested and fetch the nextflow config.
+Check that we have the required config variables.
+
+#### `inspect_sync_dir(){:python}`
+
+Takes a look at the target directory for syncing. Checks that it’s a git repo
+and makes sure that there are no uncommitted changes.
+
+#### `make_pull_request(){:python}`
+
+Create a pull request to a base branch (default: dev),
+from a head branch (default: TEMPLATE)
+
+Returns: An instance of class requests.Response
+
+#### `make_template_pipeline(){:python}`
+
+Delete all files and make a fresh template using the workflow variables
+
+#### `push_merge_branch(){:python}`
+
+Push the newly created merge branch to the remote repository
+
+#### `push_template_branch(){:python}`
+
+If we made any changes, push the TEMPLATE branch to the default remote
+and try to make a PR. If we don’t have the auth token, try to figure out a URL
+for the PR and print this to the console.
+
+#### `reset_target_dir(){:python}`
+
+Reset the target pipeline directory. Check out the original branch.
+
+#### `sync(){:python}`
+
+Find workflow attributes, create a new template pipeline on TEMPLATE
+
+### _`exception{:python}`_`nf_core.sync.PullRequestException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error creating a Pull-Request on GitHub.com
+
+### _`exception{:python}`_`nf_core.sync.SyncException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error with TEMPLATE branch synchronisation
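+
+A minimal usage sketch, with a placeholder pipeline directory and no pull request:
+
+```python
+# Hedged sketch: refresh the TEMPLATE branch of a local pipeline clone.
+from nf_core.sync import PipelineSync, PullRequestException, SyncException
+
+try:
+    syncer = PipelineSync("./my-pipeline", from_branch="dev", make_pr=False)
+    syncer.sync()
+except (SyncException, PullRequestException) as e:
+    print(f"Sync failed: {e}")
+```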
diff --git a/src/content/tools/docs/1.13.3/api/utils.md b/src/content/tools/docs/1.13.3/api/utils.md
new file mode 100644
index 0000000000..782b715f4a
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/api/utils.md
@@ -0,0 +1,200 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### _`class{:python}`_`nf_core.utils.Pipeline(wf_path){:python}`
+
+Bases: `object`
+
+Object to hold information about a local pipeline.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (`environment.yml`).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `nf_config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `git_sha{:python}`
+
+The git sha for the repo commit / current GitHub pull-request ($GITHUB_PR_COMMIT)
+
+- **Type:**
+ str
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `wf_path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `schema_obj{:python}`
+
+A `PipelineSchema` object
+
+- **Type:**
+ obj
+
+#### `_fp(fn){:python}`
+
+Convenience function to get full path to a file in the pipeline
+
+#### `_list_files(){:python}`
+
+Get a list of all files in the pipeline
+
+#### `_load(){:python}`
+
+Run core load functions
+
+#### `_load_conda_environment(){:python}`
+
+Try to load the pipeline environment.yml file, if it exists
+
+#### `_load_pipeline_config(){:python}`
+
+Get the nextflow config for this pipeline
+
+Once loaded, set a few convenience reference class attributes
+
+### `nf_core.utils.anaconda_package(dep, dep_channels=['conda-forge', 'bioconda', 'defaults']){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ - **dep** (_str_) – A conda package name.
+ - **dep_channels** (_list_) – list of conda channels to use
+- **Raises:**
+  - **LookupError** – if the connection fails, times out, or gives an unexpected status code
+  - **ValueError** – if the package name can not be found (404)
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.custom_yaml_dumper(){:python}`
+
+Overwrite default PyYAML output to make Prettier YAML linting happy
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.get_biocontainer_tag(package, version){:python}`
+
+Given a bioconda package and version, look for a container
+at quay.io and return the tag of the most recent image
+that matches the package version.
+
+Sends a HTTP GET request to the quay.io API.
+
+- **Parameters:**
+  - **package** (_str_) – A bioconda package name.
+  - **version** (_str_) – Version of the bioconda package.
+- **Raises:**
+  - **LookupError** – if the connection fails, times out, or gives an unexpected status code
+  - **ValueError** – if the package name can not be found (404)
+
+### `nf_core.utils.github_api_auto_auth(){:python}`
+
+### `nf_core.utils.nextflow_cmd(cmd){:python}`
+
+Run a Nextflow command and capture the output. Handle errors nicely
+
+### `nf_core.utils.parse_anaconda_licence(anaconda_response, version=None){:python}`
+
+Given a response from the anaconda API using anaconda_package, parse the software licences.
+
+Returns: Set of licence types
+
+### `nf_core.utils.pip_package(dep){:python}`
+
+Query PyPI package information.
+
+Sends a HTTP GET request to the PyPI remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPI package name.
+- **Raises:**
+  - **LookupError** – if the connection fails or times out
+  - **ValueError** – if the package name can not be found
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes argument api_url for URL
+
+Expects API response to be valid JSON and contain a top-level ‘status’ key.
+
+### `nf_core.utils.rich_force_colors(){:python}`
+
+Check if any environment variables are set to force Rich to use coloured output
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
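+
+A minimal usage sketch of the helpers above, with a placeholder pipeline path:
+
+```python
+# Hedged sketch: read a pipeline's Nextflow config and load pipeline metadata.
+import nf_core.utils
+
+config = nf_core.utils.fetch_wf_config("./my-pipeline")  # placeholder path
+print(config.get("manifest.name"), config.get("manifest.version"))
+
+pipeline = nf_core.utils.Pipeline("./my-pipeline")
+pipeline._load()  # populates nf_config, conda_config, files, etc.
+```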
diff --git a/src/content/tools/docs/1.13.3/index.md b/src/content/tools/docs/1.13.3/index.md
new file mode 100644
index 0000000000..c5988c4254
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/index.md
@@ -0,0 +1,18 @@
+# Welcome to nf-core tools API documentation!
+
+This documentation is for the `nf-core/tools` package.
+
+Primarily, it describes the different [code lint tests](lint_tests/index.html)
+run by `nf-core lint` (typically visited by a developer when their pipeline fails a given
+test), and also provides a reference for the `nf_core` [Python package API](api/index.html).
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.13.3/lint_tests/actions_awsfulltest.md b/src/content/tools/docs/1.13.3/lint_tests/actions_awsfulltest.md
new file mode 100644
index 0000000000..fd32fa4a32
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/actions_awsfulltest.md
@@ -0,0 +1,30 @@
+# actions_awsfulltest
+
+#### `PipelineLint.actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS.
+This should ensure that the pipeline runs as expected on AWS and provide a resource estimation.
+
+The GitHub Actions workflow is called `awsfulltest.yml`, and it can be found in the `.github/workflows/` directory.
+
+:::warning
+This workflow incurs AWS costs, therefore it should only be triggered for pipeline releases:
+`workflow_run` (after the docker hub release workflow) and `workflow_dispatch`.
+:::
+
+:::note
+You can manually trigger the AWS tests by going to the Actions tab on the pipeline GitHub repository and selecting the
+nf-core AWS full size tests workflow on the left.
+:::
+
+:::note
+For tests on full data prior to release, the [Nextflow Tower](https://tower.nf) launch feature can be employed.
+:::
+
+The `.github/workflows/awsfulltest.yml` file is tested for the following:
+
+- Must be turned on for `workflow_dispatch`.
+- Must be turned on for `workflow_run` with `workflows: ["nf-core Docker push (release)"]` and `types: [completed]`
+- Should run the profile `test_full`, which should be edited to provide the links to full-size datasets. If it runs the profile `test`, a warning is given.
diff --git a/src/content/tools/docs/1.13.3/lint_tests/actions_awstest.md b/src/content/tools/docs/1.13.3/lint_tests/actions_awstest.md
new file mode 100644
index 0000000000..4e4698c4e9
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/actions_awstest.md
@@ -0,0 +1,24 @@
+# actions_awstest
+
+#### `PipelineLint.actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on AWS.
+This should ensure that the pipeline runs as expected on AWS (which often has its own unique edge cases).
+
+:::warning
+Running tests on AWS incurs costs, so these tests are not triggered automatically.
+Instead, they use the `workflow_dispatch` trigger, which allows for manual triggering
+of the workflow when testing on AWS is desired.
+:::
+
+:::note
+You can trigger the tests by going to the Actions tab on the pipeline GitHub repository
+and selecting the nf-core AWS test workflow on the left.
+:::
+
+The `.github/workflows/awstest.yml` file is tested for the following:
+
+- Must _not_ be turned on for `push` or `pull_request`.
+- Must be turned on for `workflow_dispatch`.
diff --git a/src/content/tools/docs/1.13.3/lint_tests/actions_ci.md b/src/content/tools/docs/1.13.3/lint_tests/actions_ci.md
new file mode 100644
index 0000000000..34f2096391
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/actions_ci.md
@@ -0,0 +1,63 @@
+# actions_ci
+
+#### `PipelineLint.actions_ci(){:python}`
+
+Checks that the GitHub Actions pipeline CI (Continuous Integration) workflow is valid.
+
+The `.github/workflows/ci.yml` GitHub Actions workflow runs the pipeline on a minimal test
+dataset using `-profile test` to check that no breaking changes have been introduced.
+Final result files are not checked, just that the pipeline exits successfully.
+
+This lint test checks this GitHub Actions workflow file for the following:
+
+- Workflow must be triggered on the following events:
+ ```yaml
+ on:
+ push:
+ branches:
+ - dev
+ pull_request:
+ release:
+ types: [published]
+ ```
+- The minimum Nextflow version specified in the pipeline’s `nextflow.config` matches that defined by `nxf_ver` in the test matrix:
+
+ ```yaml
+ strategy:
+ matrix:
+ # Nextflow versions: check pipeline minimum and current latest
+ nxf_ver: ['19.10.0', '']
+ ```
+
+ :::note
+ These `matrix` variables run the test workflow twice, varying the `nxf_ver` variable each time.
+ This is used in the `nextflow run` commands to test the pipeline with both the latest available version
+ of the pipeline (`''`) and the stated minimum required version.
+ :::
+
+- The Docker container for the pipeline must use the correct pipeline version number:
+
+ > - Development pipelines:
+ > ```bash
+ >   docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:dev
+ > ```
+ > - Released pipelines:
+ > ```bash
+ >   docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:<pipeline_version>
+ > ```
+ > - Complete example for a released pipeline called _nf-core/example_ with version number `1.0.0`:
+ > ```yaml
+ > - name: Build new docker image
+ > if: env.GIT_DIFF
+ > run: docker build --no-cache . -t nfcore/example:1.0.0
+ > ```
+ >
+ >   ```yaml
+ >   - name: Pull docker image
+ >     if: ${{ !env.GIT_DIFF }}
+ >     run: |
+ >       docker pull nfcore/example:dev
+ >       docker tag nfcore/example:dev nfcore/example:1.0.0
+ >   ```
diff --git a/src/content/tools/docs/1.13.3/lint_tests/actions_schema_validation.md b/src/content/tools/docs/1.13.3/lint_tests/actions_schema_validation.md
new file mode 100644
index 0000000000..caa60db6c4
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/actions_schema_validation.md
@@ -0,0 +1,18 @@
+# actions_schema_validation
+
+#### `PipelineLint.actions_schema_validation(){:python}`
+
+Checks that the GitHub Action workflow yml/yaml files adhere to the correct schema
+
+nf-core pipelines use GitHub actions workflows to run CI tests, check formatting and also linting, among others.
+These workflows are defined by `yml` scripts in `.github/workflows/`. This lint test verifies that these scripts are valid
+by comparing them against the JSON schema for GitHub workflows.
+
+To pass this test, make sure that all your workflows contain the required properties `on` and `jobs`, and that
+all other properties are of the correct type, as specified in the schema.
diff --git a/src/content/tools/docs/1.13.3/lint_tests/conda_dockerfile.md b/src/content/tools/docs/1.13.3/lint_tests/conda_dockerfile.md
new file mode 100644
index 0000000000..388e51dd9a
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/conda_dockerfile.md
@@ -0,0 +1,33 @@
+# conda_dockerfile
+
+#### `PipelineLint.conda_dockerfile(){:python}`
+
+Checks the Dockerfile for use with Conda environments
+
+:::note
+This test only runs if there is both an `environment.yml`
+and `Dockerfile` present in the pipeline root directory.
+:::
+
+If a workflow has a conda `environment.yml` file, the `Dockerfile` should use this
+to create the docker image. These files are typically very short, just creating the conda
+environment inside the container.
+
+This linting test checks for the following:
+
+- All of the following lines are present in the file (where `PIPELINE` is your pipeline name):
+ > ```Dockerfile
+ > FROM nfcore/base:VERSION
+ > COPY environment.yml /
+ > RUN conda env create --quiet -f /environment.yml && conda clean -a
+ > RUN conda env export --name PIPELINE > PIPELINE.yml
+ > ENV PATH /opt/conda/envs/PIPELINE/bin:$PATH
+ > ```
+- That the `FROM nfcore/base:VERSION` is tagged to the most recent release of nf-core/tools
+ > - The linting tool compares the tag against the currently installed version of tools.
+ > - This line is not checked if running a development version of nf-core/tools.
+
+:::note
+Additional lines and different metadata can be added to the `Dockerfile`
+without causing this lint test to fail.
+:::
diff --git a/src/content/tools/docs/1.13.3/lint_tests/conda_env_yaml.md b/src/content/tools/docs/1.13.3/lint_tests/conda_env_yaml.md
new file mode 100644
index 0000000000..f2dab4fba1
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/conda_env_yaml.md
@@ -0,0 +1,36 @@
+# conda_env_yaml
+
+#### `PipelineLint.conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+:::note
+This test is ignored if there is not an `environment.yml`
+file present in the pipeline root directory.
+:::
+
+DSL1 nf-core pipelines use a single Conda environment to manage all software
+dependencies for a workflow. This can be used directly with `-profile conda`
+and is also used in the `Dockerfile` to build a docker image.
+
+This test checks the conda `environment.yml` file to ensure that it follows nf-core guidelines.
+Each dependency is checked using the [Anaconda API service](https://api.anaconda.org/docs).
+Dependency sublists are ignored with the exception of `- pip`: these packages are also checked
+for pinned version numbers and checked using the [PyPI JSON API](https://wiki.python.org/moin/PyPIJSON).
+
+Specifically, this lint test makes sure that:
+
+- The environment `name` must match the pipeline name and version
+ > - The pipeline name is defined in the config variable `manifest.name`
+ > - Replace the slash with a hyphen as environment names shouldn’t contain that character
+ > - Example: For `nf-core/test` version 1.4, the conda environment name should be `nf-core-test-1.4`
+- All package dependencies have a specific version number pinned
+ > :::warning
+ > Remember that Conda package versions should be pinned with one equals sign (`toolname=1.1`),
+ > but pip uses two (`toolname==1.2`)
+ > :::
+- That package versions can be found and are the latest available
+ > - Test will go through all conda channels listed in the file, or check PyPI if `pip`
+ > - Conda dependencies with pinned channels (eg. `conda-forge::openjdk`) are ok too
+ > - In addition to the package name, the pinned version is checked
+ > - If a newer version is available, a warning will be reported
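+
+As a usage sketch (not part of the generated docs), a single pinned dependency can be looked up with the documented `nf_core.utils` helpers, roughly as this lint test does; the package pin below is a placeholder and the `name=version` pin format is assumed to be accepted:
+
+```python
+# Hedged sketch: query the Anaconda API for a pinned conda dependency.
+import nf_core.utils
+
+# Assumption: the environment.yml style "name=version" pin is accepted here.
+info = nf_core.utils.anaconda_package("samtools=1.12", dep_channels=["bioconda"])
+licences = nf_core.utils.parse_anaconda_licence(info, version="1.12")
+print(licences)
+```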
diff --git a/src/content/tools/docs/1.13.3/lint_tests/files_exist.md b/src/content/tools/docs/1.13.3/lint_tests/files_exist.md
new file mode 100644
index 0000000000..927fbadce4
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/files_exist.md
@@ -0,0 +1,77 @@
+# files_exist
+
+#### `PipelineLint.files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that specified
+files are either present or absent, as required.
+
+:::note
+This test raises an `AssertionError` if neither `nextflow.config` nor `main.nf` is found.
+If these files are not found then this cannot be a Nextflow pipeline and something has gone badly wrong.
+All lint tests are stopped immediately with a critical error message.
+:::
+
+Files that **must** be present:
+
+```default
+.gitattributes
+.github/.dockstore.yml
+.github/CONTRIBUTING.md
+.github/ISSUE_TEMPLATE/bug_report.md
+.github/ISSUE_TEMPLATE/config.yml
+.github/ISSUE_TEMPLATE/feature_request.md
+.github/markdownlint.yml
+.github/PULL_REQUEST_TEMPLATE.md
+.github/workflows/branch.yml
+.github/workflows/ci.yml
+.github/workflows/linting_comment.yml
+.github/workflows/linting.yml
+[LICENSE, LICENSE.md, LICENCE, LICENCE.md] # NB: British / American spelling
+assets/email_template.html
+assets/email_template.txt
+assets/nf-core-PIPELINE_logo.png
+assets/sendmail_template.txt
+bin/markdown_to_html.py
+CHANGELOG.md
+CODE_OF_CONDUCT.md
+docs/images/nf-core-PIPELINE_logo.png
+docs/output.md
+docs/README.md
+docs/usage.md
+lib/nfcore_external_java_deps.jar
+lib/NfcoreSchema.groovy
+nextflow_schema.json
+nextflow.config
+README.md
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'Dockerfile',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity',
+'parameters.settings.json',
+'bin/markdown_to_html.r',
+'conf/aws.config',
+'.github/workflows/push_dockerhub.yml'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
diff --git a/src/content/tools/docs/1.13.3/lint_tests/files_unchanged.md b/src/content/tools/docs/1.13.3/lint_tests/files_unchanged.md
new file mode 100644
index 0000000000..ea06ada29f
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/files_unchanged.md
@@ -0,0 +1,57 @@
+# files_unchanged
+
+#### `PipelineLint.files_unchanged(){:python}`
+
+Checks that certain pipeline files are not modified from template output.
+
+Iterates through the pipeline’s directory content and compares specified files
+against output from the template using the pipeline’s metadata. File content
+should not be modified / missing.
+
+Files that must be unchanged:
+
+```default
+'.gitattributes',
+'.github/.dockstore.yml',
+'.github/CONTRIBUTING.md',
+'.github/ISSUE_TEMPLATE/bug_report.md',
+'.github/ISSUE_TEMPLATE/config.yml',
+'.github/ISSUE_TEMPLATE/feature_request.md',
+'.github/markdownlint.yml',
+'.github/PULL_REQUEST_TEMPLATE.md',
+'.github/workflows/branch.yml',
+'.github/workflows/linting_comment.yml',
+'.github/workflows/linting.yml',
+'assets/email_template.html',
+'assets/email_template.txt',
+'assets/nf-core-PIPELINE_logo.png',
+'assets/sendmail_template.txt',
+'bin/markdown_to_html.py',
+'CODE_OF_CONDUCT.md',
+'docs/images/nf-core-PIPELINE_logo.png',
+'docs/README.md',
+'lib/nfcore_external_java_deps.jar'
+'lib/NfcoreSchema.groovy',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+```
+
+Files that can have additional content but must include the template contents:
+
+```default
+'.github/workflows/push_dockerhub_dev.yml',
+'.github/workflows/push_dockerhub_release.yml',
+'.gitignore',
+'assets/multiqc_config.yaml',
+```
+
+:::note
+You can configure the `nf-core lint` tests to ignore any of these checks by setting
+the `files_unchanged` key as follows in your linting config file. For example:
+
+```yaml
+files_unchanged:
+ - .github/workflows/branch.yml
+ - assets/multiqc_config.yaml
+```
+
+:::
diff --git a/src/content/tools/docs/1.13.3/lint_tests/index.md b/src/content/tools/docs/1.13.3/lint_tests/index.md
new file mode 100644
index 0000000000..f0e9f7e6a1
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/index.md
@@ -0,0 +1,38 @@
+# Lint tests
+
+# Tests:
+
+- [actions_awsfulltest](actions_awsfulltest)
+ - [`PipelineLint.actions_awsfulltest()`](actions_awsfulltest#nf_core.lint.PipelineLint.actions_awsfulltest)
+- [actions_awstest](actions_awstest)
+ - [`PipelineLint.actions_awstest()`](actions_awstest#nf_core.lint.PipelineLint.actions_awstest)
+- [actions_ci](actions_ci)
+ - [`PipelineLint.actions_ci()`](actions_ci#nf_core.lint.PipelineLint.actions_ci)
+- [actions_schema_validation](actions_schema_validation)
+ - [`PipelineLint.actions_schema_validation()`](actions_schema_validation#nf_core.lint.PipelineLint.actions_schema_validation)
+- [conda_dockerfile](conda_dockerfile)
+ - [`PipelineLint.conda_dockerfile()`](conda_dockerfile#nf_core.lint.PipelineLint.conda_dockerfile)
+- [conda_env_yaml](conda_env_yaml)
+ - [`PipelineLint.conda_env_yaml()`](conda_env_yaml#nf_core.lint.PipelineLint.conda_env_yaml)
+- [files_exist](files_exist)
+ - [`PipelineLint.files_exist()`](files_exist#nf_core.lint.PipelineLint.files_exist)
+- [files_unchanged](files_unchanged)
+ - [`PipelineLint.files_unchanged()`](files_unchanged#nf_core.lint.PipelineLint.files_unchanged)
+- [merge_markers](merge_markers)
+ - [`PipelineLint.merge_markers()`](merge_markers#nf_core.lint.PipelineLint.merge_markers)
+- [nextflow_config](nextflow_config)
+ - [`PipelineLint.nextflow_config()`](nextflow_config#nf_core.lint.PipelineLint.nextflow_config)
+- [pipeline_name_conventions](pipeline_name_conventions)
+ - [`PipelineLint.pipeline_name_conventions()`](pipeline_name_conventions#nf_core.lint.PipelineLint.pipeline_name_conventions)
+- [pipeline_todos](pipeline_todos)
+ - [`PipelineLint.pipeline_todos()`](pipeline_todos#nf_core.lint.PipelineLint.pipeline_todos)
+- [readme](readme)
+ - [`PipelineLint.readme()`](readme#nf_core.lint.PipelineLint.readme)
+- [schema_lint](schema_lint)
+ - [`PipelineLint.schema_lint()`](schema_lint#nf_core.lint.PipelineLint.schema_lint)
+- [schema_params](schema_params)
+ - [`PipelineLint.schema_params()`](schema_params#nf_core.lint.PipelineLint.schema_params)
+- [template_strings](template_strings)
+ - [`PipelineLint.template_strings()`](template_strings#nf_core.lint.PipelineLint.template_strings)
+- [version_consistency](version_consistency)
+ - [`PipelineLint.version_consistency()`](version_consistency#nf_core.lint.PipelineLint.version_consistency)
diff --git a/src/content/tools/docs/1.13.3/lint_tests/merge_markers.md b/src/content/tools/docs/1.13.3/lint_tests/merge_markers.md
new file mode 100644
index 0000000000..38076814b1
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/merge_markers.md
@@ -0,0 +1,8 @@
+# merge_markers
+
+#### `PipelineLint.merge_markers(){:python}`
+
+Check for remaining merge markers.
+
+This test looks for remaining merge markers in the code, e.g.:
+`>>>>>>>` or `<<<<<<<`
diff --git a/src/content/tools/docs/1.13.3/lint_tests/nextflow_config.md b/src/content/tools/docs/1.13.3/lint_tests/nextflow_config.md
new file mode 100644
index 0000000000..7f77f40c66
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/nextflow_config.md
@@ -0,0 +1,65 @@
+# nextflow_config
+
+#### `PipelineLint.nextflow_config(){:python}`
+
+Checks the pipeline configuration for required variables.
+
+All nf-core pipelines are required to be configured with a minimal set of variable
+names. This test fails or throws warnings if required variables are not set.
+
+:::note
+These config variables must be set in `nextflow.config` or another config
+file imported from there. Any variables set in nextflow script files (eg. `main.nf`)
+are not checked and will be assumed to be missing.
+:::
+
+**The following variables fail the test if missing:**
+
+- `params.outdir`: A directory in which all pipeline results should be saved
+- `manifest.name`: The pipeline name. Should begin with `nf-core/`
+- `manifest.description`: A description of the pipeline
+- `manifest.version`
+ - The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/).
+ - If `--release` is set when running `nf-core lint`, the version number must not contain the string `dev`
+  - If `--release` is _not_ set, the version should end in `dev` (warning triggered if not)
+- `manifest.nextflowVersion`
+ - The minimum version of Nextflow required to run the pipeline.
+ - Should be `>=` or `!>=` and a version number, eg. `manifest.nextflowVersion = '>=0.31.0'` (see [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html#scope-manifest))
+ - `>=` warns about old versions but tries to run anyway, `!>=` fails for old versions. Only use the latter if you _know_ that the pipeline will certainly fail before this version.
+ - This should correspond to the `NXF_VER` version tested by GitHub Actions.
+- `manifest.homePage`
+ - The homepage for the pipeline. Should be the nf-core GitHub repository URL,
+ so beginning with `https://github.com/nf-core/`
+- `timeline.enabled`, `trace.enabled`, `report.enabled`, `dag.enabled`
+ - The nextflow timeline, trace, report and DAG should be enabled by default (set to `true`)
+- `process.cpus`, `process.memory`, `process.time`
+ - Default CPUs, memory and time limits for tasks
+- `params.input`
+ - Input parameter to specify input data, specify this to avoid a warning
+ - Typical usage:
+ - `params.input`: Input data that is not NGS sequencing data
+
+**The following variables throw warnings if missing:**
+
+- `manifest.mainScript`: The filename of the main pipeline script (should be `main.nf`)
+- `timeline.file`, `trace.file`, `report.file`, `dag.file`
+ - Default filenames for the timeline, trace and report
+ - The DAG file path should end with `.svg` (If Graphviz is not installed, Nextflow will generate a `.dot` file instead)
+- `process.container`
+ - Docker Hub handle for a single default container for use by all processes.
+ - Must specify a tag that matches the pipeline version number if set.
+ - If the pipeline version number contains the string `dev`, the DockerHub tag must be `:dev`
+
+**The following variables are deprecated and fail the test if they are still present:**
+
+- `params.version`: The old method for specifying the pipeline version. Replaced by `manifest.version`
+- `params.nf_required_version`: The old method for specifying the minimum Nextflow version. Replaced by `manifest.nextflowVersion`
+- `params.container`: The old method for specifying the dockerhub container address. Replaced by `process.container`
+- `igenomesIgnore`: Changed to `igenomes_ignore`
+ > :::note
+ > The `snake_case` convention should now be used when defining pipeline parameters
+ > :::
+
+**The following Nextflow syntax is deprecated and fails the test if present:**
+
+- Process-level configuration syntax still using the old Nextflow syntax, for example: `process.$fastqc` instead of `process withName:'fastqc'`.
diff --git a/src/content/tools/docs/1.13.3/lint_tests/pipeline_name_conventions.md b/src/content/tools/docs/1.13.3/lint_tests/pipeline_name_conventions.md
new file mode 100644
index 0000000000..788d77e41a
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/pipeline_name_conventions.md
@@ -0,0 +1,13 @@
+# pipeline_name_conventions
+
+#### `PipelineLint.pipeline_name_conventions(){:python}`
+
+Checks that the pipeline name adheres to nf-core conventions.
+
+In order to ensure consistent naming, pipeline names should contain only lower case, alphanumeric characters.
+Otherwise a warning is displayed.
+
+:::warning
+DockerHub is very picky about image names and doesn’t even allow hyphens (we are `nfcore`).
+This is a large part of why we set this rule.
+:::
diff --git a/src/content/tools/docs/1.13.3/lint_tests/pipeline_todos.md b/src/content/tools/docs/1.13.3/lint_tests/pipeline_todos.md
new file mode 100644
index 0000000000..e8a73d1663
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/pipeline_todos.md
@@ -0,0 +1,28 @@
+# pipeline_todos
+
+#### `PipelineLint.pipeline_todos(){:python}`
+
+Check for nf-core _TODO_ lines.
+
+The nf-core workflow template contains a number of comment lines to help developers
+of new pipelines know where they need to edit files and add content.
+They typically have the following format:
+
+```groovy
+// TODO nf-core: Make some kind of change to the workflow here
+```
+
+...or in markdown:
+
+```html
+<!-- TODO nf-core: Make some kind of change to the workflow here -->
+```
+
+This lint test runs through all files in the pipeline and searches for these lines.
+If any are found they will throw a warning.
+
+:::note
+Note that many GUI code editors have plugins to list all instances of _TODO_
+in a given project directory. This is a very quick and convenient way to get
+started on your pipeline!
+:::
diff --git a/src/content/tools/docs/1.13.3/lint_tests/readme.md b/src/content/tools/docs/1.13.3/lint_tests/readme.md
new file mode 100644
index 0000000000..51b8040c97
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/readme.md
@@ -0,0 +1,25 @@
+# readme
+
+#### `PipelineLint.readme(){:python}`
+
+Repository `README.md` tests
+
+The `README.md` files for a project are very important and must meet some requirements:
+
+- Nextflow badge
+ - If no Nextflow badge is found, a warning is given
+ - If a badge is found but the version doesn’t match the minimum version in the config file, the test fails
+ - Example badge code:
+ ```md
+ [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.27.6-brightgreen.svg)](https://www.nextflow.io/)
+ ```
+- Bioconda badge
+ - If your pipeline contains a file called `environment.yml` in the root directory, a bioconda badge is required
+ - Required badge code:
+ ```md
+ [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)
+ ```
+
+:::note
+These badges are a markdown image `![alt-text]()` _inside_ a markdown link `[markdown image]()`, so a bit fiddly to write.
+:::
diff --git a/src/content/tools/docs/1.13.3/lint_tests/schema_lint.md b/src/content/tools/docs/1.13.3/lint_tests/schema_lint.md
new file mode 100644
index 0000000000..b5e29febba
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/schema_lint.md
@@ -0,0 +1,59 @@
+# schema_lint
+
+#### `PipelineLint.schema_lint(){:python}`
+
+Pipeline schema syntax
+
+Pipelines should have a `nextflow_schema.json` file that describes the different
+pipeline parameters (eg. `params.something`, `--something`).
+
+:::note
+Reminder: you should generally never need to edit this JSON file by hand.
+The `nf-core schema build` command can create _and edit_ the file for you
+to keep it up to date, with a friendly user-interface for customisation.
+:::
+
+The lint test checks the schema for the following:
+
+- Schema should be a valid JSON file
+- Schema should adhere to [JSONSchema](https://json-schema.org/), Draft 7.
+- Parameters can be described in two places:
+  > - As `properties` in the top-level schema object
+  > - As `properties` within subschemas listed in a top-level `definitions` object
+- The schema must describe at least one parameter
+- There must be no duplicate parameter IDs across the schema and definition subschema
+- All subschema in `definitions` must be referenced in the top-level `allOf` key
+- The top-level `allOf` key must not describe any non-existent definitions
+- Default parameters in the schema must be valid
+- Core top-level schema attributes should exist and be set as follows:
+ > - `$schema`: `https://json-schema.org/draft-07/schema`
+ > - `$id`: URL to the raw schema file, eg. `https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`
+ > - `title`: `YOURPIPELINE pipeline parameters`
+ > - `description`: The pipeline config `manifest.description`
+
+For example, an _extremely_ minimal schema could look like this:
+
+```json
+{
+ "$schema": "https://json-schema.org/draft-07/schema",
+ "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json",
+ "title": "YOURPIPELINE pipeline parameters",
+ "description": "This pipeline is for testing",
+ "properties": {
+ "first_param": { "type": "string" }
+ },
+ "definitions": {
+ "my_first_group": {
+ "properties": {
+ "second_param": { "type": "string" }
+ }
+ }
+ },
+ "allOf": [{ "$ref": "#/definitions/my_first_group" }]
+}
+```
+
+:::note
+You can check your pipeline schema without having to run the entire pipeline lint
+by running `nf-core schema lint` instead of `nf-core lint`
+:::
diff --git a/src/content/tools/docs/1.13.3/lint_tests/schema_params.md b/src/content/tools/docs/1.13.3/lint_tests/schema_params.md
new file mode 100644
index 0000000000..7d9440fa15
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/schema_params.md
@@ -0,0 +1,11 @@
+# schema_params
+
+#### `PipelineLint.schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline.
+
+The `nextflow_schema.json` pipeline schema should describe every flat parameter
+returned from the `nextflow config` command (params that are objects or more complex structures are ignored).
+
+- Failure: If parameters are returned by `nextflow config` but are missing from `nextflow_schema.json`
+- Warning: If parameters are found in `nextflow_schema.json` that are not returned by `nextflow config`
diff --git a/src/content/tools/docs/1.13.3/lint_tests/template_strings.md b/src/content/tools/docs/1.13.3/lint_tests/template_strings.md
new file mode 100644
index 0000000000..23c06bb63d
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/template_strings.md
@@ -0,0 +1,17 @@
+# template_strings
+
+#### `PipelineLint.template_strings(){:python}`
+
+Check for template placeholders.
+
+The `nf-core create` pipeline template uses
+[Jinja](https://jinja.palletsprojects.com/en/2.11.x/) behind the scenes.
+
+This lint test fails if any Jinja template variables such as
+`{{ pipeline_name }}` are found in your pipeline code.
+
+Finding a placeholder like this means that something was probably copied and pasted
+from the template without being properly rendered for your pipeline.
+
+This test ignores any double-brackets prefixed with a dollar sign, such as
+`${{ secrets.AWS_ACCESS_KEY_ID }}` as these placeholders are used in GitHub Actions workflows.
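+
+As a rough sketch (not the lint implementation itself; the example string is made up), a check along these lines can be written with Python's `re` module:
+
+```python
+import re
+
+# Placeholder content - in practice this would be read from a pipeline file
+content = "process {{ pipeline_name }} and ${{ secrets.AWS_ACCESS_KEY_ID }}"
+
+# Match '{{ ... }}' placeholders, but not those prefixed with '$' (GitHub Actions syntax)
+jinja_placeholder = re.compile(r"(?<!\$)\{\{[^}]*\}\}")
+
+for match in jinja_placeholder.finditer(content):
+    print(f"Found unrendered template string: {match.group(0)}")
+```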
diff --git a/src/content/tools/docs/1.13.3/lint_tests/version_consistency.md b/src/content/tools/docs/1.13.3/lint_tests/version_consistency.md
new file mode 100644
index 0000000000..040f5e57fc
--- /dev/null
+++ b/src/content/tools/docs/1.13.3/lint_tests/version_consistency.md
@@ -0,0 +1,23 @@
+# version_consistency
+
+#### `PipelineLint.version_consistency(){:python}`
+
+Pipeline and container version number consistency.
+
+:::note
+This test only runs when the `--release` flag is set for `nf-core lint`,
+or `$GITHUB_REF` is equal to `master`.
+:::
+
+This lint fetches the pipeline version number from three possible locations:
+
+- The pipeline config, `manifest.version`
+- The docker container in the pipeline config, `process.container`
+ > - Some pipelines may not have this set on a pipeline level. If it is not found, it is ignored.
+- `$GITHUB_REF`, if it looks like a release tag (`refs/tags/`)
+
+The test then checks that:
+
+- The container name has a tag specified (eg. `nfcore/pipeline:version`)
+- The pipeline version number is numeric (contains only numbers and dots)
+- That the version numbers all match one another
diff --git a/src/content/tools/docs/1.13/api/bump_version.md b/src/content/tools/docs/1.13/api/bump_version.md
new file mode 100644
index 0000000000..3f45f9fefb
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/bump_version.md
@@ -0,0 +1,35 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, pipeline_obj, patterns){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+  - **filename** (_str_) – File to scan.
+  - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+    about the pipeline contents and build files.
+  - **patterns** (_list_) – List of `(pattern, replacement)` tuples: regex patterns to apply and the strings to replace matches with.
+- **Raises:**
+  **ValueError** – if the version number cannot be found.
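+
+A minimal usage sketch, assuming `nf-core/tools` 1.13 is installed; the pipeline path and version strings are placeholders:
+
+```python
+import nf_core.utils
+import nf_core.bump_version
+
+# Placeholder path to a local pipeline clone
+pipeline_dir = "/path/to/nf-core-mypipeline"
+
+# Load the pipeline contents and config before bumping
+pipeline = nf_core.utils.Pipeline(pipeline_dir)
+pipeline._load()
+
+# Bump the pipeline version and the required Nextflow version
+nf_core.bump_version.bump_pipeline_version(pipeline, "1.1.0")
+nf_core.bump_version.bump_nextflow_version(pipeline, "20.04.0")
+```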
diff --git a/src/content/tools/docs/1.13/api/create.md b/src/content/tools/docs/1.13/api/create.md
new file mode 100644
index 0000000000..92dcb9fb5e
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/create.md
@@ -0,0 +1,36 @@
+# nf_core.create
+
+Creates a nf-core pipeline matching the current
+organization’s specification based on a template.
+
+### _`class{:python}`_`nf_core.create.PipelineCreate(name, description, author, version='1.0dev', no_git=False, force=False, outdir=None){:python}`
+
+Bases: `object`
+
+Creates a nf-core pipeline a la carte from the nf-core best-practice template.
+
+- **Parameters:**
+  - **name** (_str_) – Name for the pipeline.
+  - **description** (_str_) – Description for the pipeline.
+  - **author** (_str_) – Author name(s) for the pipeline.
+ - **version** (_str_) – Version flag. Semantic versioning only. Defaults to 1.0dev.
+ - **no_git** (_bool_) – Prevents the creation of a local Git repository for the pipeline. Defaults to False.
+ - **force** (_bool_) – Overwrites a given workflow directory with the same name. Defaults to False.
+ May the force be with you.
+ - **outdir** (_str_) – Path to the local output directory.
+
+#### `git_init_pipeline(){:python}`
+
+Initialises the new pipeline as a Git repository and submits first commit.
+
+#### `init_pipeline(){:python}`
+
+Creates the nf-core pipeline.
+
+#### `make_pipeline_logo(){:python}`
+
+Fetch a logo for the new pipeline from the nf-core website
+
+#### `render_template(){:python}`
+
+Runs Jinja to create a new nf-core pipeline.
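+
+A minimal sketch of creating a pipeline from Python; all of the values below are illustrative placeholders:
+
+```python
+import nf_core.create
+
+# Placeholder pipeline metadata
+create_obj = nf_core.create.PipelineCreate(
+    name="nextbigthing",
+    description="This pipeline analyses data from the next big omics technique",
+    author="Big Steve",
+    version="1.0dev",
+    no_git=False,
+    force=False,
+    outdir=None,
+)
+
+# Create the new pipeline skeleton (see init_pipeline() above)
+create_obj.init_pipeline()
+```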
diff --git a/src/content/tools/docs/1.13/api/download.md b/src/content/tools/docs/1.13/api/download.md
new file mode 100644
index 0000000000..92c33262e9
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/download.md
@@ -0,0 +1,123 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadProgress(*columns: str | ProgressColumn, console: Console | None = None, auto_refresh: bool = True, refresh_per_second: float = 10, speed_estimate_period: float = 30.0, transient: bool = False, redirect_stdout: bool = True, redirect_stderr: bool = True, get_time: Callable[[], float] | None = None, disable: bool = False, expand: bool = False){:python}`
+
+Bases: `Progress`
+
+Custom Progress bar class, allowing us to have two progress
+bars with different columns / layouts.
+
+#### `get_renderables(){:python}`
+
+Get a number of renderables for the progress display.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, outdir=None, compress_type='tar.gz', force=False, singularity=False, singularity_cache_only=False, parallel_downloads=4){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
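+A minimal usage sketch; the pipeline name, release and output directory are illustrative placeholders:
+
+```python
+import nf_core.download
+
+# Placeholder pipeline, release and output directory
+dl = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/rnaseq",
+    release="1.0",
+    outdir="nf-core-rnaseq-1.0",
+    compress_type="tar.gz",
+    singularity=False,
+)
+
+# Fetch the workflow files, the centralised config profiles and (optionally) containers
+dl.download_workflow()
+```
+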
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow.
+
+Starts by using nextflow config to pull out any process.container
+declarations. This works for DSL1.
+
+Second, we look for DSL2 containers. These can’t be found with
+nextflow config at the time of writing, so we scrape the pipeline files.
+
+#### `get_singularity_images(){:python}`
+
+Loop through container names and download Singularity images
+
+#### `singularity_copy_cache_image(container, out_path, cache_path){:python}`
+
+Copy Singularity image from NXF_SINGULARITY_CACHEDIR to target folder.
+
+#### `singularity_download_image(container, out_path, cache_path, progress){:python}`
+
+Download a singularity image from the web.
+
+Use native Python to download the file.
+
+- **Parameters:**
+  - **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+    to `https://depot.galaxyproject.org/singularity/name:version`
+  - **out_path** (_str_) – The final target output path
+  - **cache_path** (_str_ or _None_) – The NXF_SINGULARITY_CACHEDIR path if set, None if not
+  - **progress** (_Progress_) – Rich progress bar instance to add tasks to.
+
+#### `singularity_image_filenames(container){:python}`
+
+Check Singularity cache for image, copy to destination folder if found.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Can be direct download URL
+ or a Docker Hub repository ID.
+- **Returns:**
+  Returns True if we have the image in the target location.
+  Returns a download path if not.
+- **Return type:**
+ results (bool, str)
+
+#### `singularity_pull_image(container, out_path, cache_path, progress){:python}`
+
+Pull a singularity image using `singularity pull`
+
+Attempt to use a local installation of singularity to pull the image.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `nfcore/name:version`.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
diff --git a/src/content/tools/docs/1.13/api/index.md b/src/content/tools/docs/1.13/api/index.md
new file mode 100644
index 0000000000..8fcbeab99d
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/index.md
@@ -0,0 +1,49 @@
+# API Reference
+
+# Tests:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+ - [`PipelineCreate`](create#nf_core.create.PipelineCreate)
+- [nf_core.download](download)
+ - [`DownloadProgress`](download#nf_core.download.DownloadProgress)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+ - [`Launch`](launch#nf_core.launch.Launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+ - [`PipelineSync`](sync#nf_core.sync.PipelineSync)
+ - [`PullRequestException`](sync#nf_core.sync.PullRequestException)
+ - [`SyncException`](sync#nf_core.sync.SyncException)
+- [nf_core.utils](utils)
+ - [`Pipeline`](utils#nf_core.utils.Pipeline)
+ - [`anaconda_package()`](utils#nf_core.utils.anaconda_package)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`custom_yaml_dumper()`](utils#nf_core.utils.custom_yaml_dumper)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`get_biocontainer_tag()`](utils#nf_core.utils.get_biocontainer_tag)
+ - [`github_api_auto_auth()`](utils#nf_core.utils.github_api_auto_auth)
+ - [`nextflow_cmd()`](utils#nf_core.utils.nextflow_cmd)
+ - [`parse_anaconda_licence()`](utils#nf_core.utils.parse_anaconda_licence)
+ - [`pip_package()`](utils#nf_core.utils.pip_package)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
diff --git a/src/content/tools/docs/1.13/api/launch.md b/src/content/tools/docs/1.13/api/launch.md
new file mode 100644
index 0000000000..8affe0fa4d
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/launch.md
@@ -0,0 +1,87 @@
+# nf_core.launch
+
+Launch a pipeline, interactively collecting params
+
+### _`class{:python}`_`nf_core.launch.Launch(pipeline=None, revision=None, command_only=False, params_in=None, params_out=None, save_all=False, show_hidden=False, url=None, web_id=None){:python}`
+
+Bases: `object`
+
+Class to hold config options to launch a pipeline
+
+#### `build_command(){:python}`
+
+Build the nextflow run command based on what we know
+
+#### `get_pipeline_schema(){:python}`
+
+Load and validate the schema from the supplied pipeline
+
+#### `get_web_launch_response(){:python}`
+
+Given a URL for a web-gui launch response, recursively query it until results are ready.
+
+#### `launch_pipeline(){:python}`
+
+#### `launch_web_gui(){:python}`
+
+Send schema to nf-core website and launch input GUI
+
+#### `launch_workflow(){:python}`
+
+Launch nextflow if required
+
+#### `merge_nxf_flag_schema(){:python}`
+
+Take the Nextflow flag schema and merge it with the pipeline schema
+
+#### `print_param_header(param_id, param_obj, is_group=False){:python}`
+
+#### `prompt_group(group_id, group_obj){:python}`
+
+Prompt for edits to a group of parameters (subschema in ‘definitions’)
+
+- **Parameters:**
+  - **group_id** – Parameter group ID (string)
+  - **group_obj** – JSON Schema keys (dict)
+- **Returns:**
+  Dict of `param_id: val` answers
+
+#### `prompt_param(param_id, param_obj, is_required, answers){:python}`
+
+Prompt for a single parameter
+
+#### `prompt_schema(){:python}`
+
+Go through the pipeline schema and prompt user to change defaults
+
+#### `prompt_web_gui(){:python}`
+
+Ask whether to use the web-based or cli wizard to collect params
+
+#### `sanitise_web_response(){:python}`
+
+The web builder returns everything as strings.
+Use the functions defined in the cli wizard to convert to the correct types.
+
+#### `set_schema_inputs(){:python}`
+
+Take the loaded schema and set the defaults as the input parameters
+If a nf_params.json file is supplied, apply these over the top
+
+#### `single_param_to_questionary(param_id, param_obj, answers=None, print_help=True){:python}`
+
+Convert a JSONSchema param to a Questionary question
+
+- **Parameters:**
+ - **param_id** – Parameter ID (string)
+ - **param_obj** – JSON Schema keys (dict)
+ - **answers** – Optional preexisting answers (dict)
+ - **print_help** – If description and help_text should be printed (bool)
+- **Returns:**
+ Single Questionary dict, to be appended to questions list
+
+#### `strip_default_params(){:python}`
+
+Strip parameters if they have not changed from the default
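+
+A minimal usage sketch; the pipeline name, revision and output filename are illustrative placeholders:
+
+```python
+import nf_core.launch
+
+# Placeholder pipeline, revision and params output file
+launcher = nf_core.launch.Launch(
+    pipeline="nf-core/rnaseq",
+    revision="1.0",
+    params_out="nf-params.json",
+)
+
+# Interactively collect params (CLI wizard or web GUI), write the params file
+# and build the `nextflow run` command
+launcher.launch_pipeline()
+```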
diff --git a/src/content/tools/docs/1.13/api/licences.md b/src/content/tools/docs/1.13/api/licences.md
new file mode 100644
index 0000000000..578ddaae1d
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/licences.md
@@ -0,0 +1,37 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
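+
+A minimal usage sketch, reusing the `hlatyping` example from above:
+
+```python
+import nf_core.licences
+
+# Short pipeline names such as "hlatyping" are accepted as well as "nf-core/hlatyping"
+licences = nf_core.licences.WorkflowLicences("hlatyping")
+
+# Fetch dependency licences from Anaconda / PyPI and print a summary
+licences.run_licences()
+```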
diff --git a/src/content/tools/docs/1.13/api/lint.md b/src/content/tools/docs/1.13/api/lint.md
new file mode 100644
index 0000000000..97f22f4911
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/lint.md
@@ -0,0 +1,138 @@
+# nf_core.lint
+
+:::note
+See the [Lint Tests](../lint_tests/index.html) docs for information about specific linting functions.
+:::
+
+
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, fix=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
+
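+A minimal usage sketch; the pipeline path and JSON output filename are placeholders:
+
+```python
+import nf_core.lint
+
+# Placeholder path to a local pipeline clone
+lint_obj = nf_core.lint.run_linting(
+    "/path/to/nf-core-mypipeline",
+    release_mode=False,
+    show_passed=False,
+    json_fn="lint_results.json",
+)
+
+# The returned PipelineLint object holds the collected results
+print(f"Passed: {len(lint_obj.passed)}")
+print(f"Warned: {len(lint_obj.warned)}")
+print(f"Failed: {len(lint_obj.failed)}")
+```
+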
+### _`class{:python}`_`nf_core.lint.PipelineLint(wf_path, release_mode=False, fix=(), fail_ignored=False){:python}`
+
+Bases: [`Pipeline`](utils#nf_core.utils.Pipeline)
+
+Object to hold linting information and results.
+
+Inherits [`nf_core.utils.Pipeline`](utils#nf_core.utils.Pipeline) class.
+
+Use the [`PipelineLint._lint_pipeline()`](#nf_core.lint.PipelineLint._lint_pipeline) function to run lint tests.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `ignored{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `lint_config{:python}`
+
+The parsed nf-core linting config for this pipeline
+
+- **Type:**
+ dict
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `_get_results_md(){:python}`
+
+Create a markdown file suitable for posting in a GitHub comment.
+
+- **Returns:**
+ Formatting markdown content
+- **Return type:**
+ markdown (str)
+
+#### `_lint_pipeline(){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+into object attributes: `passed`, `ignored`, `warned` and `failed`.
+
+#### `_print_results(show_passed=False){:python}`
+
+Print linting results to the command line.
+
+Uses the `rich` library to print a set of formatted tables to the command line
+summarising the linting results.
+
+#### `_save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+- **Parameters:**
+ **json_fn** (_str_) – File path to write JSON to.
+
+#### `_strip_ansi_codes(string, replace_with=''){:python}`
+
+Strip ANSI colouring codes from a string to return plain text.
+
+Solution found on Stack Overflow.
+
+#### `_wrap_quotes(files){:python}`
+
+Helper function to take a list of filenames and format with markdown.
+
+- **Parameters:**
+ **files** (_list_) –
+
+ List of filenames, eg:
+
+ ```default
+ ['foo', 'bar', 'baz']
+ ```
+
+- **Returns:**
+  Formatted string of paths separated by word `or`, eg:
+  ```default
+  `foo` or `bar` or `baz`
+  ```
+- **Return type:**
+ markdown (str)
diff --git a/src/content/tools/docs/1.13/api/list.md b/src/content/tools/docs/1.13/api/list.md
new file mode 100644
index 0000000000..2fc6448de0
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/list.md
@@ -0,0 +1,99 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Bases: `object`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+Bases: `object`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as it is retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Bases: `object`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
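+A minimal usage sketch; the filter keyword is an illustrative placeholder:
+
+```python
+import nf_core.list
+
+# Print a table of workflows matching the keyword "rna", sorted by GitHub stars
+nf_core.list.list_workflows(filter_by=["rna"], sort_by="stars", as_json=False)
+```
+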
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
diff --git a/src/content/tools/docs/1.13/api/modules.md b/src/content/tools/docs/1.13/api/modules.md
new file mode 100644
index 0000000000..2dd47b6359
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/modules.md
@@ -0,0 +1 @@
+# nf_core.modules
diff --git a/src/content/tools/docs/1.13/api/schema.md b/src/content/tools/docs/1.13/api/schema.md
new file mode 100644
index 0000000000..162788eb1a
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/schema.md
@@ -0,0 +1,109 @@
+# nf_core.schema
+
+Code to deal with pipeline JSON Schema
+
+### _`class{:python}`_`nf_core.schema.PipelineSchema{:python}`
+
+Bases: `object`
+
+Class to generate a schema object with
+functions to handle pipeline JSON Schema
+
+#### `add_schema_found_configs(){:python}`
+
+Add anything that’s found in the Nextflow params that’s missing in the pipeline schema
+
+#### `build_schema(pipeline_dir, no_prompts, web_only, url){:python}`
+
+Interactively build a new pipeline schema for a pipeline
+
+#### `build_schema_param(p_val){:python}`
+
+Build a pipeline schema dictionary for a param interactively
+
+#### `get_schema_defaults(){:python}`
+
+Generate set of default input parameters from schema.
+
+Saves defaults to self.schema_defaults
+Returns count of how many parameters were found (with or without a default value)
+
+#### `get_schema_path(path, local_only=False, revision=None){:python}`
+
+Given a pipeline name, directory, or path, set self.schema_filename
+
+#### `get_web_builder_response(){:python}`
+
+Given a URL for a Schema build response, recursively query it until results are ready.
+Once ready, validate Schema and write to disk.
+
+#### `get_wf_params(){:python}`
+
+Load the pipeline parameter defaults using nextflow config
+Strip out only the params. values and ignore anything that is not a flat variable
+
+#### `launch_web_builder(){:python}`
+
+Send pipeline schema to web builder and wait for response
+
+#### `load_input_params(params_path){:python}`
+
+Load a parameters file (JSON/YAML) from a given path
+
+These should be input parameters used to run a pipeline with
+the Nextflow -params-file option.
+
+#### `load_lint_schema(){:python}`
+
+Load and lint a given schema to see if it looks valid
+
+#### `load_schema(){:python}`
+
+Load a pipeline schema from a file
+
+#### `make_skeleton_schema(){:python}`
+
+Make a new pipeline schema from the template
+
+#### `prompt_remove_schema_notfound_config(p_key){:python}`
+
+Check if a given key is found in the nextflow config params and prompt to remove it if not
+
+Returns True if it should be removed, False if not.
+
+#### `remove_schema_notfound_configs(){:python}`
+
+Go through top-level schema and all definitions sub-schemas to remove
+anything that’s not in the nextflow config.
+
+#### `remove_schema_notfound_configs_single_schema(schema){:python}`
+
+Go through a single schema / set of properties and strip out
+anything that’s not in the nextflow config.
+
+Takes: Schema or sub-schema with properties key
+Returns: Cleaned schema / sub-schema
+
+#### `save_schema(){:python}`
+
+Save a pipeline schema to a file
+
+#### `validate_default_params(){:python}`
+
+Check that all default parameters in the schema are valid
+Ignores ‘required’ flag, as required parameters might have no defaults
+
+#### `validate_params(){:python}`
+
+Check given parameters against a schema and validate
+
+#### `validate_schema(schema=None){:python}`
+
+Check that the Schema is valid
+
+Returns: Number of parameters found
+
+#### `validate_schema_title_description(schema=None){:python}`
+
+Extra validation command for linting.
+Checks that the schema “$id”, “title” and “description” attributes match the pipeline config.
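+
+A minimal usage sketch for loading and linting a schema from Python; the pipeline path is a placeholder:
+
+```python
+import nf_core.schema
+
+schema_obj = nf_core.schema.PipelineSchema()
+
+# Placeholder: can be a pipeline name, directory or path to a schema file
+schema_obj.get_schema_path("/path/to/nf-core-mypipeline")
+
+# Load the schema from disk and lint it
+schema_obj.load_lint_schema()
+```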
diff --git a/src/content/tools/docs/1.13/api/sync.md b/src/content/tools/docs/1.13/api/sync.md
new file mode 100644
index 0000000000..e327553c3f
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/sync.md
@@ -0,0 +1,151 @@
+# nf_core.sync
+
+Synchronise a pipeline TEMPLATE branch with the template.
+
+### _`class{:python}`_`nf_core.sync.PipelineSync(pipeline_dir, from_branch=None, make_pr=False, gh_repo=None, gh_username=None){:python}`
+
+Bases: `object`
+
+Object to hold syncing information and results.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **from_branch** (_str_) – The branch to use to fetch config vars. If not set, will use current active branch
+ - **make_pr** (_bool_) – Set this to True to create a GitHub pull-request with the changes
+ - **gh_username** (_str_) – GitHub username
+ - **gh_repo** (_str_) – GitHub repository name
+
+#### `pipeline_dir{:python}`
+
+Path to target pipeline directory
+
+- **Type:**
+ str
+
+#### `from_branch{:python}`
+
+Repo branch to use when collecting workflow variables. Default: active branch.
+
+- **Type:**
+ str
+
+#### `original_branch{:python}`
+
+Repo branch that was checked out before we started.
+
+- **Type:**
+ str
+
+#### `made_changes{:python}`
+
+Whether making the new template pipeline introduced any changes
+
+- **Type:**
+ bool
+
+#### `make_pr{:python}`
+
+Whether to try to automatically make a PR on GitHub.com
+
+- **Type:**
+ bool
+
+#### `required_config_vars{:python}`
+
+List of nextflow variables required to make template pipeline
+
+- **Type:**
+ list
+
+#### `gh_username{:python}`
+
+GitHub username
+
+- **Type:**
+ str
+
+#### `gh_repo{:python}`
+
+GitHub repository name
+
+- **Type:**
+ str
+
+#### `checkout_template_branch(){:python}`
+
+Try to check out the origin/TEMPLATE in a new TEMPLATE branch.
+If this fails, try to check out an existing local TEMPLATE branch.
+
+#### `close_open_pr(pr){:python}`
+
+Given a PR API response, add a comment and close.
+
+#### `close_open_template_merge_prs(){:python}`
+
+Get all template merging branches (starting with ‘nf-core-template-merge-‘)
+and check for any open PRs from these branches to the self.from_branch
+If open PRs are found, add a comment and close them
+
+#### `commit_template_changes(){:python}`
+
+If we have any changes with the new template files, make a git commit
+
+#### `create_merge_base_branch(){:python}`
+
+Create a new branch from the updated TEMPLATE branch
+This branch will then be used to create the PR
+
+#### `delete_template_branch_files(){:python}`
+
+Delete all files in the TEMPLATE branch
+
+#### `get_wf_config(){:python}`
+
+Check out the target branch if requested and fetch the nextflow config.
+Check that we have the required config variables.
+
+#### `inspect_sync_dir(){:python}`
+
+Takes a look at the target directory for syncing. Checks that it’s a git repo
+and makes sure that there are no uncommitted changes.
+
+#### `make_pull_request(){:python}`
+
+Create a pull request to a base branch (default: dev),
+from a head branch (default: TEMPLATE)
+
+Returns: An instance of class requests.Response
+
+#### `make_template_pipeline(){:python}`
+
+Delete all files and make a fresh template using the workflow variables
+
+#### `push_merge_branch(){:python}`
+
+Push the newly created merge branch to the remote repository
+
+#### `push_template_branch(){:python}`
+
+If we made any changes, push the TEMPLATE branch to the default remote
+and try to make a PR. If we don’t have the auth token, try to figure out a URL
+for the PR and print this to the console.
+
+#### `reset_target_dir(){:python}`
+
+Reset the target pipeline directory. Check out the original branch.
+
+#### `sync(){:python}`
+
+Find workflow attributes, create a new template pipeline on TEMPLATE
+
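+A minimal usage sketch; the directory and GitHub details are illustrative placeholders:
+
+```python
+import nf_core.sync
+
+# Placeholder pipeline directory and GitHub details
+sync_obj = nf_core.sync.PipelineSync(
+    "/path/to/nf-core-mypipeline",
+    from_branch="dev",
+    make_pr=True,
+    gh_username="your-github-username",
+    gh_repo="nf-core-mypipeline",
+)
+
+# Rebuild the TEMPLATE branch from the current template and (optionally) open a PR
+sync_obj.sync()
+```
+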
+### _`exception{:python}`_`nf_core.sync.PullRequestException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error creating a Pull-Request on GitHub.com
+
+### _`exception{:python}`_`nf_core.sync.SyncException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error with TEMPLATE branch synchronisation
diff --git a/src/content/tools/docs/1.13/api/utils.md b/src/content/tools/docs/1.13/api/utils.md
new file mode 100644
index 0000000000..782b715f4a
--- /dev/null
+++ b/src/content/tools/docs/1.13/api/utils.md
@@ -0,0 +1,200 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### _`class{:python}`_`nf_core.utils.Pipeline(wf_path){:python}`
+
+Bases: `object`
+
+Object to hold information about a local pipeline.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (`environment.yml`).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `nf_config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `git_sha{:python}`
+
+The git sha for the repo commit / current GitHub pull-request ($GITHUB_PR_COMMIT)
+
+- **Type:**
+ str
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `wf_path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `schema_obj{:python}`
+
+A `PipelineSchema` object
+
+- **Type:**
+ obj
+
+#### `_fp(fn){:python}`
+
+Convenience function to get full path to a file in the pipeline
+
+#### `_list_files(){:python}`
+
+Get a list of all files in the pipeline
+
+#### `_load(){:python}`
+
+Run core load functions
+
+#### `_load_conda_environment(){:python}`
+
+Try to load the pipeline environment.yml file, if it exists
+
+#### `_load_pipeline_config(){:python}`
+
+Get the nextflow config for this pipeline
+
+Once loaded, set a few convenience reference class attributes
+
+### `nf_core.utils.anaconda_package(dep, dep_channels=['conda-forge', 'bioconda', 'defaults']){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ - **dep** (_str_) – A conda package name.
+ - **dep_channels** (_list_) – list of conda channels to use
+- **Raises:**
+  - **LookupError** – if the connection fails, times out or gives an unexpected status code
+  - **ValueError** – if the package name cannot be found (404)
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.custom_yaml_dumper(){:python}`
+
+Overwrite default PyYAML output to make Prettier YAML linting happy
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
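+A minimal usage sketch; the pipeline path is a placeholder and the printed keys follow the flattened `manifest.name` style used throughout these docs:
+
+```python
+import nf_core.utils
+
+# Placeholder path to a local pipeline clone
+config = nf_core.utils.fetch_wf_config("/path/to/nf-core-mypipeline")
+
+# The returned dict uses flattened Nextflow config keys
+print(config.get("manifest.name"))
+print(config.get("manifest.nextflowVersion"))
+```
+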
+### `nf_core.utils.get_biocontainer_tag(package, version){:python}`
+
+Given a bioconda package and version, looks for a container
+at quay.io and returns the tag of the most recent image
+that matches the package version.
+
+Sends a HTTP GET request to the quay.io API.
+
+- **Parameters:**
+  - **package** (_str_) – A bioconda package name.
+  - **version** (_str_) – Version of the bioconda package.
+
+- **Raises:**
+  - **LookupError** – if the connection fails, times out or gives an unexpected status code
+  - **ValueError** – if the package name cannot be found (404)
+
+### `nf_core.utils.github_api_auto_auth(){:python}`
+
+### `nf_core.utils.nextflow_cmd(cmd){:python}`
+
+Run a Nextflow command and capture the output. Handle errors nicely
+
+### `nf_core.utils.parse_anaconda_licence(anaconda_response, version=None){:python}`
+
+Given a response from the anaconda API using anaconda_package, parse the software licences.
+
+Returns: Set of licence types
+
+### `nf_core.utils.pip_package(dep){:python}`
+
+Query PyPI package information.
+
+Sends a HTTP GET request to the PyPI remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPI package name.
+- **Raises:**
+  - **LookupError** – if the connection fails or times out
+  - **ValueError** – if the package name cannot be found
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes argument api_url for URL
+
+Expects the API response to be valid JSON and contain a top-level ‘status’ key.
+
+### `nf_core.utils.rich_force_colors(){:python}`
+
+Check if any environment variables are set to force Rich to use coloured output
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
diff --git a/src/content/tools/docs/1.13/index.md b/src/content/tools/docs/1.13/index.md
new file mode 100644
index 0000000000..c5988c4254
--- /dev/null
+++ b/src/content/tools/docs/1.13/index.md
@@ -0,0 +1,18 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+This documentation is for the `nf-core/tools` package.
+
+Primarily, it describes the different [code lint tests](lint_tests/index.html)
+run by `nf-core lint` (typically visited by a developer when their pipeline fails a given
+test), and also provides a reference for the `nf_core` [Python package API](api/index.html).
+
diff --git a/src/content/tools/docs/1.13/lint_tests/actions_awsfulltest.md b/src/content/tools/docs/1.13/lint_tests/actions_awsfulltest.md
new file mode 100644
index 0000000000..fd32fa4a32
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/actions_awsfulltest.md
@@ -0,0 +1,30 @@
+# actions_awsfulltest
+
+#### `PipelineLint.actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS.
+This should ensure that the pipeline runs as expected on AWS and provide a resource estimation.
+
+The GitHub Actions workflow is called `awsfulltest.yml`, and it can be found in the `.github/workflows/` directory.
+
+:::warning
+This workflow incurs AWS costs, therefore it should only be triggered for pipeline releases:
+`workflow_run` (after the docker hub release workflow) and `workflow_dispatch`.
+:::
+
+:::note
+You can manually trigger the AWS tests by going to the Actions tab on the pipeline GitHub repository and selecting the
+nf-core AWS full size tests workflow on the left.
+:::
+
+:::note
+For tests on full data prior to release, the [Nextflow Tower](https://tower.nf) launch feature can be employed.
+:::
+
+The `.github/workflows/awsfulltest.yml` file is tested for the following:
+
+- Must be turned on for `workflow_dispatch`.
+- Must be turned on for `workflow_run` with `workflows: ["nf-core Docker push (release)"]` and `types: [completed]`
+- Should run the profile `test_full` that should be edited to provide the links to full-size datasets. If it runs the profile `test`, a warning is given.
diff --git a/src/content/tools/docs/1.13/lint_tests/actions_awstest.md b/src/content/tools/docs/1.13/lint_tests/actions_awstest.md
new file mode 100644
index 0000000000..4e4698c4e9
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/actions_awstest.md
@@ -0,0 +1,24 @@
+# actions_awstest
+
+#### `PipelineLint.actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on AWS.
+This should ensure that the pipeline runs as expected on AWS (which often has its own unique edge cases).
+
+:::warning
+Running tests on AWS incurs costs, so these tests are not triggered automatically.
+Instead, they use the `workflow_dispatch` trigger, which allows for manual triggering
+of the workflow when testing on AWS is desired.
+:::
+
+:::note
+You can trigger the tests by going to the Actions tab on the pipeline GitHub repository
+and selecting the nf-core AWS test workflow on the left.
+:::
+
+The `.github/workflows/awstest.yml` file is tested for the following:
+
+- Must _not_ be turned on for `push` or `pull_request`.
+- Must be turned on for `workflow_dispatch`.
diff --git a/src/content/tools/docs/1.13/lint_tests/actions_ci.md b/src/content/tools/docs/1.13/lint_tests/actions_ci.md
new file mode 100644
index 0000000000..34f2096391
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/actions_ci.md
@@ -0,0 +1,63 @@
+# actions_ci
+
+#### `PipelineLint.actions_ci(){:python}`
+
+Checks that the GitHub Actions pipeline CI (Continuous Integration) workflow is valid.
+
+The `.github/workflows/ci.yml` GitHub Actions workflow runs the pipeline on a minimal test
+dataset using `-profile test` to check that no breaking changes have been introduced.
+Final result files are not checked, just that the pipeline exits successfully.
+
+This lint test checks this GitHub Actions workflow file for the following:
+
+- Workflow must be triggered on the following events:
+ ```yaml
+ on:
+ push:
+ branches:
+ - dev
+ pull_request:
+ release:
+ types: [published]
+ ```
+- The minimum Nextflow version specified in the pipeline’s `nextflow.config` matches that defined by `nxf_ver` in the test matrix:
+
+ ```yaml
+ strategy:
+ matrix:
+ # Nextflow versions: check pipeline minimum and current latest
+ nxf_ver: ['19.10.0', '']
+ ```
+
+ :::note
+ These `matrix` variables run the test workflow twice, varying the `nxf_ver` variable each time.
+ This is used in the `nextflow run` commands to test the pipeline with both the latest available version
+ of the pipeline (`''`) and the stated minimum required version.
+ :::
+
+- The Docker container for the pipeline must use the correct pipeline version number:
+
+  > - Development pipelines:
+  >   ```bash
+  >   docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:dev
+  >   ```
+  > - Released pipelines:
+  >   ```bash
+  >   docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:<pipeline_version>
+  >   ```
+  > - Complete example for a released pipeline called _nf-core/example_ with version number `1.0.0`:
+  >   ```yaml
+  >   - name: Build new docker image
+  >     if: env.GIT_DIFF
+  >     run: docker build --no-cache . -t nfcore/example:1.0.0
+  >
+  >   - name: Pull docker image
+  >     if: ${{ !env.GIT_DIFF }}
+  >     run: |
+  >       docker pull nfcore/example:dev
+  >       docker tag nfcore/example:dev nfcore/example:1.0.0
+  >   ```
diff --git a/src/content/tools/docs/1.13/lint_tests/actions_schema_validation.md b/src/content/tools/docs/1.13/lint_tests/actions_schema_validation.md
new file mode 100644
index 0000000000..caa60db6c4
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/actions_schema_validation.md
@@ -0,0 +1,18 @@
+# actions_schema_validation
+
+#### `PipelineLint.actions_schema_validation(){:python}`
+
+Checks that the GitHub Action workflow yml/yaml files adhere to the correct schema
+
+nf-core pipelines use GitHub actions workflows to run CI tests, check formatting and also linting, among others.
+These workflows are defined by `yml` scripts in `.github/workflows/`. This lint test verifies that these scripts are valid
+by comparing them against the JSON schema for GitHub workflows.
+
+To pass this test, make sure that all your workflows contain the required properties `on` and `jobs` and that
+all other properties are of the correct type, as specified in the schema.
diff --git a/src/content/tools/docs/1.13/lint_tests/conda_dockerfile.md b/src/content/tools/docs/1.13/lint_tests/conda_dockerfile.md
new file mode 100644
index 0000000000..388e51dd9a
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/conda_dockerfile.md
@@ -0,0 +1,33 @@
+# conda_dockerfile
+
+#### `PipelineLint.conda_dockerfile(){:python}`
+
+Checks the Dockerfile for use with Conda environments
+
+:::note
+This test only runs if there is both an `environment.yml`
+and `Dockerfile` present in the pipeline root directory.
+:::
+
+If a workflow has a conda `environment.yml` file, the `Dockerfile` should use this
+to create the docker image. These files are typically very short, just creating the conda
+environment inside the container.
+
+This linting test checks for the following:
+
+- All of the following lines are present in the file (where `PIPELINE` is your pipeline name):
+ > ```Dockerfile
+ > FROM nfcore/base:VERSION
+ > COPY environment.yml /
+ > RUN conda env create --quiet -f /environment.yml && conda clean -a
+ > RUN conda env export --name PIPELINE > PIPELINE.yml
+ > ENV PATH /opt/conda/envs/PIPELINE/bin:$PATH
+ > ```
+- That the `FROM nfcore/base:VERSION` is tagged to the most recent release of nf-core/tools
+ > - The linting tool compares the tag against the currently installed version of tools.
+ > - This line is not checked if running a development version of nf-core/tools.
+
+:::note
+Additional lines and different metadata can be added to the `Dockerfile`
+without causing this lint test to fail.
+:::
diff --git a/src/content/tools/docs/1.13/lint_tests/conda_env_yaml.md b/src/content/tools/docs/1.13/lint_tests/conda_env_yaml.md
new file mode 100644
index 0000000000..f2dab4fba1
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/conda_env_yaml.md
@@ -0,0 +1,36 @@
+# conda_env_yaml
+
+#### `PipelineLint.conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+:::note
+This test is ignored if there is not an `environment.yml`
+file present in the pipeline root directory.
+:::
+
+DSL1 nf-core pipelines use a single Conda environment to manage all software
+dependencies for a workflow. This can be used directly with `-profile conda`
+and is also used in the `Dockerfile` to build a docker image.
+
+This test checks the conda `environment.yml` file to ensure that it follows nf-core guidelines.
+Each dependency is checked using the [Anaconda API service](https://api.anaconda.org/docs).
+Dependency sublists are ignored with the exception of `- pip`: these packages are also checked
+for pinned version numbers and checked using the [PyPI JSON API](https://wiki.python.org/moin/PyPIJSON).
+
+Specifically, this lint test makes sure that:
+
+- The environment `name` must match the pipeline name and version
+ > - The pipeline name is defined in the config variable `manifest.name`
+ > - Replace the slash with a hyphen as environment names shouldn’t contain that character
+ > - Example: For `nf-core/test` version 1.4, the conda environment name should be `nf-core-test-1.4`
+- All package dependencies have a specific version number pinned
+ > :::warning
+ > Remember that Conda package versions should be pinned with one equals sign (`toolname=1.1`),
+ > but pip uses two (`toolname==1.2`)
+ > :::
+- That package versions can be found and are the latest available
+ > - Test will go through all conda channels listed in the file, or check PyPI if `pip`
+ > - Conda dependencies with pinned channels (eg. `conda-forge::openjdk`) are ok too
+ > - In addition to the package name, the pinned version is checked
+ > - If a newer version is available, a warning will be reported
diff --git a/src/content/tools/docs/1.13/lint_tests/files_exist.md b/src/content/tools/docs/1.13/lint_tests/files_exist.md
new file mode 100644
index 0000000000..927fbadce4
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/files_exist.md
@@ -0,0 +1,77 @@
+# files_exist
+
+#### `PipelineLint.files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that specified
+files are either present or absent, as required.
+
+:::note
+This test raises an `AssertionError` if neither `nextflow.config` nor `main.nf` is found.
+If these files are not found then this cannot be a Nextflow pipeline and something has gone badly wrong.
+All lint tests are stopped immediately with a critical error message.
+:::
+
+Files that **must** be present:
+
+```default
+.gitattributes
+.github/.dockstore.yml
+.github/CONTRIBUTING.md
+.github/ISSUE_TEMPLATE/bug_report.md
+.github/ISSUE_TEMPLATE/config.yml
+.github/ISSUE_TEMPLATE/feature_request.md
+.github/markdownlint.yml
+.github/PULL_REQUEST_TEMPLATE.md
+.github/workflows/branch.yml
+.github/workflows/ci.yml
+.github/workflows/linting_comment.yml
+.github/workflows/linting.yml
+[LICENSE, LICENSE.md, LICENCE, LICENCE.md] # NB: British / American spelling
+assets/email_template.html
+assets/email_template.txt
+assets/nf-core-PIPELINE_logo.png
+assets/sendmail_template.txt
+bin/markdown_to_html.py
+CHANGELOG.md
+CODE_OF_CONDUCT.md
+docs/images/nf-core-PIPELINE_logo.png
+docs/output.md
+docs/README.md
+docs/usage.md
+lib/nfcore_external_java_deps.jar
+lib/NfcoreSchema.groovy
+nextflow_schema.json
+nextflow.config
+README.md
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'Dockerfile',
+'conf/base.config',
+'.github/workflows/awstest.yml',
+'.github/workflows/awsfulltest.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity',
+'parameters.settings.json',
+'bin/markdown_to_html.r',
+'conf/aws.config',
+'.github/workflows/push_dockerhub.yml'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
diff --git a/src/content/tools/docs/1.13/lint_tests/files_unchanged.md b/src/content/tools/docs/1.13/lint_tests/files_unchanged.md
new file mode 100644
index 0000000000..ea06ada29f
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/files_unchanged.md
@@ -0,0 +1,57 @@
+# files_unchanged
+
+#### `PipelineLint.files_unchanged(){:python}`
+
+Checks that certain pipeline files are not modified from template output.
+
+Iterates through the pipeline’s directory content and compares specified files
+against output from the template using the pipeline’s metadata. File content
+should not be modified / missing.
+
+Files that must be unchanged:
+
+```default
+'.gitattributes',
+'.github/.dockstore.yml',
+'.github/CONTRIBUTING.md',
+'.github/ISSUE_TEMPLATE/bug_report.md',
+'.github/ISSUE_TEMPLATE/config.yml',
+'.github/ISSUE_TEMPLATE/feature_request.md',
+'.github/markdownlint.yml',
+'.github/PULL_REQUEST_TEMPLATE.md',
+'.github/workflows/branch.yml',
+'.github/workflows/linting_comment.yml',
+'.github/workflows/linting.yml',
+'assets/email_template.html',
+'assets/email_template.txt',
+'assets/nf-core-PIPELINE_logo.png',
+'assets/sendmail_template.txt',
+'bin/markdown_to_html.py',
+'CODE_OF_CONDUCT.md',
+'docs/images/nf-core-PIPELINE_logo.png',
+'docs/README.md',
+'lib/nfcore_external_java_deps.jar',
+'lib/NfcoreSchema.groovy',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+```
+
+Files that can have additional content but must include the template contents:
+
+```default
+'.github/workflows/push_dockerhub_dev.yml',
+'.github/workflows/push_dockerhub_release.yml',
+'.gitignore',
+'assets/multiqc_config.yaml',
+```
+
+:::note
+You can configure the `nf-core lint` tests to ignore any of these checks by setting
+the `files_unchanged` key as follows in your linting config file. For example:
+
+```yaml
+files_unchanged:
+ - .github/workflows/branch.yml
+ - assets/multiqc_config.yaml
+```
+
+:::
diff --git a/src/content/tools/docs/1.13/lint_tests/index.md b/src/content/tools/docs/1.13/lint_tests/index.md
new file mode 100644
index 0000000000..f0e9f7e6a1
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/index.md
@@ -0,0 +1,38 @@
+# Lint tests
+
+# Tests:
+
+- [actions_awsfulltest](actions_awsfulltest)
+ - [`PipelineLint.actions_awsfulltest()`](actions_awsfulltest#nf_core.lint.PipelineLint.actions_awsfulltest)
+- [actions_awstest](actions_awstest)
+ - [`PipelineLint.actions_awstest()`](actions_awstest#nf_core.lint.PipelineLint.actions_awstest)
+- [actions_ci](actions_ci)
+ - [`PipelineLint.actions_ci()`](actions_ci#nf_core.lint.PipelineLint.actions_ci)
+- [actions_schema_validation](actions_schema_validation)
+ - [`PipelineLint.actions_schema_validation()`](actions_schema_validation#nf_core.lint.PipelineLint.actions_schema_validation)
+- [conda_dockerfile](conda_dockerfile)
+ - [`PipelineLint.conda_dockerfile()`](conda_dockerfile#nf_core.lint.PipelineLint.conda_dockerfile)
+- [conda_env_yaml](conda_env_yaml)
+ - [`PipelineLint.conda_env_yaml()`](conda_env_yaml#nf_core.lint.PipelineLint.conda_env_yaml)
+- [files_exist](files_exist)
+ - [`PipelineLint.files_exist()`](files_exist#nf_core.lint.PipelineLint.files_exist)
+- [files_unchanged](files_unchanged)
+ - [`PipelineLint.files_unchanged()`](files_unchanged#nf_core.lint.PipelineLint.files_unchanged)
+- [merge_markers](merge_markers)
+ - [`PipelineLint.merge_markers()`](merge_markers#nf_core.lint.PipelineLint.merge_markers)
+- [nextflow_config](nextflow_config)
+ - [`PipelineLint.nextflow_config()`](nextflow_config#nf_core.lint.PipelineLint.nextflow_config)
+- [pipeline_name_conventions](pipeline_name_conventions)
+ - [`PipelineLint.pipeline_name_conventions()`](pipeline_name_conventions#nf_core.lint.PipelineLint.pipeline_name_conventions)
+- [pipeline_todos](pipeline_todos)
+ - [`PipelineLint.pipeline_todos()`](pipeline_todos#nf_core.lint.PipelineLint.pipeline_todos)
+- [readme](readme)
+ - [`PipelineLint.readme()`](readme#nf_core.lint.PipelineLint.readme)
+- [schema_lint](schema_lint)
+ - [`PipelineLint.schema_lint()`](schema_lint#nf_core.lint.PipelineLint.schema_lint)
+- [schema_params](schema_params)
+ - [`PipelineLint.schema_params()`](schema_params#nf_core.lint.PipelineLint.schema_params)
+- [template_strings](template_strings)
+ - [`PipelineLint.template_strings()`](template_strings#nf_core.lint.PipelineLint.template_strings)
+- [version_consistency](version_consistency)
+ - [`PipelineLint.version_consistency()`](version_consistency#nf_core.lint.PipelineLint.version_consistency)
diff --git a/src/content/tools/docs/1.13/lint_tests/merge_markers.md b/src/content/tools/docs/1.13/lint_tests/merge_markers.md
new file mode 100644
index 0000000000..38076814b1
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/merge_markers.md
@@ -0,0 +1,8 @@
+# merge_markers
+
+#### `PipelineLint.merge_markers(){:python}`
+
+Check for remaining merge markers.
+
+This test looks for remaining merge markers in the code, e.g.:
+`>>>>>>>` or `<<<<<<<`
diff --git a/src/content/tools/docs/1.13/lint_tests/nextflow_config.md b/src/content/tools/docs/1.13/lint_tests/nextflow_config.md
new file mode 100644
index 0000000000..7f77f40c66
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/nextflow_config.md
@@ -0,0 +1,65 @@
+# nextflow_config
+
+#### `PipelineLint.nextflow_config(){:python}`
+
+Checks the pipeline configuration for required variables.
+
+All nf-core pipelines are required to be configured with a minimal set of variable
+names. This test fails or throws warnings if required variables are not set.
+
+:::note
+These config variables must be set in `nextflow.config` or another config
+file imported from there. Any variables set in nextflow script files (eg. `main.nf`)
+are not checked and will be assumed to be missing.
+:::
+
+**The following variables fail the test if missing:**
+
+- `params.outdir`: A directory in which all pipeline results should be saved
+- `manifest.name`: The pipeline name. Should begin with `nf-core/`
+- `manifest.description`: A description of the pipeline
+- `manifest.version`
+ - The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/).
+ - If `--release` is set when running `nf-core lint`, the version number must not contain the string `dev`
+  - If `--release` is _not_ set, the version should end in `dev` (warning triggered if not)
+- `manifest.nextflowVersion`
+ - The minimum version of Nextflow required to run the pipeline.
+ - Should be `>=` or `!>=` and a version number, eg. `manifest.nextflowVersion = '>=0.31.0'` (see [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html#scope-manifest))
+ - `>=` warns about old versions but tries to run anyway, `!>=` fails for old versions. Only use the latter if you _know_ that the pipeline will certainly fail before this version.
+ - This should correspond to the `NXF_VER` version tested by GitHub Actions.
+- `manifest.homePage`
+ - The homepage for the pipeline. Should be the nf-core GitHub repository URL,
+ so beginning with `https://github.com/nf-core/`
+- `timeline.enabled`, `trace.enabled`, `report.enabled`, `dag.enabled`
+ - The nextflow timeline, trace, report and DAG should be enabled by default (set to `true`)
+- `process.cpus`, `process.memory`, `process.time`
+ - Default CPUs, memory and time limits for tasks
+- `params.input`
+  - Input parameter to specify input data; specify this to avoid a warning
+ - Typical usage:
+ - `params.input`: Input data that is not NGS sequencing data
+
+**The following variables throw warnings if missing:**
+
+- `manifest.mainScript`: The filename of the main pipeline script (should be `main.nf`)
+- `timeline.file`, `trace.file`, `report.file`, `dag.file`
+ - Default filenames for the timeline, trace and report
+ - The DAG file path should end with `.svg` (If Graphviz is not installed, Nextflow will generate a `.dot` file instead)
+- `process.container`
+ - Docker Hub handle for a single default container for use by all processes.
+ - Must specify a tag that matches the pipeline version number if set.
+ - If the pipeline version number contains the string `dev`, the DockerHub tag must be `:dev`
+
+**The following variables are deprecated and fail the test if they are still present:**
+
+- `params.version`: The old method for specifying the pipeline version. Replaced by `manifest.version`
+- `params.nf_required_version`: The old method for specifying the minimum Nextflow version. Replaced by `manifest.nextflowVersion`
+- `params.container`: The old method for specifying the dockerhub container address. Replaced by `process.container`
+- `igenomesIgnore`: Changed to `igenomes_ignore`
+  :::note
+  The `snake_case` convention should now be used when defining pipeline parameters
+  :::
+
+**The following Nextflow syntax is deprecated and fails the test if present:**
+
+- Process-level configuration syntax still using the old Nextflow syntax, for example: `process.$fastqc` instead of `process withName:'fastqc'`.
diff --git a/src/content/tools/docs/1.13/lint_tests/pipeline_name_conventions.md b/src/content/tools/docs/1.13/lint_tests/pipeline_name_conventions.md
new file mode 100644
index 0000000000..788d77e41a
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/pipeline_name_conventions.md
@@ -0,0 +1,13 @@
+# pipeline_name_conventions
+
+#### `PipelineLint.pipeline_name_conventions(){:python}`
+
+Checks that the pipeline name adheres to nf-core conventions.
+
+In order to ensure consistent naming, pipeline names should contain only lower case, alphanumeric characters.
+Otherwise a warning is displayed.
+
+:::warning
+DockerHub is very picky about image names and doesn’t even allow hyphens (we are `nfcore`).
+This is a large part of why we set this rule.
+:::
diff --git a/src/content/tools/docs/1.13/lint_tests/pipeline_todos.md b/src/content/tools/docs/1.13/lint_tests/pipeline_todos.md
new file mode 100644
index 0000000000..e8a73d1663
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/pipeline_todos.md
@@ -0,0 +1,28 @@
+# pipeline_todos
+
+#### `PipelineLint.pipeline_todos(){:python}`
+
+Check for nf-core _TODO_ lines.
+
+The nf-core workflow template contains a number of comment lines to help developers
+of new pipelines know where they need to edit files and add content.
+They typically have the following format:
+
+```groovy
+// TODO nf-core: Make some kind of change to the workflow here
+```
+
+...or in markdown:
+
+```html
+<!-- TODO nf-core: Make some kind of change to the workflow here -->
+```
+
+This lint test runs through all files in the pipeline and searches for these lines.
+If any are found they will throw a warning.
+
+:::note
+Note that many GUI code editors have plugins to list all instances of _TODO_
+in a given project directory. This is a very quick and convenient way to get
+started on your pipeline!
+:::
diff --git a/src/content/tools/docs/1.13/lint_tests/readme.md b/src/content/tools/docs/1.13/lint_tests/readme.md
new file mode 100644
index 0000000000..51b8040c97
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/readme.md
@@ -0,0 +1,25 @@
+# readme
+
+#### `PipelineLint.readme(){:python}`
+
+Repository `README.md` tests
+
+The `README.md` files for a project are very important and must meet some requirements:
+
+- Nextflow badge
+ - If no Nextflow badge is found, a warning is given
+ - If a badge is found but the version doesn’t match the minimum version in the config file, the test fails
+ - Example badge code:
+ ```md
+ [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.27.6-brightgreen.svg)](https://www.nextflow.io/)
+ ```
+- Bioconda badge
+ - If your pipeline contains a file called `environment.yml` in the root directory, a bioconda badge is required
+ - Required badge code:
+ ```md
+ [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)
+ ```
+
+:::note
+These badges are a markdown image `![alt-text](<image URL>)` _inside_ a markdown link `[markdown image](<link URL>)`, so a bit fiddly to write.
+:::
diff --git a/src/content/tools/docs/1.13/lint_tests/schema_lint.md b/src/content/tools/docs/1.13/lint_tests/schema_lint.md
new file mode 100644
index 0000000000..b5e29febba
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/schema_lint.md
@@ -0,0 +1,59 @@
+# schema_lint
+
+#### `PipelineLint.schema_lint(){:python}`
+
+Pipeline schema syntax
+
+Pipelines should have a `nextflow_schema.json` file that describes the different
+pipeline parameters (eg. `params.something`, `--something`).
+
+:::note
+Reminder: you should generally never need to edit this JSON file by hand.
+The `nf-core schema build` command can create _and edit_ the file for you
+to keep it up to date, with a friendly user-interface for customisation.
+:::
+
+The lint test checks the schema for the following:
+
+- Schema should be a valid JSON file
+- Schema should adhere to [JSONSchema](https://json-schema.org/), Draft 7.
+- Parameters can be described in two places:
+ > - As `properties` in the top-level schema object
+  > - As `properties` within subschemas listed in a top-level `definitions` object
+- The schema must describe at least one parameter
+- There must be no duplicate parameter IDs across the schema and definition subschema
+- All subschema in `definitions` must be referenced in the top-level `allOf` key
+- The top-level `allOf` key must not describe any non-existent definitions
+- Default parameters in the schema must be valid
+- Core top-level schema attributes should exist and be set as follows:
+ > - `$schema`: `https://json-schema.org/draft-07/schema`
+ > - `$id`: URL to the raw schema file, eg. `https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`
+ > - `title`: `YOURPIPELINE pipeline parameters`
+ > - `description`: The pipeline config `manifest.description`
+
+For example, an _extremely_ minimal schema could look like this:
+
+```json
+{
+ "$schema": "https://json-schema.org/draft-07/schema",
+ "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json",
+ "title": "YOURPIPELINE pipeline parameters",
+ "description": "This pipeline is for testing",
+ "properties": {
+ "first_param": { "type": "string" }
+ },
+ "definitions": {
+ "my_first_group": {
+ "properties": {
+ "second_param": { "type": "string" }
+ }
+ }
+ },
+ "allOf": [{ "$ref": "#/definitions/my_first_group" }]
+}
+```
+
+:::note
+You can check your pipeline schema without having to run the entire pipeline lint
+by running `nf-core schema lint` instead of `nf-core lint`
+:::
diff --git a/src/content/tools/docs/1.13/lint_tests/schema_params.md b/src/content/tools/docs/1.13/lint_tests/schema_params.md
new file mode 100644
index 0000000000..7d9440fa15
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/schema_params.md
@@ -0,0 +1,11 @@
+# schema_params
+
+#### `PipelineLint.schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline.
+
+The `nextflow_schema.json` pipeline schema should describe every flat parameter
+returned from the `nextflow config` command (params that are objects or more complex structures are ignored).
+
+- Failure: If parameters are found in `nextflow_schema.json` that are not in `nextflow.config`
+- Warning: If parameters are found in `nextflow.config` that are not described in `nextflow_schema.json`
diff --git a/src/content/tools/docs/1.13/lint_tests/template_strings.md b/src/content/tools/docs/1.13/lint_tests/template_strings.md
new file mode 100644
index 0000000000..23c06bb63d
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/template_strings.md
@@ -0,0 +1,17 @@
+# template_strings
+
+#### `PipelineLint.template_strings(){:python}`
+
+Check for template placeholders.
+
+The `nf-core create` pipeline template uses
+[Jinja](https://jinja.palletsprojects.com/en/2.11.x/) behind the scenes.
+
+This lint test fails if any Jinja template variables such as
+`{{ pipeline_name }}` are found in your pipeline code.
+
+Finding a placeholder like this means that something was probably copied and pasted
+from the template without being properly rendered for your pipeline.
+
+This test ignores any double-brackets prefixed with a dollar sign, such as
+`${{ secrets.AWS_ACCESS_KEY_ID }}` as these placeholders are used in GitHub Actions workflows.
diff --git a/src/content/tools/docs/1.13/lint_tests/version_consistency.md b/src/content/tools/docs/1.13/lint_tests/version_consistency.md
new file mode 100644
index 0000000000..040f5e57fc
--- /dev/null
+++ b/src/content/tools/docs/1.13/lint_tests/version_consistency.md
@@ -0,0 +1,23 @@
+# version_consistency
+
+#### `PipelineLint.version_consistency(){:python}`
+
+Pipeline and container version number consistency.
+
+:::note
+This test only runs when the `--release` flag is set for `nf-core lint`,
+or `$GITHUB_REF` is equal to `master`.
+:::
+
+This lint test fetches the pipeline version number from three possible locations:
+
+- The pipeline config, `manifest.version`
+- The docker container in the pipeline config, `process.container`
+ > - Some pipelines may not have this set on a pipeline level. If it is not found, it is ignored.
+- `$GITHUB_REF`, if it looks like a release tag (`refs/tags/`)
+
+The test then checks that:
+
+- The container name has a tag specified (eg. `nfcore/pipeline:version`)
+- The pipeline version number is numeric (contains only numbers and dots)
+- That the version numbers all match one another
diff --git a/src/content/tools/docs/1.14/api/bump_version.md b/src/content/tools/docs/1.14/api/bump_version.md
new file mode 100644
index 0000000000..3f45f9fefb
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/bump_version.md
@@ -0,0 +1,35 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, pipeline_obj, patterns){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **pipeline_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+- **Raises:**
+  **ValueError**, if the version number cannot be found.
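+
+As a rough usage sketch (the local path `./my-pipeline` and the target version `1.1.0` are illustrative assumptions, not values from the docs above):
+
+```python
+import nf_core.utils
+import nf_core.bump_version
+
+# Build a Pipeline object for a local checkout and load its config and file list
+pipeline = nf_core.utils.Pipeline("./my-pipeline")  # hypothetical path
+pipeline._load()
+
+# Bump the pipeline version number in all relevant files
+nf_core.bump_version.bump_pipeline_version(pipeline, "1.1.0")
+```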
diff --git a/src/content/tools/docs/1.14/api/create.md b/src/content/tools/docs/1.14/api/create.md
new file mode 100644
index 0000000000..92dcb9fb5e
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/create.md
@@ -0,0 +1,36 @@
+# nf_core.create
+
+Creates a nf-core pipeline matching the current
+organization’s specification based on a template.
+
+### _`class{:python}`_`nf_core.create.PipelineCreate(name, description, author, version='1.0dev', no_git=False, force=False, outdir=None){:python}`
+
+Bases: `object`
+
+Creates a nf-core pipeline a la carte from the nf-core best-practise template.
+
+- **Parameters:**
+ - **name** (_str_) – Name for the pipeline.
+ - **description** (_str_) – Description for the pipeline.
+  - **author** (_str_) – Author's name for the pipeline.
+ - **version** (_str_) – Version flag. Semantic versioning only. Defaults to 1.0dev.
+ - **no_git** (_bool_) – Prevents the creation of a local Git repository for the pipeline. Defaults to False.
+ - **force** (_bool_) – Overwrites a given workflow directory with the same name. Defaults to False.
+ May the force be with you.
+ - **outdir** (_str_) – Path to the local output directory.
+
+#### `git_init_pipeline(){:python}`
+
+Initialises the new pipeline as a Git repository and submits first commit.
+
+#### `init_pipeline(){:python}`
+
+Creates the nf-core pipeline.
+
+#### `make_pipeline_logo(){:python}`
+
+Fetch a logo for the new pipeline from the nf-core website
+
+#### `render_template(){:python}`
+
+Runs Jinja to create a new nf-core pipeline.
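+
+A minimal sketch of programmatic use (name, description, author and output directory are invented for illustration):
+
+```python
+import nf_core.create
+
+creator = nf_core.create.PipelineCreate(
+    name="exampleflow",              # hypothetical pipeline name
+    description="A demo pipeline",   # hypothetical description
+    author="Jane Doe",               # hypothetical author
+    outdir="./nf-core-exampleflow",  # hypothetical output directory
+)
+creator.init_pipeline()  # creates the new pipeline from the template
+```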
diff --git a/src/content/tools/docs/1.14/api/download.md b/src/content/tools/docs/1.14/api/download.md
new file mode 100644
index 0000000000..7769825e08
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/download.md
@@ -0,0 +1,142 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadProgress(*columns: str | ProgressColumn, console: Console | None = None, auto_refresh: bool = True, refresh_per_second: float = 10, speed_estimate_period: float = 30.0, transient: bool = False, redirect_stdout: bool = True, redirect_stderr: bool = True, get_time: Callable[[], float] | None = None, disable: bool = False, expand: bool = False){:python}`
+
+Bases: `Progress`
+
+Custom Progress bar class, allowing us to have two progress
+bars with different columns / layouts.
+
+#### `get_renderables(){:python}`
+
+Get a number of renderables for the progress display.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline=None, release=None, outdir=None, compress_type=None, force=False, container=None, singularity_cache_only=False, parallel_downloads=4){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow.
+
+Starts by using nextflow config to pull out any process.container
+declarations. This works for DSL1.
+
+Second, we look for DSL2 containers. These can’t be found with
+nextflow config at the time of writing, so we scrape the pipeline files.
+
+#### `get_release_hash(){:python}`
+
+Find specified release / branch hash
+
+#### `get_singularity_images(){:python}`
+
+Loop through container names and download Singularity images
+
+#### `prompt_compression_type(){:python}`
+
+Ask user if we should compress the downloaded files
+
+#### `prompt_container_download(){:python}`
+
+Prompt whether to download container images or not
+
+#### `prompt_pipeline_name(){:python}`
+
+Prompt for the pipeline name if not set with a flag
+
+#### `prompt_release(){:python}`
+
+Prompt for pipeline release / branch
+
+#### `prompt_singularity_cachedir_only(){:python}`
+
+Ask if we should _only_ use $NXF_SINGULARITY_CACHEDIR without copying into target
+
+#### `prompt_use_singularity_cachedir(){:python}`
+
+Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set
+
+#### `singularity_copy_cache_image(container, out_path, cache_path){:python}`
+
+Copy Singularity image from NXF_SINGULARITY_CACHEDIR to target folder.
+
+#### `singularity_download_image(container, out_path, cache_path, progress){:python}`
+
+Download a singularity image from the web.
+
+Use native Python to download the file.
+
+- **Parameters:**
+ - **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `https://depot.galaxyproject.org/singularity/name:version`
+ - **out_path** (_str_) – The final target output path
+  - **cache_path** (_str_ or _None_) – The NXF_SINGULARITY_CACHEDIR path if set, None if not
+ - **progress** (_Progress_) – Rich progress bar instance to add tasks to.
+
+#### `singularity_image_filenames(container){:python}`
+
+Check Singularity cache for image, copy to destination folder if found.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Can be direct download URL
+ or a Docker Hub repository ID.
+- **Returns:**
+ Returns True if we have the image in the target location.
+  Returns a download path if not.
+- **Return type:**
+ results (bool, str)
+
+#### `singularity_pull_image(container, out_path, cache_path, progress){:python}`
+
+Pull a singularity image using `singularity pull`
+
+Attempt to use a local installation of singularity to pull the image.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `nfcore/name:version`.
+- **Raises:**
+  **Various exceptions possible from subprocess execution of Singularity.**
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on disk and validates it against the expected value.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError**, if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
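+
+For orientation, a minimal sketch of a programmatic download (pipeline name, release and output directory are illustrative assumptions):
+
+```python
+import nf_core.download
+
+dl = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/rnaseq",    # hypothetical pipeline name
+    release="1.0",                # hypothetical release tag
+    outdir="nf-core-rnaseq-1.0",  # hypothetical output directory
+)
+dl.download_workflow()  # fetch the workflow files and centralised configs
+```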
diff --git a/src/content/tools/docs/1.14/api/index.md b/src/content/tools/docs/1.14/api/index.md
new file mode 100644
index 0000000000..642e52b056
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/index.md
@@ -0,0 +1,53 @@
+# API Reference
+
+# Tests:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+ - [`PipelineCreate`](create#nf_core.create.PipelineCreate)
+- [nf_core.download](download)
+ - [`DownloadProgress`](download#nf_core.download.DownloadProgress)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+ - [`Launch`](launch#nf_core.launch.Launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+ - [`PipelineSync`](sync#nf_core.sync.PipelineSync)
+ - [`PullRequestException`](sync#nf_core.sync.PullRequestException)
+ - [`SyncException`](sync#nf_core.sync.SyncException)
+- [nf_core.utils](utils)
+ - [`Pipeline`](utils#nf_core.utils.Pipeline)
+ - [`anaconda_package()`](utils#nf_core.utils.anaconda_package)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`custom_yaml_dumper()`](utils#nf_core.utils.custom_yaml_dumper)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`get_biocontainer_tag()`](utils#nf_core.utils.get_biocontainer_tag)
+ - [`get_repo_releases_branches()`](utils#nf_core.utils.get_repo_releases_branches)
+ - [`github_api_auto_auth()`](utils#nf_core.utils.github_api_auto_auth)
+ - [`is_file_binary()`](utils#nf_core.utils.is_file_binary)
+ - [`nextflow_cmd()`](utils#nf_core.utils.nextflow_cmd)
+ - [`parse_anaconda_licence()`](utils#nf_core.utils.parse_anaconda_licence)
+ - [`pip_package()`](utils#nf_core.utils.pip_package)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`prompt_pipeline_release_branch()`](utils#nf_core.utils.prompt_pipeline_release_branch)
+ - [`prompt_remote_pipeline_name()`](utils#nf_core.utils.prompt_remote_pipeline_name)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
diff --git a/src/content/tools/docs/1.14/api/launch.md b/src/content/tools/docs/1.14/api/launch.md
new file mode 100644
index 0000000000..8affe0fa4d
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/launch.md
@@ -0,0 +1,87 @@
+# nf_core.launch
+
+Launch a pipeline, interactively collecting params
+
+### _`class{:python}`_`nf_core.launch.Launch(pipeline=None, revision=None, command_only=False, params_in=None, params_out=None, save_all=False, show_hidden=False, url=None, web_id=None){:python}`
+
+Bases: `object`
+
+Class to hold config option to launch a pipeline
+
+#### `build_command(){:python}`
+
+Build the nextflow run command based on what we know
+
+#### `get_pipeline_schema(){:python}`
+
+Load and validate the schema from the supplied pipeline
+
+#### `get_web_launch_response(){:python}`
+
+Given a URL for a web-gui launch response, recursively query it until results are ready.
+
+#### `launch_pipeline(){:python}`
+
+#### `launch_web_gui(){:python}`
+
+Send schema to nf-core website and launch input GUI
+
+#### `launch_workflow(){:python}`
+
+Launch nextflow if required
+
+#### `merge_nxf_flag_schema(){:python}`
+
+Take the Nextflow flag schema and merge it with the pipeline schema
+
+#### `print_param_header(param_id, param_obj, is_group=False){:python}`
+
+#### `prompt_group(group_id, group_obj){:python}`
+
+Prompt for edits to a group of parameters (subschema in ‘definitions’)
+
+- **Parameters:**
+  - **group_id** – Parameter ID (string)
+ - **group_obj** – JSON Schema keys (dict)
+- **Returns:**
+  Dict of `param_id: val` answers
+
+#### `prompt_param(param_id, param_obj, is_required, answers){:python}`
+
+Prompt for a single parameter
+
+#### `prompt_schema(){:python}`
+
+Go through the pipeline schema and prompt user to change defaults
+
+#### `prompt_web_gui(){:python}`
+
+Ask whether to use the web-based or cli wizard to collect params
+
+#### `sanitise_web_response(){:python}`
+
+The web builder returns everything as strings.
+Use the functions defined in the cli wizard to convert to the correct types.
+
+#### `set_schema_inputs(){:python}`
+
+Take the loaded schema and set the defaults as the input parameters
+If a nf_params.json file is supplied, apply these over the top
+
+#### `single_param_to_questionary(param_id, param_obj, answers=None, print_help=True){:python}`
+
+Convert a JSONSchema param to a Questionary question
+
+- **Parameters:**
+ - **param_id** – Parameter ID (string)
+ - **param_obj** – JSON Schema keys (dict)
+ - **answers** – Optional preexisting answers (dict)
+ - **print_help** – If description and help_text should be printed (bool)
+- **Returns:**
+ Single Questionary dict, to be appended to questions list
+
+#### `strip_default_params(){:python}`
+
+Strip parameters if they have not changed from the default
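+
+A minimal sketch of how the class might be used (pipeline name and revision are illustrative assumptions):
+
+```python
+import nf_core.launch
+
+launcher = nf_core.launch.Launch(pipeline="nf-core/rnaseq", revision="1.0")  # hypothetical values
+launcher.launch_pipeline()  # collect params interactively, then build the nextflow run command
+```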
diff --git a/src/content/tools/docs/1.14/api/licences.md b/src/content/tools/docs/1.14/api/licences.md
new file mode 100644
index 0000000000..578ddaae1d
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/licences.md
@@ -0,0 +1,37 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
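+
+A short usage sketch (the pipeline name is illustrative):
+
+```python
+import nf_core.licences
+
+licences = nf_core.licences.WorkflowLicences("hlatyping")  # short pipeline name, as above
+licences.run_licences()  # fetch dependency licences and print the condensed overview
+```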
diff --git a/src/content/tools/docs/1.14/api/lint.md b/src/content/tools/docs/1.14/api/lint.md
new file mode 100644
index 0000000000..e8f44cd06c
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/lint.md
@@ -0,0 +1,138 @@
+# nf_core.lint
+
+#### `SEE ALSO{:python}`
+
+See the [Lint Tests](../lint_tests/index.html) docs for information about specific linting functions.
+
+
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, fix=(), key=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(wf_path, release_mode=False, fix=(), key=(), fail_ignored=False){:python}`
+
+Bases: [`Pipeline`](utils#nf_core.utils.Pipeline)
+
+Object to hold linting information and results.
+
+Inherits [`nf_core.utils.Pipeline`](utils#nf_core.utils.Pipeline) class.
+
+Use the [`PipelineLint._lint_pipeline()`](#nf_core.lint.PipelineLint._lint_pipeline) function to run lint tests.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `ignored{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `lint_config{:python}`
+
+The parsed nf-core linting config for this pipeline
+
+- **Type:**
+ dict
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `_get_results_md(){:python}`
+
+Create a markdown file suitable for posting in a GitHub comment.
+
+- **Returns:**
+  Formatted markdown content
+- **Return type:**
+ markdown (str)
+
+#### `_lint_pipeline(){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+into object attributes: `passed`, `ignored`, `warned` and `failed`.
+
+#### `_print_results(show_passed=False){:python}`
+
+Print linting results to the command line.
+
+Uses the `rich` library to print a set of formatted tables to the command line
+summarising the linting results.
+
+#### `_save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+- **Parameters:**
+ **json_fn** (_str_) – File path to write JSON to.
+
+#### `_strip_ansi_codes(string, replace_with=''){:python}`
+
+Strip ANSI colouring codes from a string to return plain text.
+
+Solution found on Stack Overflow.
+
+#### `_wrap_quotes(files){:python}`
+
+Helper function to take a list of filenames and format with markdown.
+
+- **Parameters:**
+ **files** (_list_) –
+
+ List of filenames, eg:
+
+ ```default
+ ['foo', 'bar', 'baz']
+ ```
+
+- **Returns:**
+ Formatted string of paths separated by word `or`, eg:
+ ```default
+  `foo` or `bar` or `baz`
+ ```
+- **Return type:**
+ markdown (str)
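+
+As a rough sketch of running the linting from Python (the pipeline path is an illustrative assumption):
+
+```python
+import nf_core.lint
+
+# Run all lint tests on a local pipeline directory
+lint_obj = nf_core.lint.run_linting("./my-pipeline", release_mode=False, show_passed=True)
+
+# Inspect the collected results (lists of (test_name, reason) tuples)
+print(len(lint_obj.passed), "passed,", len(lint_obj.warned), "warned,", len(lint_obj.failed), "failed")
+```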
diff --git a/src/content/tools/docs/1.14/api/list.md b/src/content/tools/docs/1.14/api/list.md
new file mode 100644
index 0000000000..2fc6448de0
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/list.md
@@ -0,0 +1,99 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Bases: `object`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+Bases: `object`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Bases: `object`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on a Stack Overflow answer; adapted by sven1103
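+
+A minimal sketch (the filter keyword is an illustrative assumption):
+
+```python
+import nf_core.list
+
+# Print a summary table of remote nf-core workflows matching the keyword "rna"
+nf_core.list.list_workflows(filter_by=["rna"], sort_by="name")
+```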
diff --git a/src/content/tools/docs/1.14/api/modules.md b/src/content/tools/docs/1.14/api/modules.md
new file mode 100644
index 0000000000..2dd47b6359
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/modules.md
@@ -0,0 +1 @@
+# nf_core.modules
diff --git a/src/content/tools/docs/1.14/api/schema.md b/src/content/tools/docs/1.14/api/schema.md
new file mode 100644
index 0000000000..bbb2b2e39b
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/schema.md
@@ -0,0 +1,113 @@
+# nf_core.schema
+
+Code to deal with pipeline JSON Schema
+
+### _`class{:python}`_`nf_core.schema.PipelineSchema{:python}`
+
+Bases: `object`
+
+Class to generate a schema object with
+functions to handle pipeline JSON Schema
+
+#### `add_schema_found_configs(){:python}`
+
+Add anything that’s found in the Nextflow params that’s missing in the pipeline schema
+
+#### `build_schema(pipeline_dir, no_prompts, web_only, url){:python}`
+
+Interactively build a new pipeline schema for a pipeline
+
+#### `build_schema_param(p_val){:python}`
+
+Build a pipeline schema dictionary for a param interactively
+
+#### `get_schema_defaults(){:python}`
+
+Generate set of default input parameters from schema.
+
+Saves defaults to self.schema_defaults
+Returns count of how many parameters were found (with or without a default value)
+
+#### `get_schema_path(path, local_only=False, revision=None){:python}`
+
+Given a pipeline name, directory, or path, set self.schema_filename
+
+#### `get_web_builder_response(){:python}`
+
+Given a URL for a Schema build response, recursively query it until results are ready.
+Once ready, validate Schema and write to disk.
+
+#### `get_wf_params(){:python}`
+
+Load the pipeline parameter defaults using nextflow config
+Strip out only the params. values and ignore anything that is not a flat variable
+
+#### `launch_web_builder(){:python}`
+
+Send pipeline schema to web builder and wait for response
+
+#### `load_input_params(params_path){:python}`
+
+Load a given path to a parameters file (JSON/YAML)
+
+These should be input parameters used to run a pipeline with
+the Nextflow -params-file option.
+
+#### `load_lint_schema(){:python}`
+
+Load and lint a given schema to see if it looks valid
+
+#### `load_schema(){:python}`
+
+Load a pipeline schema from a file
+
+#### `make_skeleton_schema(){:python}`
+
+Make a new pipeline schema from the template
+
+#### `prompt_remove_schema_notfound_config(p_key){:python}`
+
+Check if a given key is found in the nextflow config params and prompt to remove it if not
+
+Returns True if it should be removed, False if not.
+
+#### `remove_schema_notfound_configs(){:python}`
+
+Go through top-level schema and all definitions sub-schemas to remove
+anything that’s not in the nextflow config.
+
+#### `remove_schema_notfound_configs_single_schema(schema){:python}`
+
+Go through a single schema / set of properties and strip out
+anything that’s not in the nextflow config.
+
+Takes: Schema or sub-schema with properties key
+Returns: Cleaned schema / sub-schema
+
+#### `sanitise_param_default(param){:python}`
+
+Given a param, ensure that the default value is the correct variable type
+
+#### `save_schema(){:python}`
+
+Save a pipeline schema to a file
+
+#### `validate_default_params(){:python}`
+
+Check that all default parameters in the schema are valid
+Ignores ‘required’ flag, as required parameters might have no defaults
+
+#### `validate_params(){:python}`
+
+Check given parameters against a schema and validate
+
+#### `validate_schema(schema=None){:python}`
+
+Check that the Schema is valid
+
+Returns: Number of parameters found
+
+#### `validate_schema_title_description(schema=None){:python}`
+
+Extra validation command for linting.
+Checks that the schema “$id”, “title” and “description” attributes match the pipeline config.
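+
+A minimal sketch of loading and linting a schema from Python (the pipeline path is an illustrative assumption):
+
+```python
+import nf_core.schema
+
+schema_obj = nf_core.schema.PipelineSchema()
+schema_obj.get_schema_path("./my-pipeline", local_only=True)  # hypothetical local path
+schema_obj.load_lint_schema()  # load the schema file and report any validation problems
+```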
diff --git a/src/content/tools/docs/1.14/api/sync.md b/src/content/tools/docs/1.14/api/sync.md
new file mode 100644
index 0000000000..e327553c3f
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/sync.md
@@ -0,0 +1,151 @@
+# nf_core.sync
+
+Synchronise a pipeline TEMPLATE branch with the template.
+
+### _`class{:python}`_`nf_core.sync.PipelineSync(pipeline_dir, from_branch=None, make_pr=False, gh_repo=None, gh_username=None){:python}`
+
+Bases: `object`
+
+Object to hold syncing information and results.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **from_branch** (_str_) – The branch to use to fetch config vars. If not set, will use current active branch
+ - **make_pr** (_bool_) – Set this to True to create a GitHub pull-request with the changes
+ - **gh_username** (_str_) – GitHub username
+ - **gh_repo** (_str_) – GitHub repository name
+
+#### `pipeline_dir{:python}`
+
+Path to target pipeline directory
+
+- **Type:**
+ str
+
+#### `from_branch{:python}`
+
+Repo branch to use when collecting workflow variables. Default: active branch.
+
+- **Type:**
+ str
+
+#### `original_branch{:python}`
+
+Repo branch that was checked out before we started.
+
+- **Type:**
+ str
+
+#### `made_changes{:python}`
+
+Whether making the new template pipeline introduced any changes
+
+- **Type:**
+ bool
+
+#### `make_pr{:python}`
+
+Whether to try to automatically make a PR on GitHub.com
+
+- **Type:**
+ bool
+
+#### `required_config_vars{:python}`
+
+List of nextflow variables required to make template pipeline
+
+- **Type:**
+ list
+
+#### `gh_username{:python}`
+
+GitHub username
+
+- **Type:**
+ str
+
+#### `gh_repo{:python}`
+
+GitHub repository name
+
+- **Type:**
+ str
+
+#### `checkout_template_branch(){:python}`
+
+Try to check out the origin/TEMPLATE in a new TEMPLATE branch.
+If this fails, try to check out an existing local TEMPLATE branch.
+
+#### `close_open_pr(pr){:python}`
+
+Given a PR API response, add a comment and close.
+
+#### `close_open_template_merge_prs(){:python}`
+
+Get all template merging branches (starting with ‘nf-core-template-merge-‘)
+and check for any open PRs from these branches to the self.from_branch
+If open PRs are found, add a comment and close them
+
+#### `commit_template_changes(){:python}`
+
+If we have any changes with the new template files, make a git commit
+
+#### `create_merge_base_branch(){:python}`
+
+Create a new branch from the updated TEMPLATE branch
+This branch will then be used to create the PR
+
+#### `delete_template_branch_files(){:python}`
+
+Delete all files in the TEMPLATE branch
+
+#### `get_wf_config(){:python}`
+
+Check out the target branch if requested and fetch the nextflow config.
+Check that we have the required config variables.
+
+#### `inspect_sync_dir(){:python}`
+
+Takes a look at the target directory for syncing. Checks that it’s a git repo
+and makes sure that there are no uncommitted changes.
+
+#### `make_pull_request(){:python}`
+
+Create a pull request to a base branch (default: dev),
+from a head branch (default: TEMPLATE)
+
+Returns: An instance of class requests.Response
+
+#### `make_template_pipeline(){:python}`
+
+Delete all files and make a fresh template using the workflow variables
+
+#### `push_merge_branch(){:python}`
+
+Push the newly created merge branch to the remote repository
+
+#### `push_template_branch(){:python}`
+
+If we made any changes, push the TEMPLATE branch to the default remote
+and try to make a PR. If we don’t have the auth token, try to figure out a URL
+for the PR and print this to the console.
+
+#### `reset_target_dir(){:python}`
+
+Reset the target pipeline directory. Check out the original branch.
+
+#### `sync(){:python}`
+
+Find workflow attributes, create a new template pipeline on TEMPLATE
+
+### _`exception{:python}`_`nf_core.sync.PullRequestException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error creating a Pull-Request on GitHub.com
+
+### _`exception{:python}`_`nf_core.sync.SyncException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error with TEMPLATE branch synchronisation
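+
+A rough usage sketch (the pipeline path is an illustrative assumption):
+
+```python
+import nf_core.sync
+
+sync_obj = nf_core.sync.PipelineSync("./my-pipeline", make_pr=False)  # hypothetical path
+try:
+    sync_obj.sync()
+except (nf_core.sync.SyncException, nf_core.sync.PullRequestException) as err:
+    print(f"Template sync failed: {err}")
+```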
diff --git a/src/content/tools/docs/1.14/api/utils.md b/src/content/tools/docs/1.14/api/utils.md
new file mode 100644
index 0000000000..c3761e9e96
--- /dev/null
+++ b/src/content/tools/docs/1.14/api/utils.md
@@ -0,0 +1,243 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### _`class{:python}`_`nf_core.utils.Pipeline(wf_path){:python}`
+
+Bases: `object`
+
+Object to hold information about a local pipeline.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (`environment.yml`).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `nf_config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `git_sha{:python}`
+
+The git sha for the repo commit / current GitHub pull-request ($GITHUB_PR_COMMIT)
+
+- **Type:**
+ str
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `wf_path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `schema_obj{:python}`
+
+A `PipelineSchema` object
+
+- **Type:**
+ obj
+
+#### `_fp(fn){:python}`
+
+Convenience function to get full path to a file in the pipeline
+
+#### `_list_files(){:python}`
+
+Get a list of all files in the pipeline
+
+#### `_load(){:python}`
+
+Run core load functions
+
+#### `_load_conda_environment(){:python}`
+
+Try to load the pipeline environment.yml file, if it exists
+
+#### `_load_pipeline_config(){:python}`
+
+Get the nextflow config for this pipeline
+
+Once loaded, set a few convenience reference class attributes
+
+### `nf_core.utils.anaconda_package(dep, dep_channels=['conda-forge', 'bioconda', 'defaults']){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ - **dep** (_str_) – A conda package name.
+ - **dep_channels** (_list_) – list of conda channels to use
+- **Raises:**
+  - **A LookupError**, if the connection fails, times out or gives an unexpected status code
+  - **A ValueError**, if the package name can not be found (404)
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.custom_yaml_dumper(){:python}`
+
+Overwrite default PyYAML output to make Prettier YAML linting happy
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.get_biocontainer_tag(package, version){:python}`
+
+Given a bioconda package and version, looks for a container
+at quay.io and returns the tag of the most recent image
+that matches the package version.
+
+Sends a HTTP GET request to the quay.io API.
+
+- **Parameters:**
+  - **package** (_str_) – A bioconda package name.
+  - **version** (_str_) – Version of the bioconda package.
+- **Raises:**
+  - **A LookupError**, if the connection fails, times out or gives an unexpected status code
+  - **A ValueError**, if the package name can not be found (404)
+
+### `nf_core.utils.get_repo_releases_branches(pipeline, wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ - **pipeline** (_str_) – GitHub repo username/repo
+ - **wfs** – A nf_core.list.Workflows() object, where get_remote_workflows() has been called.
+- **Returns:**
+ Array of releases, Array of branches
+- **Return type:**
+ wf_releases, wf_branches (tuple)
+- **Raises:**
+  **LookupError**, if the pipeline can not be found.
+
+### `nf_core.utils.github_api_auto_auth(){:python}`
+
+### `nf_core.utils.is_file_binary(path){:python}`
+
+Check file path to see if it is a binary file
+
+### `nf_core.utils.nextflow_cmd(cmd){:python}`
+
+Run a Nextflow command and capture the output. Handle errors nicely
+
+### `nf_core.utils.parse_anaconda_licence(anaconda_response, version=None){:python}`
+
+Given a response from the anaconda API using anaconda_package, parse the software licences.
+
+Returns: Set of licence types
+
+### `nf_core.utils.pip_package(dep){:python}`
+
+Query PyPI package information.
+
+Sends a HTTP GET request to the PyPI remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPI package name.
+- **Raises:**
+  - **A LookupError**, if the connection fails or times out
+  - **A ValueError**, if the package name can not be found
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes argument api_url for URL
+
+Expects the API response to be valid JSON and contain a top-level ‘status’ key.
+
+### `nf_core.utils.prompt_pipeline_release_branch(wf_releases, wf_branches){:python}`
+
+Prompt for pipeline release / branch
+
+- **Parameters:**
+ - **wf_releases** (_array_) – Array of repo releases as returned by the GitHub API
+ - **wf_branches** (_array_) – Array of repo branches, as returned by the GitHub API
+- **Returns:**
+ Selected release / branch name
+- **Return type:**
+ choice (str)
+
+### `nf_core.utils.prompt_remote_pipeline_name(wfs){:python}`
+
+Prompt for the pipeline name with questionary
+
+- **Parameters:**
+ **wfs** – A nf_core.list.Workflows() object, where get_remote_workflows() has been called.
+- **Returns:**
+ GitHub repo - username/repo
+- **Return type:**
+ pipeline (str)
+- **Raises:**
+  **AssertionError**, if pipeline cannot be found
+
+### `nf_core.utils.rich_force_colors(){:python}`
+
+Check if any environment variables are set to force Rich to use coloured output
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
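+
+As a small sketch of one of these helpers (the pipeline path is illustrative; the exact key names in the returned dict are an assumption based on the flattened config described above):
+
+```python
+import nf_core.utils
+
+# Resolve the Nextflow configuration for a local pipeline into a flat dict
+config = nf_core.utils.fetch_wf_config("./my-pipeline")  # hypothetical path
+print(config.get("manifest.name"))  # assumed flattened key name
+```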
diff --git a/src/content/tools/docs/1.14/index.md b/src/content/tools/docs/1.14/index.md
new file mode 100644
index 0000000000..c3d07a7628
--- /dev/null
+++ b/src/content/tools/docs/1.14/index.md
@@ -0,0 +1,13 @@
+# nf-core/tools documentation
+
+This documentation is for the `nf-core/tools` package.
+
+Primarily, it describes the different [code lint tests](lint_tests/index.html)
+run by `nf-core lint` (typically visited by a developer when their pipeline fails a given
+test), and also serves as a reference for the `nf_core` [Python package API](api/index.html).
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/actions_awsfulltest.md b/src/content/tools/docs/1.14/pipeline_lint_tests/actions_awsfulltest.md
new file mode 100644
index 0000000000..fd32fa4a32
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/actions_awsfulltest.md
@@ -0,0 +1,30 @@
+# actions_awsfulltest
+
+#### `PipelineLint.actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS.
+This should ensure that the pipeline runs as expected on AWS and provide a resource estimation.
+
+The GitHub Actions workflow is called `awsfulltest.yml`, and it can be found in the `.github/workflows/` directory.
+
+:::warning
+This workflow incurs AWS costs, therefore it should only be triggered for pipeline releases:
+`workflow_run` (after the docker hub release workflow) and `workflow_dispatch`.
+:::
+
+:::note
+You can manually trigger the AWS tests by going to the Actions tab on the pipeline GitHub repository and selecting the
+nf-core AWS full size tests workflow on the left.
+:::
+
+:::note
+For tests on full-size data prior to release, the [Nextflow Tower](https://tower.nf) launch feature can be used.
+:::
+
+The `.github/workflows/awsfulltest.yml` file is tested for the following:
+
+- Must be turned on for `workflow_dispatch`.
+- Must be turned on for `workflow_run` with `workflows: ["nf-core Docker push (release)"]` and `types: [completed]`
+- Should run the `test_full` profile, which should be edited to provide links to full-size datasets. If it runs the `test` profile instead, a warning is given.
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/actions_awstest.md b/src/content/tools/docs/1.14/pipeline_lint_tests/actions_awstest.md
new file mode 100644
index 0000000000..4e4698c4e9
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/actions_awstest.md
@@ -0,0 +1,24 @@
+# actions_awstest
+
+#### `PipelineLint.actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on AWS.
+This should ensure that the pipeline runs as expected on AWS (which often has its own unique edge cases).
+
+:::warning
+Running tests on AWS incurs costs, so these tests are not triggered automatically.
+Instead, they use the `workflow_dispatch` trigger, which allows for manual triggering
+of the workflow when testing on AWS is desired.
+:::
+
+:::note
+You can trigger the tests by going to the Actions tab on the pipeline GitHub repository
+and selecting the nf-core AWS test workflow on the left.
+:::
+
+The `.github/workflows/awstest.yml` file is tested for the following:
+
+- Must _not_ be turned on for `push` or `pull_request`.
+- Must be turned on for `workflow_dispatch`.
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/actions_ci.md b/src/content/tools/docs/1.14/pipeline_lint_tests/actions_ci.md
new file mode 100644
index 0000000000..34f2096391
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/actions_ci.md
@@ -0,0 +1,63 @@
+# actions_ci
+
+#### `PipelineLint.actions_ci(){:python}`
+
+Checks that the GitHub Actions pipeline CI (Continuous Integration) workflow is valid.
+
+The `.github/workflows/ci.yml` GitHub Actions workflow runs the pipeline on a minimal test
+dataset using `-profile test` to check that no breaking changes have been introduced.
+Final result files are not checked, just that the pipeline exits successfully.
+
+This lint test checks this GitHub Actions workflow file for the following:
+
+- Workflow must be triggered on the following events:
+ ```yaml
+ on:
+ push:
+ branches:
+ - dev
+ pull_request:
+ release:
+ types: [published]
+ ```
+- The minimum Nextflow version specified in the pipeline’s `nextflow.config` matches that defined by `nxf_ver` in the test matrix:
+
+ ```yaml
+ strategy:
+ matrix:
+ # Nextflow versions: check pipeline minimum and current latest
+ nxf_ver: ['19.10.0', '']
+ ```
+
+ :::note
+ These `matrix` variables run the test workflow twice, varying the `nxf_ver` variable each time.
+ This is used in the `nextflow run` commands to test the pipeline with both the latest available version
+ of the pipeline (`''`) and the stated minimum required version.
+ :::
+
+- The Docker container for the pipeline must use the correct pipeline version number:
+
+ > - Development pipelines:
+ > ```bash
+  >   docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:dev
+ > ```
+ > - Released pipelines:
+ > ```bash
+  >   docker tag nfcore/<pipeline_name>:dev nfcore/<pipeline_name>:<pipeline-version>
+ > ```
+ > - Complete example for a released pipeline called _nf-core/example_ with version number `1.0.0`:
+ > ```yaml
+ > - name: Build new docker image
+ > if: env.GIT_DIFF
+ > run: docker build --no-cache . -t nfcore/example:1.0.0
+  >
+  >   - name: Pull docker image
+  >     if: ${{ !env.GIT_DIFF }}
+  >     run: |
+  >       docker pull nfcore/example:dev
+  >       docker tag nfcore/example:dev nfcore/example:1.0.0
+  >   ```
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/actions_schema_validation.md b/src/content/tools/docs/1.14/pipeline_lint_tests/actions_schema_validation.md
new file mode 100644
index 0000000000..518037bc74
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/actions_schema_validation.md
@@ -0,0 +1,12 @@
+# actions_schema_validation
+
+#### `PipelineLint.actions_schema_validation(){:python}`
+
+Checks that the GitHub Action workflow yml/yaml files adhere to the correct schema
+
+nf-core pipelines use GitHub Actions workflows to run CI tests and to check formatting and linting, among other things.
+These workflows are defined by `yml` scripts in `.github/workflows/`. This lint test verifies that these scripts are valid
+by comparing them against the [JSON schema for GitHub workflows](https://json.schemastore.org/github-workflow).
+
+To pass this test, make sure that all your workflows contain the required properties `on` and `jobs` and that
+all other properties are of the correct type, as specified in the schema (link above).
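+
+As a rough illustration of what such a check involves (not the actual nf-core/tools implementation), the sketch below validates a single workflow file against that schema. It assumes the `PyYAML` and `jsonschema` packages are installed:
+
+```python
+# Illustrative sketch only, not the nf-core/tools implementation.
+import json
+import urllib.request
+
+import jsonschema
+import yaml
+
+schema = json.load(urllib.request.urlopen("https://json.schemastore.org/github-workflow"))
+
+with open(".github/workflows/ci.yml") as fh:
+    workflow = yaml.safe_load(fh)
+
+# PyYAML parses the bare key `on` as the boolean True, so restore it before validating
+workflow = {("on" if key is True else key): value for key, value in workflow.items()}
+
+jsonschema.validate(workflow, schema)  # raises ValidationError if e.g. `on` or `jobs` is missing
+```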
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/conda_dockerfile.md b/src/content/tools/docs/1.14/pipeline_lint_tests/conda_dockerfile.md
new file mode 100644
index 0000000000..388e51dd9a
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/conda_dockerfile.md
@@ -0,0 +1,33 @@
+# conda_dockerfile
+
+#### `PipelineLint.conda_dockerfile(){:python}`
+
+Checks the Dockerfile for use with Conda environments
+
+:::note
+This test only runs if there is both an `environment.yml`
+and `Dockerfile` present in the pipeline root directory.
+:::
+
+If a workflow has a conda `environment.yml` file, the `Dockerfile` should use this
+to create the docker image. These files are typically very short, just creating the conda
+environment inside the container.
+
+This linting test checks for the following:
+
+- All of the following lines are present in the file (where `PIPELINE` is your pipeline name):
+ > ```Dockerfile
+ > FROM nfcore/base:VERSION
+ > COPY environment.yml /
+ > RUN conda env create --quiet -f /environment.yml && conda clean -a
+ > RUN conda env export --name PIPELINE > PIPELINE.yml
+ > ENV PATH /opt/conda/envs/PIPELINE/bin:$PATH
+ > ```
+- That the `FROM nfcore/base:VERSION` is tagged to the most recent release of nf-core/tools
+ > - The linting tool compares the tag against the currently installed version of tools.
+ > - This line is not checked if running a development version of nf-core/tools.
+
+:::note
+Additional lines and different metadata can be added to the `Dockerfile`
+without causing this lint test to fail.
+:::
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/conda_env_yaml.md b/src/content/tools/docs/1.14/pipeline_lint_tests/conda_env_yaml.md
new file mode 100644
index 0000000000..f2dab4fba1
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/conda_env_yaml.md
@@ -0,0 +1,36 @@
+# conda_env_yaml
+
+#### `PipelineLint.conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+:::note
+This test is ignored if there is not an `environment.yml`
+file present in the pipeline root directory.
+:::
+
+DSL1 nf-core pipelines use a single Conda environment to manage all software
+dependencies for a workflow. This can be used directly with `-profile conda`
+and is also used in the `Dockerfile` to build a docker image.
+
+This test checks the conda `environment.yml` file to ensure that it follows nf-core guidelines.
+Each dependency is checked using the [Anaconda API service](https://api.anaconda.org/docs).
+Dependency sublists are ignored with the exception of `- pip`: these packages are also checked
+for pinned version numbers and checked using the [PyPI JSON API](https://wiki.python.org/moin/PyPIJSON).
+
+Specifically, this lint test makes sure that:
+
+- The environment `name` must match the pipeline name and version
+ > - The pipeline name is defined in the config variable `manifest.name`
+ > - Replace the slash with a hyphen as environment names shouldn’t contain that character
+ > - Example: For `nf-core/test` version 1.4, the conda environment name should be `nf-core-test-1.4`
+- All package dependencies have a specific version number pinned
+ > :::warning
+ > Remember that Conda package versions should be pinned with one equals sign (`toolname=1.1`),
+ > but pip uses two (`toolname==1.2`)
+ > :::
+- That package versions can be found and are the latest available
+ > - Test will go through all conda channels listed in the file, or check PyPI if `pip`
+ > - Conda dependencies with pinned channels (eg. `conda-forge::openjdk`) are ok too
+ > - In addition to the package name, the pinned version is checked
+ > - If a newer version is available, a warning will be reported
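+
+As a rough illustration of the version check described above (not the actual nf-core/tools implementation), the sketch below queries the Anaconda API for one pinned dependency. The dependency string is a made-up example and the `requests` package is assumed to be installed:
+
+```python
+# Illustrative sketch only, not the nf-core/tools implementation.
+import requests
+
+dependency = "bioconda::samtools=1.11"  # made-up example of a pinned conda dependency
+channel, _, rest = dependency.partition("::")
+name, _, pinned = rest.partition("=")
+
+response = requests.get(f"https://api.anaconda.org/package/{channel}/{name}", timeout=10)
+response.raise_for_status()
+latest = response.json()["latest_version"]  # field name assumed from the Anaconda API response
+
+if latest != pinned:
+    print(f"A newer version of {name} is available: {latest} (pinned: {pinned})")
+```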
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/files_exist.md b/src/content/tools/docs/1.14/pipeline_lint_tests/files_exist.md
new file mode 100644
index 0000000000..14c85336d8
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/files_exist.md
@@ -0,0 +1,77 @@
+# files_exist
+
+#### `PipelineLint.files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that specified
+files are either present or absent, as required.
+
+:::note
+This test raises an `AssertionError` if neither `nextflow.config` nor `main.nf` is found.
+If these files are not found then this cannot be a Nextflow pipeline and something has gone badly wrong.
+All lint tests are stopped immediately with a critical error message.
+:::
+
+Files that _must_ be present:
+
+```bash
+.gitattributes
+.github/.dockstore.yml
+.github/CONTRIBUTING.md
+.github/ISSUE_TEMPLATE/bug_report.md
+.github/ISSUE_TEMPLATE/config.yml
+.github/ISSUE_TEMPLATE/feature_request.md
+.github/markdownlint.yml
+.github/PULL_REQUEST_TEMPLATE.md
+.github/workflows/branch.yml
+.github/workflows/ci.yml
+.github/workflows/linting_comment.yml
+.github/workflows/linting.yml
+[LICENSE, LICENSE.md, LICENCE, LICENCE.md] # NB: British / American spelling
+assets/email_template.html
+assets/email_template.txt
+assets/nf-core-PIPELINE_logo.png
+assets/sendmail_template.txt
+bin/markdown_to_html.py
+CHANGELOG.md
+CODE_OF_CONDUCT.md
+docs/images/nf-core-PIPELINE_logo.png
+docs/output.md
+docs/README.md
+docs/usage.md
+lib/nfcore_external_java_deps.jar
+lib/NfcoreSchema.groovy
+nextflow_schema.json
+nextflow.config
+README.md
+```
+
+Files that _should_ be present:
+
+```bash
+main.nf
+environment.yml
+Dockerfile
+conf/base.config
+.github/workflows/awstest.yml
+.github/workflows/awsfulltest.yml
+```
+
+Files that _must not_ be present:
+
+```bash
+Singularity
+parameters.settings.json
+bin/markdown_to_html.r
+conf/aws.config
+.github/workflows/push_dockerhub.yml
+```
+
+Files that _should not_ be present:
+
+```bash
+.travis.yml
+```
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/files_unchanged.md b/src/content/tools/docs/1.14/pipeline_lint_tests/files_unchanged.md
new file mode 100644
index 0000000000..ea06ada29f
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/files_unchanged.md
@@ -0,0 +1,57 @@
+# files_unchanged
+
+#### `PipelineLint.files_unchanged(){:python}`
+
+Checks that certain pipeline files are not modified from template output.
+
+Iterates through the pipeline’s directory content and compares specified files
+against output from the template using the pipeline’s metadata. File content
+should not be modified / missing.
+
+Files that must be unchanged:
+
+```default
+'.gitattributes',
+'.github/.dockstore.yml',
+'.github/CONTRIBUTING.md',
+'.github/ISSUE_TEMPLATE/bug_report.md',
+'.github/ISSUE_TEMPLATE/config.yml',
+'.github/ISSUE_TEMPLATE/feature_request.md',
+'.github/markdownlint.yml',
+'.github/PULL_REQUEST_TEMPLATE.md',
+'.github/workflows/branch.yml',
+'.github/workflows/linting_comment.yml',
+'.github/workflows/linting.yml',
+'assets/email_template.html',
+'assets/email_template.txt',
+'assets/nf-core-PIPELINE_logo.png',
+'assets/sendmail_template.txt',
+'bin/markdown_to_html.py',
+'CODE_OF_CONDUCT.md',
+'docs/images/nf-core-PIPELINE_logo.png',
+'docs/README.md',
+'lib/nfcore_external_java_deps.jar'
+'lib/NfcoreSchema.groovy',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+```
+
+Files that can have additional content but must include the template contents:
+
+```default
+'.github/workflows/push_dockerhub_dev.yml',
+'.github/workflows/push_dockerhub_release.yml',
+'.gitignore',
+'assets/multiqc_config.yaml',
+```
+
+:::note
+You can configure the `nf-core lint` tests to ignore any of these checks by setting
+the `files_unchanged` key as follows in your linting config file. For example:
+
+```yaml
+files_unchanged:
+ - .github/workflows/branch.yml
+ - assets/multiqc_config.yaml
+```
+
+:::
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/index.md b/src/content/tools/docs/1.14/pipeline_lint_tests/index.md
new file mode 100644
index 0000000000..0dc9aca3b7
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/index.md
@@ -0,0 +1,42 @@
+# Pipeline lint tests
+
+# Tests:
+
+- [actions_awsfulltest](actions_awsfulltest)
+ - [`PipelineLint.actions_awsfulltest()`](actions_awsfulltest#nf_core.lint.PipelineLint.actions_awsfulltest)
+- [actions_awstest](actions_awstest)
+ - [`PipelineLint.actions_awstest()`](actions_awstest#nf_core.lint.PipelineLint.actions_awstest)
+- [actions_ci](actions_ci)
+ - [`PipelineLint.actions_ci()`](actions_ci#nf_core.lint.PipelineLint.actions_ci)
+- [actions_schema_validation](actions_schema_validation)
+ - [`PipelineLint.actions_schema_validation()`](actions_schema_validation#nf_core.lint.PipelineLint.actions_schema_validation)
+- [conda_dockerfile](conda_dockerfile)
+ - [`PipelineLint.conda_dockerfile()`](conda_dockerfile#nf_core.lint.PipelineLint.conda_dockerfile)
+- [conda_env_yaml](conda_env_yaml)
+ - [`PipelineLint.conda_env_yaml()`](conda_env_yaml#nf_core.lint.PipelineLint.conda_env_yaml)
+- [files_exist](files_exist)
+ - [`PipelineLint.files_exist()`](files_exist#nf_core.lint.PipelineLint.files_exist)
+- [files_unchanged](files_unchanged)
+ - [`PipelineLint.files_unchanged()`](files_unchanged#nf_core.lint.PipelineLint.files_unchanged)
+- [merge_markers](merge_markers)
+ - [`PipelineLint.merge_markers()`](merge_markers#nf_core.lint.PipelineLint.merge_markers)
+- [nextflow_config](nextflow_config)
+ - [`PipelineLint.nextflow_config()`](nextflow_config#nf_core.lint.PipelineLint.nextflow_config)
+- [params_used](params_used)
+ - [`PipelineLint.params_used()`](params_used#nf_core.lint.PipelineLint.params_used)
+- [pipeline_name_conventions](pipeline_name_conventions)
+ - [`PipelineLint.pipeline_name_conventions()`](pipeline_name_conventions#nf_core.lint.PipelineLint.pipeline_name_conventions)
+- [pipeline_todos](pipeline_todos)
+ - [`PipelineLint.pipeline_todos()`](pipeline_todos#nf_core.lint.PipelineLint.pipeline_todos)
+- [readme](readme)
+ - [`PipelineLint.readme()`](readme#nf_core.lint.PipelineLint.readme)
+- [schema_description](schema_description)
+ - [`PipelineLint.schema_description()`](schema_description#nf_core.lint.PipelineLint.schema_description)
+- [schema_lint](schema_lint)
+ - [`PipelineLint.schema_lint()`](schema_lint#nf_core.lint.PipelineLint.schema_lint)
+- [schema_params](schema_params)
+ - [`PipelineLint.schema_params()`](schema_params#nf_core.lint.PipelineLint.schema_params)
+- [template_strings](template_strings)
+ - [`PipelineLint.template_strings()`](template_strings#nf_core.lint.PipelineLint.template_strings)
+- [version_consistency](version_consistency)
+ - [`PipelineLint.version_consistency()`](version_consistency#nf_core.lint.PipelineLint.version_consistency)
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/merge_markers.md b/src/content/tools/docs/1.14/pipeline_lint_tests/merge_markers.md
new file mode 100644
index 0000000000..38076814b1
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/merge_markers.md
@@ -0,0 +1,8 @@
+# merge_markers
+
+#### `PipelineLint.merge_markers(){:python}`
+
+Check for remaining merge markers.
+
+This test looks for remaining merge markers in the code, e.g.:
+`>>>>>>>` or `<<<<<<<`
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/nextflow_config.md b/src/content/tools/docs/1.14/pipeline_lint_tests/nextflow_config.md
new file mode 100644
index 0000000000..8d911801f5
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/nextflow_config.md
@@ -0,0 +1,96 @@
+# nextflow_config
+
+#### `PipelineLint.nextflow_config(){:python}`
+
+Checks the pipeline configuration for required variables.
+
+All nf-core pipelines are required to be configured with a minimal set of variable
+names. This test fails or throws warnings if required variables are not set.
+
+:::note
+These config variables must be set in `nextflow.config` or another config
+file imported from there. Any variables set in nextflow script files (eg. `main.nf`)
+are not checked and will be assumed to be missing.
+:::
+
+**The following variables fail the test if missing:**
+
+- `params.outdir`: A directory in which all pipeline results should be saved
+- `manifest.name`: The pipeline name. Should begin with `nf-core/`
+- `manifest.description`: A description of the pipeline
+- `manifest.version`
+ - The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/).
+ - If `--release` is set when running `nf-core lint`, the version number must not contain the string `dev`
+  - If `--release` is _not_ set, the version should end in `dev` (a warning is triggered if not)
+- `manifest.nextflowVersion`
+ - The minimum version of Nextflow required to run the pipeline.
+ - Should be `>=` or `!>=` and a version number, eg. `manifest.nextflowVersion = '>=0.31.0'` (see [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html#scope-manifest))
+ - `>=` warns about old versions but tries to run anyway, `!>=` fails for old versions. Only use the latter if you _know_ that the pipeline will certainly fail before this version.
+ - This should correspond to the `NXF_VER` version tested by GitHub Actions.
+- `manifest.homePage`
+ - The homepage for the pipeline. Should be the nf-core GitHub repository URL,
+ so beginning with `https://github.com/nf-core/`
+- `timeline.enabled`, `trace.enabled`, `report.enabled`, `dag.enabled`
+ - The nextflow timeline, trace, report and DAG should be enabled by default (set to `true`)
+- `process.cpus`, `process.memory`, `process.time`
+ - Default CPUs, memory and time limits for tasks
+- `params.input`
+ - Input parameter to specify input data, specify this to avoid a warning
+ - Typical usage:
+ - `params.input`: Input data that is not NGS sequencing data
+- `params.custom_config_version`
+ > - Should always be set to default value `master`
+- `params.custom_config_base`
+
+ > - Should always be set to default value:
+
+ > `https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}`
+
+- `params.show_hidden_params`
+  > - Determines whether boilerplate params are shown by the schema. Set to `false` by default
+- `params.schema_ignore_params`
+  > - A comma-separated string of inputs that the schema validation should ignore.
+
+**The following variables throw warnings if missing:**
+
+- `manifest.mainScript`: The filename of the main pipeline script (should be `main.nf`)
+- `timeline.file`, `trace.file`, `report.file`, `dag.file`
+  - Default filenames for the timeline, trace, report and DAG
+ - The DAG file path should end with `.svg` (If Graphviz is not installed, Nextflow will generate a `.dot` file instead)
+- `process.container`
+ - Docker Hub handle for a single default container for use by all processes.
+ - Must specify a tag that matches the pipeline version number if set.
+ - If the pipeline version number contains the string `dev`, the DockerHub tag must be `:dev`
+
+**The following variables are deprecated and fail the test if they are still present:**
+
+- `params.version`: The old method for specifying the pipeline version. Replaced by `manifest.version`
+- `params.nf_required_version`: The old method for specifying the minimum Nextflow version. Replaced by `manifest.nextflowVersion`
+- `params.container`: The old method for specifying the dockerhub container address. Replaced by `process.container`
+- `igenomesIgnore`: Changed to `igenomes_ignore`
+ > :::note
+ > The `snake_case` convention should now be used when defining pipeline parameters
+ > :::
+
+**The following Nextflow syntax is deprecated and fails the test if present:**
+
+- Process-level configuration syntax still using the old Nextflow syntax, for example: `process.$fastqc` instead of `process withName:'fastqc'`.
+
+:::note
+You can choose to ignore tests for the presence or absence of specific config variables
+by creating a file called `.nf-core-lint.yml` in the root of your pipeline and listing
+the config variables that should be ignored. For example:
+
+```yaml
+nextflow_config:
+ - params.input
+```
+
+:::
+
+The other checks in this test (deprecated syntax etc.) cannot be individually identified,
+but you can skip the entire test block if you wish:
+
+```yaml
+nextflow_config: False
+```
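+
+For a rough idea of what the required-variable check involves (illustrative only, not the actual nf-core/tools implementation), the sketch below flattens the pipeline configuration with `nf_core.utils.fetch_wf_config()` (documented elsewhere in these API docs) and reports missing variables:
+
+```python
+# Illustrative sketch only, not the nf-core/tools implementation.
+import nf_core.utils
+
+# A subset of the required config variables listed above
+required = [
+    "params.outdir", "manifest.name", "manifest.description", "manifest.version",
+    "manifest.nextflowVersion", "manifest.homePage",
+    "timeline.enabled", "trace.enabled", "report.enabled", "dag.enabled",
+    "process.cpus", "process.memory", "process.time", "params.input",
+]
+
+config = nf_core.utils.fetch_wf_config("path/to/pipeline")  # flat dict of config variables
+missing = [var for var in required if var not in config]
+print("Missing required config variables:", missing or "none")
+```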
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/params_used.md b/src/content/tools/docs/1.14/pipeline_lint_tests/params_used.md
new file mode 100644
index 0000000000..7290540d02
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/params_used.md
@@ -0,0 +1,5 @@
+# params_used
+
+#### `PipelineLint.params_used(){:python}`
+
+Checks that params in `nextflow.config` are mentioned in `main.nf`.
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/pipeline_name_conventions.md b/src/content/tools/docs/1.14/pipeline_lint_tests/pipeline_name_conventions.md
new file mode 100644
index 0000000000..788d77e41a
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/pipeline_name_conventions.md
@@ -0,0 +1,13 @@
+# pipeline_name_conventions
+
+#### `PipelineLint.pipeline_name_conventions(){:python}`
+
+Checks that the pipeline name adheres to nf-core conventions.
+
+In order to ensure consistent naming, pipeline names should contain only lower-case alphanumeric characters.
+Otherwise a warning is displayed.
+
+:::warning
+DockerHub is very picky about image names and doesn’t even allow hyphens (we are `nfcore`).
+This is a large part of why we set this rule.
+:::
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/pipeline_todos.md b/src/content/tools/docs/1.14/pipeline_lint_tests/pipeline_todos.md
new file mode 100644
index 0000000000..e8a73d1663
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/pipeline_todos.md
@@ -0,0 +1,28 @@
+# pipeline_todos
+
+#### `PipelineLint.pipeline_todos(){:python}`
+
+Check for nf-core _TODO_ lines.
+
+The nf-core workflow template contains a number of comment lines to help developers
+of new pipelines know where they need to edit files and add content.
+They typically have the following format:
+
+```groovy
+// TODO nf-core: Make some kind of change to the workflow here
+```
+
+...or in markdown:
+
+```html
+<!-- TODO nf-core: Make some kind of change to the docs here -->
+```
+
+This lint test runs through all files in the pipeline and searches for these lines.
+If any are found they will throw a warning.
+
+:::note
+Note that many GUI code editors have plugins to list all instances of _TODO_
+in a given project directory. This is a very quick and convenient way to get
+started on your pipeline!
+:::
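+
+The same idea in a few lines of Python (an illustrative sketch only, not the actual nf-core/tools implementation):
+
+```python
+# Illustrative sketch only: list remaining "TODO nf-core" lines in a pipeline directory.
+from pathlib import Path
+
+for path in Path("path/to/pipeline").rglob("*"):
+    if not path.is_file():
+        continue
+    try:
+        lines = path.read_text(errors="ignore").splitlines()
+    except OSError:
+        continue
+    for number, line in enumerate(lines, start=1):
+        if "TODO nf-core" in line:
+            print(f"{path}:{number}: {line.strip()}")
+```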
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/readme.md b/src/content/tools/docs/1.14/pipeline_lint_tests/readme.md
new file mode 100644
index 0000000000..51b8040c97
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/readme.md
@@ -0,0 +1,25 @@
+# readme
+
+#### `PipelineLint.readme(){:python}`
+
+Repository `README.md` tests
+
+The `README.md` files for a project are very important and must meet some requirements:
+
+- Nextflow badge
+ - If no Nextflow badge is found, a warning is given
+ - If a badge is found but the version doesn’t match the minimum version in the config file, the test fails
+ - Example badge code:
+ ```md
+ [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.27.6-brightgreen.svg)](https://www.nextflow.io/)
+ ```
+- Bioconda badge
+ - If your pipeline contains a file called `environment.yml` in the root directory, a bioconda badge is required
+ - Required badge code:
+ ```md
+ [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)
+ ```
+
+:::note
+These badges are a markdown image `![alt-text]()` _inside_ a markdown link `[markdown image]()`, so a bit fiddly to write.
+:::
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/schema_description.md b/src/content/tools/docs/1.14/pipeline_lint_tests/schema_description.md
new file mode 100644
index 0000000000..fafc6d6c59
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/schema_description.md
@@ -0,0 +1,11 @@
+# schema_description
+
+#### `PipelineLint.schema_description(){:python}`
+
+Check that every parameter in the schema has a description
+
+The `nextflow_schema.json` pipeline schema should describe every flat parameter.
+It also warns about parameters that are defined outside of a group.
+
+- Warning: Parameters in `nextflow_schema.json` without a description
+- Warning: Parameters in `nextflow_schema.json` that are defined outside of a group
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/schema_lint.md b/src/content/tools/docs/1.14/pipeline_lint_tests/schema_lint.md
new file mode 100644
index 0000000000..b5e29febba
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/schema_lint.md
@@ -0,0 +1,59 @@
+# schema_lint
+
+#### `PipelineLint.schema_lint(){:python}`
+
+Pipeline schema syntax
+
+Pipelines should have a `nextflow_schema.json` file that describes the different
+pipeline parameters (eg. `params.something`, `--something`).
+
+:::note
+Reminder: you should generally never need to edit this JSON file by hand.
+The `nf-core schema build` command can create _and edit_ the file for you
+to keep it up to date, with a friendly user-interface for customisation.
+:::
+
+The lint test checks the schema for the following:
+
+- Schema should be a valid JSON file
+- Schema should adhere to [JSONSchema](https://json-schema.org/), Draft 7.
+- Parameters can be described in two places:
+ > - As `properties` in the top-level schema object
+  > - As `properties` within subschemas listed in a top-level `definitions` object
+- The schema must describe at least one parameter
+- There must be no duplicate parameter IDs across the schema and definition subschema
+- All subschema in `definitions` must be referenced in the top-level `allOf` key
+- The top-level `allOf` key must not describe any non-existent definitions
+- Default parameters in the schema must be valid
+- Core top-level schema attributes should exist and be set as follows:
+ > - `$schema`: `https://json-schema.org/draft-07/schema`
+ > - `$id`: URL to the raw schema file, eg. `https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`
+ > - `title`: `YOURPIPELINE pipeline parameters`
+ > - `description`: The pipeline config `manifest.description`
+
+For example, an _extremely_ minimal schema could look like this:
+
+```json
+{
+ "$schema": "https://json-schema.org/draft-07/schema",
+ "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json",
+ "title": "YOURPIPELINE pipeline parameters",
+ "description": "This pipeline is for testing",
+ "properties": {
+ "first_param": { "type": "string" }
+ },
+ "definitions": {
+ "my_first_group": {
+ "properties": {
+ "second_param": { "type": "string" }
+ }
+ }
+ },
+ "allOf": [{ "$ref": "#/definitions/my_first_group" }]
+}
+```
+
+:::note
+You can check your pipeline schema without having to run the entire pipeline lint
+by running `nf-core schema lint` instead of `nf-core lint`
+:::
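+
+The core of the syntax check can also be reproduced with the `jsonschema` package; a minimal sketch (illustrative only, not the actual nf-core/tools implementation):
+
+```python
+# Illustrative sketch only: check that nextflow_schema.json is valid JSON Schema Draft 7.
+import json
+
+from jsonschema import Draft7Validator
+
+with open("nextflow_schema.json") as fh:
+    schema = json.load(fh)
+
+Draft7Validator.check_schema(schema)  # raises SchemaError if the schema itself is invalid
+
+# The schema must describe at least one parameter, at the top level or inside a definition group
+has_params = bool(schema.get("properties")) or any(
+    group.get("properties") for group in schema.get("definitions", {}).values()
+)
+assert has_params, "Schema describes no parameters"
+```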
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/schema_params.md b/src/content/tools/docs/1.14/pipeline_lint_tests/schema_params.md
new file mode 100644
index 0000000000..7d9440fa15
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/schema_params.md
@@ -0,0 +1,11 @@
+# schema_params
+
+#### `PipelineLint.schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline.
+
+The `nextflow_schema.json` pipeline schema should describe every flat parameter
+returned from the `nextflow config` command (params that are objects or more complex structures are ignored).
+
+- Failure: If parameters are found in `nextflow_schema.json` that are not in the pipeline config
+- Warning: If parameters are found in the pipeline config that are not in `nextflow_schema.json`
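+
+A minimal sketch of the schema side of this comparison (illustrative only, not the actual nf-core/tools implementation); the resulting set is what gets compared against the flat params reported by `nextflow config`:
+
+```python
+# Illustrative sketch only: collect the flat parameter names described by the schema.
+import json
+
+with open("nextflow_schema.json") as fh:
+    schema = json.load(fh)
+
+schema_params = set(schema.get("properties", {}))
+for group in schema.get("definitions", {}).values():
+    schema_params |= set(group.get("properties", {}))
+
+print(sorted(schema_params))
+```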
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/template_strings.md b/src/content/tools/docs/1.14/pipeline_lint_tests/template_strings.md
new file mode 100644
index 0000000000..23c06bb63d
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/template_strings.md
@@ -0,0 +1,17 @@
+# template_strings
+
+#### `PipelineLint.template_strings(){:python}`
+
+Check for template placeholders.
+
+The `nf-core create` pipeline template uses
+[Jinja](https://jinja.palletsprojects.com/en/2.11.x/) behind the scenes.
+
+This lint test fails if any Jinja template variables such as
+`{{ pipeline_name }}` are found in your pipeline code.
+
+Finding a placeholder like this means that something was probably copied and pasted
+from the template without being properly rendered for your pipeline.
+
+This test ignores any double-brackets prefixed with a dollar sign, such as
+`${{ secrets.AWS_ACCESS_KEY_ID }}` as these placeholders are used in GitHub Actions workflows.
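+
+For context, this is what a rendered Jinja placeholder looks like (a small illustrative example, assuming the `jinja2` package is installed; it is not part of the lint test itself):
+
+```python
+# Illustrative example: an unrendered "{{ pipeline_name }}" is exactly what this lint test flags.
+from jinja2 import Template
+
+template = Template("Results generated by {{ pipeline_name }}")
+print(template.render(pipeline_name="nf-core/example"))
+# -> "Results generated by nf-core/example"
+```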
diff --git a/src/content/tools/docs/1.14/pipeline_lint_tests/version_consistency.md b/src/content/tools/docs/1.14/pipeline_lint_tests/version_consistency.md
new file mode 100644
index 0000000000..040f5e57fc
--- /dev/null
+++ b/src/content/tools/docs/1.14/pipeline_lint_tests/version_consistency.md
@@ -0,0 +1,23 @@
+# version_consistency
+
+#### `PipelineLint.version_consistency(){:python}`
+
+Pipeline and container version number consistency.
+
+:::note
+This test only runs when the `--release` flag is set for `nf-core lint`,
+or `$GITHUB_REF` is equal to `master`.
+:::
+
+This lint test fetches the pipeline version number from three possible locations:
+
+- The pipeline config, `manifest.version`
+- The docker container in the pipeline config, `process.container`
+ > - Some pipelines may not have this set on a pipeline level. If it is not found, it is ignored.
+- `$GITHUB_REF`, if it looks like a release tag (`refs/tags/`)
+
+The test then checks that:
+
+- The container name has a tag specified (eg. `nfcore/pipeline:version`)
+- The pipeline version number is numeric (contains only numbers and dots)
+- That the version numbers all match one another
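+
+A minimal sketch of these rules with made-up values (illustrative only, not the actual nf-core/tools implementation):
+
+```python
+# Illustrative sketch only: the consistency rules described above.
+import re
+
+manifest_version = "1.0.0"          # from manifest.version (made-up value)
+container = "nfcore/example:1.0.0"  # from process.container (made-up value)
+
+assert ":" in container, "Container has no tag"
+tag = container.rsplit(":", 1)[-1]
+assert re.fullmatch(r"[0-9.]+", manifest_version), "Pipeline version is not numeric"
+assert manifest_version == tag, "Pipeline version and container tag do not match"
+```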
diff --git a/src/content/tools/docs/1.5/bump_version.md b/src/content/tools/docs/1.5/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.5/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError**, if the version number cannot be found.
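+
+A rough usage sketch based only on the signatures above; how the `PipelineLint` object is prepared beforehand is an assumption here, not part of this module's documentation:
+
+```python
+# Rough usage sketch; preparation of the lint object is an assumption.
+import nf_core.lint
+import nf_core.bump_version
+
+lint_obj = nf_core.lint.PipelineLint("path/to/pipeline")
+# Assumed: parse the pipeline files/config first, using the lint object's check methods
+lint_obj.check_files_exist()
+lint_obj.check_nextflow_config()
+
+nf_core.bump_version.bump_pipeline_version(lint_obj, "1.1.0")    # new pipeline version (semantic versioning)
+nf_core.bump_version.bump_nextflow_version(lint_obj, "19.10.0")  # new minimum Nextflow version
+```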
diff --git a/src/content/tools/docs/1.5/create.md b/src/content/tools/docs/1.5/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.5/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.5/download.md b/src/content/tools/docs/1.5/download.md
new file mode 100644
index 0000000000..79f40d0d68
--- /dev/null
+++ b/src/content/tools/docs/1.5/download.md
@@ -0,0 +1,56 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None){:python}`
+
+Downloads a nf-core workflow from Github to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from Github to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError**, if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  **Various exceptions possible from subprocess execution of Singularity.**
+
+#### `validate_md5(fname, expected){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError**, if the md5sum does not match the remote sum.
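+
+A rough usage sketch based on the constructor and methods documented above (the pipeline name and release are examples):
+
+```python
+# Rough usage sketch based on the documented constructor and methods.
+import nf_core.download
+
+download = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/hlatyping",   # example pipeline name
+    release="1.1.1",                # example release tag
+    singularity=False,
+    outdir="nf-core-hlatyping-1.1.1",
+)
+download.download_workflow()  # fetches the workflow files (and the container image, if requested)
+```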
diff --git a/src/content/tools/docs/1.5/index.md b/src/content/tools/docs/1.5/index.md
new file mode 100644
index 0000000000..fd6054924b
--- /dev/null
+++ b/src/content/tools/docs/1.5/index.md
@@ -0,0 +1,37 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.5/licences.md b/src/content/tools/docs/1.5/licences.md
new file mode 100644
index 0000000000..731a506571
--- /dev/null
+++ b/src/content/tools/docs/1.5/licences.md
@@ -0,0 +1,39 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `print_licences(as_json=False){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
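+
+A rough usage sketch based on the class and methods documented above:
+
+```python
+# Rough usage sketch based on the documented class and methods.
+import nf_core.licences
+
+licences = nf_core.licences.WorkflowLicences("hlatyping")  # short pipeline name, as in the docs
+licences.fetch_conda_licences()         # queries Anaconda / PyPI for each dependency
+licences.print_licences(as_json=False)  # prints a condensed per-dependency overview
+```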
diff --git a/src/content/tools/docs/1.5/lint.md b/src/content/tools/docs/1.5/lint.md
new file mode 100644
index 0000000000..da9bb3b4f6
--- /dev/null
+++ b/src/content/tools/docs/1.5/lint.md
@@ -0,0 +1,300 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All objects attributes are set, after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function was called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test-id>, <reason>)`
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test-id>, <reason>)`
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test-id>, <reason>)`
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+ {
+    <package>: <API JSON response object>
+ }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat <pipeline-dir>`. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.singleEnd = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A conda package name.
+- **Raises:**
+  **A ValueError**, if the package name can not be resolved.
+
+#### `check_ci_config(){:python}`
+
+Checks that the Travis or Circle CI YAML config is valid.
+
+Makes sure that `nf-core lint` runs in travis tests and that
+tests run with the required nextflow version.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checkmarks files
+for presence.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'Dockerfile',
+['.travis.yml', '.circle.yml'],
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'conf/base.config'
+```
+
+- **Raises:**
+  **An AssertionError, if neither nextflow.config nor main.nf is found.**
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends a HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **A ValueError**, if the package name can not be resolved or the connection timed out.
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_version_consistency(){:python}`
+
+Checks container tags versions.
+
+Runs on `process.container` and `$TRAVIS_TAG` (each only if set).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (eg. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **If a critical problem is found, an AssertionError is raised.**
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
diff --git a/src/content/tools/docs/1.5/list.md b/src/content/tools/docs/1.5/list.md
new file mode 100644
index 0000000000..432af5d7a2
--- /dev/null
+++ b/src/content/tools/docs/1.5/list.md
@@ -0,0 +1,89 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+ **data** (_dict_) – workflow information as they are retrieved from the Github repository REST API request
+ ().
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release'){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](http://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
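+
+A rough usage sketch based on the functions and classes documented above (the filter keyword is an example):
+
+```python
+# Rough usage sketch based on the documented functions and classes.
+import nf_core.list
+
+# Print a summary of all remote workflows matching the keyword "rna", sorted by stars
+nf_core.list.list_workflows(filter_by=["rna"], sort_by="stars", as_json=False)
+
+# Or work with the Workflows container directly
+workflows = nf_core.list.Workflows(filter_by=["rna"], sort_by="stars")
+workflows.get_remote_workflows()
+print(len(workflows.filtered_workflows()), "remote workflows match the filter")
+```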
diff --git a/src/content/tools/docs/1.5/utils.md b/src/content/tools/docs/1.5/utils.md
new file mode 100644
index 0000000000..340ec4292f
--- /dev/null
+++ b/src/content/tools/docs/1.5/utils.md
@@ -0,0 +1,22 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.fetch_wf_config(wf_path, wf=None){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be generated by tempfile.gettempdir() under
+a nfcore_cache subdir.
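+
+A rough usage sketch based on the function documented above (the config keys shown are illustrative):
+
+```python
+# Rough usage sketch based on the documented function.
+import nf_core.utils
+
+config = nf_core.utils.fetch_wf_config("path/to/pipeline")  # flat dict of config variables
+print(config.get("manifest.name"), config.get("manifest.version"))
+```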
diff --git a/src/content/tools/docs/1.5/workflow.md b/src/content/tools/docs/1.5/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.5/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/1.6/bump_version.md b/src/content/tools/docs/1.6/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.6/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError**, if the version number cannot be found.
diff --git a/src/content/tools/docs/1.6/create.md b/src/content/tools/docs/1.6/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.6/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.6/download.md b/src/content/tools/docs/1.6/download.md
new file mode 100644
index 0000000000..79f40d0d68
--- /dev/null
+++ b/src/content/tools/docs/1.6/download.md
@@ -0,0 +1,56 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None){:python}`
+
+Downloads a nf-core workflow from Github to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from Github to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError**, if the pipeline cannot be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  **Various exceptions possible from subprocess execution of Singularity.**
+
+#### `validate_md5(fname, expected){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError**, if the md5sum does not match the remote sum.
diff --git a/src/content/tools/docs/1.6/index.md b/src/content/tools/docs/1.6/index.md
new file mode 100644
index 0000000000..fd6054924b
--- /dev/null
+++ b/src/content/tools/docs/1.6/index.md
@@ -0,0 +1,37 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.6/licences.md b/src/content/tools/docs/1.6/licences.md
new file mode 100644
index 0000000000..731a506571
--- /dev/null
+++ b/src/content/tools/docs/1.6/licences.md
@@ -0,0 +1,39 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `print_licences(as_json=False){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
diff --git a/src/content/tools/docs/1.6/lint.md b/src/content/tools/docs/1.6/lint.md
new file mode 100644
index 0000000000..da9bb3b4f6
--- /dev/null
+++ b/src/content/tools/docs/1.6/lint.md
@@ -0,0 +1,300 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test-id>, <reason>)`
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test-id>, <reason>)`
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test-id>, <reason>)`
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+ {
+    <package>: <API JSON response object>
+ }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat <pipeline-dir>`. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.singleEnd = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A conda package name.
+- **Raises:**
+  **A ValueError**, if the package name can not be resolved.
+
+#### `check_ci_config(){:python}`
+
+Checks that the Travis or Circle CI YAML config is valid.
+
+Makes sure that `nf-core lint` runs in travis tests and that
+tests run with the required nextflow version.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that the
+expected files are present.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'Dockerfile',
+['.travis.yml', '.circle.yml'],
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'conf/base.config'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends an HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name can not be resolved or the connection timed out.
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_version_consistency(){:python}`
+
+Checks container tag versions.
+
+Runs on `process.container` and `$TRAVIS_TAG` (each only if set).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns a summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (e.g. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **AssertionError** – if a critical problem is found.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) when finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
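+
+For illustration, a minimal usage sketch of `run_linting(){:python}`; the pipeline path below is a placeholder:
+
+```python
+import nf_core.lint
+
+# Run all linting checks on a local pipeline copy (path is illustrative).
+lint_obj = nf_core.lint.run_linting('/path/to/nf-core-mypipeline', release_mode=False)
+
+# passed / warned / failed hold (test-id, message) tuples, as described above.
+print(f'{len(lint_obj.passed)} tests passed')
+print(f'{len(lint_obj.warned)} tests had warnings')
+print(f'{len(lint_obj.failed)} tests failed')
+```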
diff --git a/src/content/tools/docs/1.6/list.md b/src/content/tools/docs/1.6/list.md
new file mode 100644
index 0000000000..432af5d7a2
--- /dev/null
+++ b/src/content/tools/docs/1.6/list.md
@@ -0,0 +1,89 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the Github repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release'){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](http://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on an external code snippet, adapted by sven1103.
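+
+For illustration, a minimal sketch of how the listing classes fit together; the filter keywords are placeholders:
+
+```python
+import nf_core.list
+
+# One-liner: print a summary of all nf-core workflows matching a keyword.
+nf_core.list.list_workflows(filter_by=['rna'], sort_by='stars')
+
+# Roughly equivalent steps using the Workflows container class directly.
+wfs = nf_core.list.Workflows(filter_by=['rna'], sort_by='stars')
+wfs.get_remote_workflows()     # fetch the remote pipeline list from nf-co.re
+wfs.get_local_nf_workflows()   # find workflows cached locally by Nextflow
+wfs.compare_remote_local()     # flag which local copies are up to date
+wfs.print_summary()
+```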
diff --git a/src/content/tools/docs/1.6/utils.md b/src/content/tools/docs/1.6/utils.md
new file mode 100644
index 0000000000..340ec4292f
--- /dev/null
+++ b/src/content/tools/docs/1.6/utils.md
@@ -0,0 +1,22 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.fetch_wf_config(wf_path, wf=None){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be generated by tempfile.gettempdir() under
+a nfcore_cache subdir.
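+
+For illustration, a minimal usage sketch; the workflow path is a placeholder:
+
+```python
+import nf_core.utils
+
+# Cache remote HTTP requests (e.g. package lookups) to speed up repeated runs.
+nf_core.utils.setup_requests_cachedir()
+
+# Retrieve the flattened Nextflow config of a local workflow copy as a dict.
+config = nf_core.utils.fetch_wf_config('/path/to/nf-core-mypipeline')
+print(config.get('params.outdir'))
+```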
diff --git a/src/content/tools/docs/1.6/workflow.md b/src/content/tools/docs/1.6/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.6/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/1.7/bump_version.md b/src/content/tools/docs/1.7/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.7/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – if the version number cannot be found.
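+
+For illustration, a rough usage sketch: the bump functions expect a [`PipelineLint`](lint#nf_core.lint.PipelineLint) object whose attributes have been populated; the path and version strings are placeholders:
+
+```python
+import nf_core.lint
+import nf_core.bump_version
+
+# Parse the pipeline so that lint_obj.config and related attributes are populated.
+lint_obj = nf_core.lint.PipelineLint('/path/to/nf-core-mypipeline')
+lint_obj.lint_pipeline()
+
+# Bump the pipeline version (semantic versioning only) ...
+nf_core.bump_version.bump_pipeline_version(lint_obj, '1.1.0')
+# ... or the minimum required Nextflow version.
+nf_core.bump_version.bump_nextflow_version(lint_obj, '19.10.0')
+```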
diff --git a/src/content/tools/docs/1.7/create.md b/src/content/tools/docs/1.7/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.7/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.7/download.md b/src/content/tools/docs/1.7/download.md
new file mode 100644
index 0000000000..c9d784edac
--- /dev/null
+++ b/src/content/tools/docs/1.7/download.md
@@ -0,0 +1,68 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Downloads a nf-core workflow from Github to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from Github to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline can not be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on disk and validates it against the expected value.
+
+- **Parameters:**
+  - **fname** (_str_) – Path to a local file.
+  - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
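+
+For illustration, a minimal usage sketch; pipeline name, release and output directory are placeholders:
+
+```python
+import nf_core.download
+
+dl = nf_core.download.DownloadWorkflow(
+    pipeline='nf-core/hlatyping',
+    release='1.0',
+    outdir='nf-core-hlatyping-1.0',
+)
+dl.download_workflow()  # fetches the workflow files (see the methods above)
+```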
diff --git a/src/content/tools/docs/1.7/index.md b/src/content/tools/docs/1.7/index.md
new file mode 100644
index 0000000000..fd6054924b
--- /dev/null
+++ b/src/content/tools/docs/1.7/index.md
@@ -0,0 +1,37 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.7/licences.md b/src/content/tools/docs/1.7/licences.md
new file mode 100644
index 0000000000..731a506571
--- /dev/null
+++ b/src/content/tools/docs/1.7/licences.md
@@ -0,0 +1,39 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `print_licences(as_json=False){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
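+
+For illustration, a minimal usage sketch; the pipeline name is a placeholder:
+
+```python
+import nf_core.licences
+
+wl = nf_core.licences.WorkflowLicences('hlatyping')
+wl.fetch_conda_licences()         # query Anaconda / PyPi for each dependency
+wl.print_licences(as_json=False)  # condensed per-dependency overview
+```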
diff --git a/src/content/tools/docs/1.7/lint.md b/src/content/tools/docs/1.7/lint.md
new file mode 100644
index 0000000000..da9bb3b4f6
--- /dev/null
+++ b/src/content/tools/docs/1.7/lint.md
@@ -0,0 +1,300 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+  {
+      <package>: <Anaconda API JSON response>
+  }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat <pipeline-dir>`. Here is an example
+  from the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.singleEnd = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends an HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – if the package name can not be resolved.
+
+#### `check_ci_config(){:python}`
+
+Checks that the Travis or Circle CI YAML config is valid.
+
+Makes sure that `nf-core lint` runs in travis tests and that
+tests run with the required nextflow version.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that the
+expected files are present.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'Dockerfile',
+['.travis.yml', '.circle.yml'],
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'conf/base.config'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends an HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name can not be resolved or the connection timed out.
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_version_consistency(){:python}`
+
+Checks container tag versions.
+
+Runs on `process.container` and `$TRAVIS_TAG` (each only if set).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns a summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (e.g. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **AssertionError** – if a critical problem is found.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) when finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
diff --git a/src/content/tools/docs/1.7/list.md b/src/content/tools/docs/1.7/list.md
new file mode 100644
index 0000000000..432af5d7a2
--- /dev/null
+++ b/src/content/tools/docs/1.7/list.md
@@ -0,0 +1,89 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the Github repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release'){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](http://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on an external code snippet, adapted by sven1103.
diff --git a/src/content/tools/docs/1.7/utils.md b/src/content/tools/docs/1.7/utils.md
new file mode 100644
index 0000000000..48285a4fe6
--- /dev/null
+++ b/src/content/tools/docs/1.7/utils.md
@@ -0,0 +1,22 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.fetch_wf_config(wf_path, wf=None){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
diff --git a/src/content/tools/docs/1.7/workflow.md b/src/content/tools/docs/1.7/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.7/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/1.8/bump_version.md b/src/content/tools/docs/1.8/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.8/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – if the version number cannot be found.
diff --git a/src/content/tools/docs/1.8/create.md b/src/content/tools/docs/1.8/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.8/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.8/download.md b/src/content/tools/docs/1.8/download.md
new file mode 100644
index 0000000000..c9d784edac
--- /dev/null
+++ b/src/content/tools/docs/1.8/download.md
@@ -0,0 +1,68 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Downloads a nf-core workflow from Github to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from Github to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline can not be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on disk and validates it against the expected value.
+
+- **Parameters:**
+  - **fname** (_str_) – Path to a local file.
+  - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
diff --git a/src/content/tools/docs/1.8/index.md b/src/content/tools/docs/1.8/index.md
new file mode 100644
index 0000000000..fd6054924b
--- /dev/null
+++ b/src/content/tools/docs/1.8/index.md
@@ -0,0 +1,37 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.8/licences.md b/src/content/tools/docs/1.8/licences.md
new file mode 100644
index 0000000000..731a506571
--- /dev/null
+++ b/src/content/tools/docs/1.8/licences.md
@@ -0,0 +1,39 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `print_licences(as_json=False){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
diff --git a/src/content/tools/docs/1.8/lint.md b/src/content/tools/docs/1.8/lint.md
new file mode 100644
index 0000000000..5b8c83cc64
--- /dev/null
+++ b/src/content/tools/docs/1.8/lint.md
@@ -0,0 +1,336 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+  {
+      <package>: <Anaconda API JSON response>
+  }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat <pipeline-dir>`. Here is an example
+  from the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.single_end = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_actions_branch_protection(){:python}`
+
+Checks that the GitHub actions branch protection workflow is valid.
+
+Makes sure PRs can only come from nf-core dev or ‘patch’ of a fork.
+
+#### `check_actions_ci(){:python}`
+
+Checks that the GitHub actions ci workflow is valid
+
+Makes sure tests run with the required nextflow version.
+
+#### `check_actions_lint(){:python}`
+
+Checks that the GitHub actions lint workflow is valid
+
+Makes sure `nf-core lint` and `markdownlint` run.
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends an HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – if the package name can not be resolved.
+
+#### `check_ci_config(){:python}`
+
+Checks that the Travis or Circle CI YAML config is valid.
+
+Makes sure that `nf-core lint` runs in travis tests and that
+tests run with the required nextflow version.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that the
+expected files are present.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'Dockerfile',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'conf/base.config',
+'.github/workflows/branch.yml',
+'.github/workflows/ci.yml',
+'.github/workflows/linting.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends an HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+  **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name can not be resolved or the connection timed out.
+
+#### `check_pipeline_name(){:python}`
+
+Check whether pipeline name adheres to lower case/no hyphen naming convention
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_version_consistency(){:python}`
+
+Checks container tag versions.
+
+Runs on `process.container` and `$TRAVIS_TAG` (each only if set).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns a summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (e.g. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **AssertionError** – if a critical problem is found.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) when finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
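+
+For illustration, a sketch of release-mode linting and of reading the result lists; the path is a placeholder:
+
+```python
+import nf_core.lint
+
+# Release mode additionally checks that container version tags are consistent.
+lint_obj = nf_core.lint.run_linting('/path/to/nf-core-mypipeline', release_mode=True)
+
+# warned / failed hold (test-id, message) tuples, as described above.
+for test_id, message in lint_obj.warned:
+    print(f'WARN  [{test_id}] {message}')
+for test_id, message in lint_obj.failed:
+    print(f'FAIL  [{test_id}] {message}')
+```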
diff --git a/src/content/tools/docs/1.8/list.md b/src/content/tools/docs/1.8/list.md
new file mode 100644
index 0000000000..432af5d7a2
--- /dev/null
+++ b/src/content/tools/docs/1.8/list.md
@@ -0,0 +1,89 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the Github repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release'){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](http://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on an external code snippet, adapted by sven1103.
diff --git a/src/content/tools/docs/1.8/utils.md b/src/content/tools/docs/1.8/utils.md
new file mode 100644
index 0000000000..48285a4fe6
--- /dev/null
+++ b/src/content/tools/docs/1.8/utils.md
@@ -0,0 +1,22 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.fetch_wf_config(wf_path, wf=None){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
diff --git a/src/content/tools/docs/1.8/workflow.md b/src/content/tools/docs/1.8/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.8/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/1.9/bump_version.md b/src/content/tools/docs/1.9/bump_version.md
new file mode 100644
index 0000000000..388370558e
--- /dev/null
+++ b/src/content/tools/docs/1.9/bump_version.md
@@ -0,0 +1,36 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(lint_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(lint_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, lint_obj, pattern, newstr, allow_multiple=False){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **lint_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+ - **allow_multiple** (_bool_) – Replace all pattern hits, not only the first. Defaults to False.
+- **Raises:**
+  **SyntaxError** – if the version number cannot be found.
diff --git a/src/content/tools/docs/1.9/create.md b/src/content/tools/docs/1.9/create.md
new file mode 100644
index 0000000000..354aadee7b
--- /dev/null
+++ b/src/content/tools/docs/1.9/create.md
@@ -0,0 +1 @@
+# nf_core.create
diff --git a/src/content/tools/docs/1.9/download.md b/src/content/tools/docs/1.9/download.md
new file mode 100644
index 0000000000..fbea76fc98
--- /dev/null
+++ b/src/content/tools/docs/1.9/download.md
@@ -0,0 +1,68 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'){:python}`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `fetch_workflow_details(wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ **wfs** ([_nf_core.list.Workflows_](list#nf_core.list.Workflows)) – A nf_core.list.Workflows object
+- **Raises:**
+  **LookupError** – if the pipeline can not be found.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow
+
+#### `pull_singularity_image(container){:python}`
+
+Uses a local installation of singularity to pull an image from Docker Hub.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to nfcore/name:dev.
+- **Raises:**
+  Various exceptions possible from subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on disk and validates it against the expected value.
+
+- **Parameters:**
+  - **fname** (_str_) – Path to a local file.
+  - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
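+
+For illustration, a minimal sketch of a download that also pulls the Singularity image; all values are placeholders and a local Singularity installation is assumed:
+
+```python
+import nf_core.download
+
+dl = nf_core.download.DownloadWorkflow(
+    pipeline='nf-core/hlatyping',
+    release='1.2.0',
+    singularity=True,        # also pull the container image from Docker Hub
+    outdir='nf-core-hlatyping-1.2.0',
+    compress_type='tar.gz',
+)
+dl.download_workflow()
+```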
diff --git a/src/content/tools/docs/1.9/index.md b/src/content/tools/docs/1.9/index.md
new file mode 100644
index 0000000000..fd6054924b
--- /dev/null
+++ b/src/content/tools/docs/1.9/index.md
@@ -0,0 +1,37 @@
+
+
+# Welcome to nf-core tools API documentation!
+
+# Contents:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+- [nf_core.download](download)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.utils](utils)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+- [nf_core.list](workflow)
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/1.9/licences.md b/src/content/tools/docs/1.9/licences.md
new file mode 100644
index 0000000000..731a506571
--- /dev/null
+++ b/src/content/tools/docs/1.9/licences.md
@@ -0,0 +1,39 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `clean_licence_names(licences){:python}`
+
+Normalises varying licence names.
+
+- **Parameters:**
+ **licences** (_list_) – A list of licences which are basically raw string objects from
+ the licence content information.
+- **Returns:**
+ Cleaned licences.
+- **Return type:**
+ list
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `print_licences(as_json=False){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
diff --git a/src/content/tools/docs/1.9/lint.md b/src/content/tools/docs/1.9/lint.md
new file mode 100644
index 0000000000..5bf982b0d4
--- /dev/null
+++ b/src/content/tools/docs/1.9/lint.md
@@ -0,0 +1,332 @@
+# nf_core.lint
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(path){:python}`
+
+Object to hold linting information and results.
+All object attributes are set after the [`PipelineLint.lint_pipeline()`](#nf_core.lint.PipelineLint.lint_pipeline) function has been called.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (environment.yml).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `dockerfile{:python}`
+
+A list of lines (str) from the parsed Dockerfile.
+
+- **Type:**
+ list
+
+#### `failed{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `passed{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+#### `path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: (test-id, message)
+
+- **Type:**
+ list
+
+**Attribute specifications**
+
+Some of the more complex attributes of a PipelineLint object.
+
+- conda_config:
+ ```default
+ # Example
+ {
+ 'name': 'nf-core-hlatyping',
+ 'channels': ['bioconda', 'conda-forge'],
+ 'dependencies': ['optitype=1.3.2', 'yara=0.9.6']
+ }
+ ```
+- conda_package_info:
+ ```default
+ # See https://api.anaconda.org/package/bioconda/bioconda-utils as an example.
+  {
+      <package name>: <API JSON response>
+  }
+ ```
+- config: Produced by calling Nextflow with `nextflow config -flat <pipeline-dir>`. Here is an example from
+  the [nf-core/hlatyping](https://github.com/nf-core/hlatyping) pipeline:
+ ```default
+ process.container = 'nfcore/hlatyping:1.1.1'
+ params.help = false
+ params.outdir = './results'
+ params.bam = false
+ params.single_end = false
+ params.seqtype = 'dna'
+ params.solver = 'glpk'
+ params.igenomes_base = './iGenomes'
+ params.clusterOptions = false
+ ...
+ ```
+
+#### `check_actions_branch_protection(){:python}`
+
+Checks that the GitHub Actions branch protection workflow is valid.
+
+Makes sure PRs can only come from nf-core dev or ‘patch’ of a fork.
+
+#### `check_actions_ci(){:python}`
+
+Checks that the GitHub Actions CI workflow is valid.
+
+Makes sure tests run with the required nextflow version.
+
+#### `check_actions_lint(){:python}`
+
+Checks that the GitHub Actions lint workflow is valid.
+
+Makes sure `nf-core lint` and `markdownlint` runs.
+
+#### `check_anaconda_package(dep){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A conda package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved.
+
+#### `check_conda_dockerfile(){:python}`
+
+Checks the Docker build file.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_conda_env_yaml(){:python}`
+
+Checks that the conda environment file is valid.
+
+Checks that:
+
+- a name is given and is consistent with the pipeline name
+- dependency versions are pinned
+- dependency versions are the latest available
+
+#### `check_docker(){:python}`
+
+Checks that Dockerfile contains the string `FROM`.
+
+#### `check_files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks files
+for presence.
+Files that **must** be present:
+
+```default
+'nextflow.config',
+'Dockerfile',
+['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling
+'README.md',
+'CHANGELOG.md',
+'docs/README.md',
+'docs/output.md',
+'docs/usage.md'
+```
+
+Files that _should_ be present:
+
+```default
+'main.nf',
+'environment.yml',
+'conf/base.config',
+'.github/workflows/branch.yml',
+'.github/workflows/ci.yml',
+'.github/workflows/linting.yml'
+```
+
+Files that _must not_ be present:
+
+```default
+'Singularity'
+```
+
+Files that _should not_ be present:
+
+```default
+'.travis.yml'
+```
+
+- **Raises:**
+  **AssertionError** – if neither nextflow.config nor main.nf is found.
+
+#### `check_licence(){:python}`
+
+Checks licence file is MIT.
+
+Currently the checkpoints are:
+
+- licence file must be long enough (4 or more lines)
+- licence contains the string _without restriction_
+- licence doesn’t have any placeholder variables
+
+#### `check_nextflow_config(){:python}`
+
+Checks a given pipeline for required config variables.
+
+At least one string in each list must be present for fail and warn.
+Any config in config_fail_ifdefined results in a failure.
+
+Uses `nextflow config -flat` to parse pipeline `nextflow.config`
+and print all config variables.
+NB: Does NOT parse contents of main.nf / nextflow script
+
+#### `check_pip_package(dep){:python}`
+
+Query PyPi package information.
+
+Sends a HTTP GET request to the PyPi remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPi package name.
+- **Raises:**
+  **ValueError** – if the package name cannot be resolved or the connection timed out.
+
+#### `check_pipeline_name(){:python}`
+
+Check whether pipeline name adheres to lower case/no hyphen naming convention
+
+#### `check_pipeline_todos(){:python}`
+
+Go through all template files looking for the string ‘TODO nf-core:’
+
+#### `check_readme(){:python}`
+
+Checks the repository README file for errors.
+
+Currently just checks the badges at the top of the README.
+
+#### `check_version_consistency(){:python}`
+
+Checks container tags versions.
+
+Runs on `process.container` (if set) and `$GITHUB_REF` (if a GitHub Actions release).
+
+Checks that:
+
+- the container has a tag
+- the version numbers are numeric
+- the version numbers are the same as one another
+
+#### `lint_pipeline(release_mode=False){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+and returns summary at completion. Raises an exception if there is a
+critical error that makes the rest of the tests pointless (eg. no
+pipeline script). Results from this function are printed by the main script.
+
+- **Parameters:**
+ **release_mode** (_boolean_) – Activates the release mode, which checks for
+ consistent version tags of containers. Default is False.
+- **Returns:**
+ Summary of test result messages structured as follows:
+ ```default
+ {
+ 'pass': [
+ ( test-id (int), message (string) ),
+ ( test-id (int), message (string) )
+ ],
+ 'warn': [(id, msg)],
+ 'fail': [(id, msg)],
+ }
+ ```
+- **Return type:**
+ dict
+- **Raises:**
+  **AssertionError** – if a critical problem is found.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
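+
+A hedged usage sketch of the documented entry point (the path is a placeholder):
+
+```python
+import nf_core.lint
+
+# Run all lint checks on a local pipeline checkout (normal mode)
+lint_obj = nf_core.lint.run_linting("/path/to/nf-core-mypipeline", release_mode=False)
+
+# Inspect the result lists collected on the returned PipelineLint object
+print(len(lint_obj.passed), "tests passed")
+print(len(lint_obj.failed), "tests failed")
+```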
diff --git a/src/content/tools/docs/1.9/list.md b/src/content/tools/docs/1.9/list.md
new file mode 100644
index 0000000000..138558a207
--- /dev/null
+++ b/src/content/tools/docs/1.9/list.md
@@ -0,0 +1,89 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release'){:python}`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](http://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
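+
+A hedged example of how this function might be called (the filter keyword is a placeholder):
+
+```python
+import nf_core.list
+
+# Print a table of all nf-core workflows matching "rna", sorted by GitHub stars
+nf_core.list.list_workflows(filter_by=["rna"], sort_by="stars", as_json=False)
+```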
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
diff --git a/src/content/tools/docs/1.9/utils.md b/src/content/tools/docs/1.9/utils.md
new file mode 100644
index 0000000000..48285a4fe6
--- /dev/null
+++ b/src/content/tools/docs/1.9/utils.md
@@ -0,0 +1,22 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### `nf_core.utils.fetch_wf_config(wf_path, wf=None){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
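+
+A small usage sketch (the config key shown is illustrative and not guaranteed to exist in every pipeline):
+
+```python
+import nf_core.utils
+
+config = nf_core.utils.fetch_wf_config("/path/to/pipeline")  # flat dict of config variables
+print(config.get("params.outdir"))
+```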
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
diff --git a/src/content/tools/docs/1.9/workflow.md b/src/content/tools/docs/1.9/workflow.md
new file mode 100644
index 0000000000..9e44b801a9
--- /dev/null
+++ b/src/content/tools/docs/1.9/workflow.md
@@ -0,0 +1 @@
+# nf_core.list
diff --git a/src/content/tools/docs/2.0.1/api/bump_version.md b/src/content/tools/docs/2.0.1/api/bump_version.md
new file mode 100644
index 0000000000..3f45f9fefb
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/bump_version.md
@@ -0,0 +1,35 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
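+
+A hedged sketch of bumping a pipeline version programmatically; the `Pipeline` object and its `_load()` helper are documented in [nf_core.utils](utils), and the path and version are placeholders:
+
+```python
+import nf_core.bump_version
+import nf_core.utils
+
+# Load pipeline metadata (nextflow.config, environment.yml, ...) first
+pipeline = nf_core.utils.Pipeline("/path/to/nf-core-mypipeline")
+pipeline._load()
+
+# Bump to a new semantic version
+nf_core.bump_version.bump_pipeline_version(pipeline, "1.1.0")
+```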
+
+### `nf_core.bump_version.update_file_version(filename, pipeline_obj, patterns){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **pipeline_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+- **Raises:**
+  **ValueError** – if the version number cannot be found.
diff --git a/src/content/tools/docs/2.0.1/api/create.md b/src/content/tools/docs/2.0.1/api/create.md
new file mode 100644
index 0000000000..302bfd36a6
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/create.md
@@ -0,0 +1,36 @@
+# nf_core.create
+
+Creates a nf-core pipeline matching the current
+organization’s specification based on a template.
+
+### _`class{:python}`_`nf_core.create.PipelineCreate(name, description, author, version='1.0dev', no_git=False, force=False, outdir=None){:python}`
+
+Bases: `object`
+
+Creates a nf-core pipeline a la carte from the nf-core best-practice template.
+
+- **Parameters:**
+ - **name** (_str_) – Name for the pipeline.
+ - **description** (_str_) – Description for the pipeline.
+  - **author** (_str_) – Author's name for the pipeline.
+ - **version** (_str_) – Version flag. Semantic versioning only. Defaults to 1.0dev.
+ - **no_git** (_bool_) – Prevents the creation of a local Git repository for the pipeline. Defaults to False.
+ - **force** (_bool_) – Overwrites a given workflow directory with the same name. Defaults to False.
+ May the force be with you.
+ - **outdir** (_str_) – Path to the local output directory.
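+
+A minimal, hedged sketch of creating a pipeline programmatically (all values are placeholders):
+
+```python
+import nf_core.create
+
+creator = nf_core.create.PipelineCreate(
+    name="mypipeline",
+    description="An example analysis pipeline",
+    author="Jane Doe",
+    version="1.0dev",
+    outdir="./nf-core-mypipeline",
+)
+creator.init_pipeline()  # renders the template and, unless no_git=True, initialises a git repo
+```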
+
+#### `git_init_pipeline(){:python}`
+
+Initialises the new pipeline as a Git repository and submits first commit.
+
+#### `init_pipeline(){:python}`
+
+Creates the nf-core pipeline.
+
+#### `make_pipeline_logo(){:python}`
+
+Fetch a logo for the new pipeline from the nf-core website
+
+#### `render_template(){:python}`
+
+Runs Jinja to create a new nf-core pipeline.
diff --git a/src/content/tools/docs/2.0.1/api/download.md b/src/content/tools/docs/2.0.1/api/download.md
new file mode 100644
index 0000000000..7769825e08
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/download.md
@@ -0,0 +1,142 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadProgress(*columns: str | ProgressColumn, console: Console | None = None, auto_refresh: bool = True, refresh_per_second: float = 10, speed_estimate_period: float = 30.0, transient: bool = False, redirect_stdout: bool = True, redirect_stderr: bool = True, get_time: Callable[[], float] | None = None, disable: bool = False, expand: bool = False){:python}`
+
+Bases: `Progress`
+
+Custom Progress bar class, allowing us to have two progress
+bars with different columns / layouts.
+
+#### `get_renderables(){:python}`
+
+Get a number of renderables for the progress display.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline=None, release=None, outdir=None, compress_type=None, force=False, container=None, singularity_cache_only=False, parallel_downloads=4){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
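+
+For illustration only, using the constructor arguments documented above (the pipeline name, release and output directory are placeholders):
+
+```python
+import nf_core.download
+
+dl = nf_core.download.DownloadWorkflow(
+    pipeline="nf-core/rnaseq",
+    release="3.0",
+    outdir="nf-core-rnaseq-3.0",
+    compress_type="tar.gz",
+)
+dl.download_workflow()  # fetch workflow files, configs and (optionally) container images
+```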
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow.
+
+Starts by using nextflow config to pull out any process.container
+declarations. This works for DSL1.
+
+Second, we look for DSL2 containers. These can’t be found with
+nextflow config at the time of writing, so we scrape the pipeline files.
+
+#### `get_release_hash(){:python}`
+
+Find specified release / branch hash
+
+#### `get_singularity_images(){:python}`
+
+Loop through container names and download Singularity images
+
+#### `prompt_compression_type(){:python}`
+
+Ask user if we should compress the downloaded files
+
+#### `prompt_container_download(){:python}`
+
+Prompt whether to download container images or not
+
+#### `prompt_pipeline_name(){:python}`
+
+Prompt for the pipeline name if not set with a flag
+
+#### `prompt_release(){:python}`
+
+Prompt for pipeline release / branch
+
+#### `prompt_singularity_cachedir_only(){:python}`
+
+Ask if we should _only_ use $NXF_SINGULARITY_CACHEDIR without copying into target
+
+#### `prompt_use_singularity_cachedir(){:python}`
+
+Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set
+
+#### `singularity_copy_cache_image(container, out_path, cache_path){:python}`
+
+Copy Singularity image from NXF_SINGULARITY_CACHEDIR to target folder.
+
+#### `singularity_download_image(container, out_path, cache_path, progress){:python}`
+
+Download a singularity image from the web.
+
+Use native Python to download the file.
+
+- **Parameters:**
+ - **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `https://depot.galaxyproject.org/singularity/name:version`
+ - **out_path** (_str_) – The final target output path
+  - **cache_path** (_str_ or _None_) – The NXF_SINGULARITY_CACHEDIR path if set, None if not
+ - **progress** (_Progress_) – Rich progress bar instance to add tasks to.
+
+#### `singularity_image_filenames(container){:python}`
+
+Check Singularity cache for image, copy to destination folder if found.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Can be direct download URL
+ or a Docker Hub repository ID.
+- **Returns:**
+ Returns True if we have the image in the target location.
+  Returns a download path if not.
+- **Return type:**
+ results (bool, str)
+
+#### `singularity_pull_image(container, out_path, cache_path, progress){:python}`
+
+Pull a singularity image using `singularity pull`
+
+Attempt to use a local installation of singularity to pull the image.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `nfcore/name:version`.
+- **Raises:**
+  Various exceptions possible from the subprocess execution of Singularity.
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
diff --git a/src/content/tools/docs/2.0.1/api/index.md b/src/content/tools/docs/2.0.1/api/index.md
new file mode 100644
index 0000000000..958932acef
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/index.md
@@ -0,0 +1,55 @@
+# API Reference
+
+# Tests:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+ - [`PipelineCreate`](create#nf_core.create.PipelineCreate)
+- [nf_core.download](download)
+ - [`DownloadProgress`](download#nf_core.download.DownloadProgress)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+ - [`Launch`](launch#nf_core.launch.Launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+ - [`PipelineSync`](sync#nf_core.sync.PipelineSync)
+ - [`PullRequestException`](sync#nf_core.sync.PullRequestException)
+ - [`SyncException`](sync#nf_core.sync.SyncException)
+- [nf_core.utils](utils)
+ - [`Pipeline`](utils#nf_core.utils.Pipeline)
+ - [`anaconda_package()`](utils#nf_core.utils.anaconda_package)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`custom_yaml_dumper()`](utils#nf_core.utils.custom_yaml_dumper)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`get_biocontainer_tag()`](utils#nf_core.utils.get_biocontainer_tag)
+ - [`get_repo_releases_branches()`](utils#nf_core.utils.get_repo_releases_branches)
+ - [`github_api_auto_auth()`](utils#nf_core.utils.github_api_auto_auth)
+ - [`is_file_binary()`](utils#nf_core.utils.is_file_binary)
+ - [`is_pipeline_directory()`](utils#nf_core.utils.is_pipeline_directory)
+ - [`load_tools_config()`](utils#nf_core.utils.load_tools_config)
+ - [`nextflow_cmd()`](utils#nf_core.utils.nextflow_cmd)
+ - [`parse_anaconda_licence()`](utils#nf_core.utils.parse_anaconda_licence)
+ - [`pip_package()`](utils#nf_core.utils.pip_package)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`prompt_pipeline_release_branch()`](utils#nf_core.utils.prompt_pipeline_release_branch)
+ - [`prompt_remote_pipeline_name()`](utils#nf_core.utils.prompt_remote_pipeline_name)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
diff --git a/src/content/tools/docs/2.0.1/api/launch.md b/src/content/tools/docs/2.0.1/api/launch.md
new file mode 100644
index 0000000000..8affe0fa4d
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/launch.md
@@ -0,0 +1,87 @@
+# nf_core.launch
+
+Launch a pipeline, interactively collecting params
+
+### _`class{:python}`_`nf_core.launch.Launch(pipeline=None, revision=None, command_only=False, params_in=None, params_out=None, save_all=False, show_hidden=False, url=None, web_id=None){:python}`
+
+Bases: `object`
+
+Class to hold config options to launch a pipeline.
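+
+A hedged sketch of an interactive launch; the arguments are placeholders and only constructor arguments listed in the signature above are used:
+
+```python
+import nf_core.launch
+
+launcher = nf_core.launch.Launch(
+    pipeline="nf-core/rnaseq",
+    revision="3.0",
+    params_out="nf-params.json",
+)
+launcher.launch_pipeline()  # prompt for params, then optionally run Nextflow
+```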
+
+#### `build_command(){:python}`
+
+Build the nextflow run command based on what we know
+
+#### `get_pipeline_schema(){:python}`
+
+Load and validate the schema from the supplied pipeline
+
+#### `get_web_launch_response(){:python}`
+
+Given a URL for a web-gui launch response, recursively query it until results are ready.
+
+#### `launch_pipeline(){:python}`
+
+#### `launch_web_gui(){:python}`
+
+Send schema to nf-core website and launch input GUI
+
+#### `launch_workflow(){:python}`
+
+Launch nextflow if required
+
+#### `merge_nxf_flag_schema(){:python}`
+
+Take the Nextflow flag schema and merge it with the pipeline schema
+
+#### `print_param_header(param_id, param_obj, is_group=False){:python}`
+
+#### `prompt_group(group_id, group_obj){:python}`
+
+Prompt for edits to a group of parameters (subschema in ‘definitions’)
+
+- **Parameters:**
+  - **group_id** – Parameter ID (string)
+ - **group_obj** – JSON Schema keys (dict)
+- **Returns:**
+  Dict of `param_id: val` answers
+
+#### `prompt_param(param_id, param_obj, is_required, answers){:python}`
+
+Prompt for a single parameter
+
+#### `prompt_schema(){:python}`
+
+Go through the pipeline schema and prompt user to change defaults
+
+#### `prompt_web_gui(){:python}`
+
+Ask whether to use the web-based or cli wizard to collect params
+
+#### `sanitise_web_response(){:python}`
+
+The web builder returns everything as strings.
+Use the functions defined in the cli wizard to convert to the correct types.
+
+#### `set_schema_inputs(){:python}`
+
+Take the loaded schema and set the defaults as the input parameters
+If a nf_params.json file is supplied, apply these over the top
+
+#### `single_param_to_questionary(param_id, param_obj, answers=None, print_help=True){:python}`
+
+Convert a JSONSchema param to a Questionary question
+
+- **Parameters:**
+ - **param_id** – Parameter ID (string)
+ - **param_obj** – JSON Schema keys (dict)
+ - **answers** – Optional preexisting answers (dict)
+ - **print_help** – If description and help_text should be printed (bool)
+- **Returns:**
+ Single Questionary dict, to be appended to questions list
+
+#### `strip_default_params(){:python}`
+
+Strip parameters if they have not changed from the default
diff --git a/src/content/tools/docs/2.0.1/api/licences.md b/src/content/tools/docs/2.0.1/api/licences.md
new file mode 100644
index 0000000000..578ddaae1d
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/licences.md
@@ -0,0 +1,37 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPi.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
diff --git a/src/content/tools/docs/2.0.1/api/lint.md b/src/content/tools/docs/2.0.1/api/lint.md
new file mode 100644
index 0000000000..58da46f58a
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/lint.md
@@ -0,0 +1,138 @@
+# nf_core.lint
+
+:::note
+See the [Lint Tests](../lint_tests/index.html) docs for information about specific linting functions.
+:::
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, fix=(), key=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
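+
+A hedged example using some of the optional arguments documented above (the path is a placeholder; the test names are taken from the lint test docs elsewhere in this documentation):
+
+```python
+import nf_core.lint
+
+# Run only two named lint tests and also report the passing ones
+lint_obj = nf_core.lint.run_linting(
+    "/path/to/nf-core-mypipeline",
+    release_mode=False,
+    key=("files_exist", "files_unchanged"),
+    show_passed=True,
+)
+```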
+
+### _`class{:python}`_`nf_core.lint.PipelineLint(wf_path, release_mode=False, fix=(), key=(), fail_ignored=False){:python}`
+
+Bases: [`Pipeline`](utils#nf_core.utils.Pipeline)
+
+Object to hold linting information and results.
+
+Inherits [`nf_core.utils.Pipeline`](utils#nf_core.utils.Pipeline) class.
+
+Use the [`PipelineLint._lint_pipeline()`](#nf_core.lint.PipelineLint._lint_pipeline) function to run lint tests.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test name>, <message>)`
+
+- **Type:**
+ list
+
+#### `ignored{:python}`
+
+A list of tuples of the form: `(<test name>, <message>)`
+
+- **Type:**
+ list
+
+#### `lint_config{:python}`
+
+The parsed nf-core linting config for this pipeline
+
+- **Type:**
+ dict
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test name>, <message>)`
+
+- **Type:**
+ list
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(<test name>, <message>)`
+
+- **Type:**
+ list
+
+#### `_get_results_md(){:python}`
+
+Create a markdown file suitable for posting in a GitHub comment.
+
+- **Returns:**
+  Formatted markdown content
+- **Return type:**
+ markdown (str)
+
+#### `_lint_pipeline(){:python}`
+
+Main linting function.
+
+Takes the pipeline directory as the primary input and iterates through
+the different linting checks in order. Collects any warnings or errors
+into object attributes: `passed`, `ignored`, `warned` and `failed`.
+
+#### `_print_results(show_passed){:python}`
+
+Print linting results to the command line.
+
+Uses the `rich` library to print a set of formatted tables to the command line
+summarising the linting results.
+
+#### `_save_json_results(json_fn){:python}`
+
+Function to dump lint results to a JSON file for downstream use
+
+- **Parameters:**
+ **json_fn** (_str_) – File path to write JSON to.
+
+#### `_strip_ansi_codes(string, replace_with=''){:python}`
+
+Strip ANSI colouring codes from a string to return plain text.
+
+Solution found on Stack Overflow.
+
+#### `_wrap_quotes(files){:python}`
+
+Helper function to take a list of filenames and format with markdown.
+
+- **Parameters:**
+ **files** (_list_) –
+
+ List of filenames, eg:
+
+ ```default
+ ['foo', 'bar', 'baz']
+ ```
+
+- **Returns:**
+ Formatted string of paths separated by word `or`, eg:
+ ```default
+  `foo` or `bar` or `baz`
+ ```
+- **Return type:**
+ markdown (str)
diff --git a/src/content/tools/docs/2.0.1/api/list.md b/src/content/tools/docs/2.0.1/api/list.md
new file mode 100644
index 0000000000..2fc6448de0
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/list.md
@@ -0,0 +1,99 @@
+# nf_core.list
+
+Lists available nf-core pipelines and versions.
+
+### _`class{:python}`_`nf_core.list.LocalWorkflow(name){:python}`
+
+Bases: `object`
+
+Class to handle local workflows pulled by nextflow
+
+#### `get_local_nf_workflow_details(){:python}`
+
+Get full details about a local cached workflow
+
+### _`class{:python}`_`nf_core.list.RemoteWorkflow(data){:python}`
+
+Bases: `object`
+
+An information container for a remote workflow.
+
+- **Parameters:**
+  **data** (_dict_) – Workflow information as retrieved from the GitHub repository REST API request.
+
+### _`class{:python}`_`nf_core.list.Workflows(filter_by=None, sort_by='release', show_archived=False){:python}`
+
+Bases: `object`
+
+Workflow container class.
+
+Is used to collect local and remote nf-core pipelines. Pipelines
+can be sorted, filtered and compared.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+
+#### `compare_remote_local(){:python}`
+
+Matches local to remote workflows.
+
+If a matching remote workflow is found, the local workflow’s Git commit hash is compared
+with the latest one from remote.
+
+A boolean flag in `RemoteWorkflow.local_is_latest` is set to True, if the local workflow
+is the latest.
+
+#### `filtered_workflows(){:python}`
+
+Filters remote workflows for keywords.
+
+- **Returns:**
+ Filtered remote workflows.
+- **Return type:**
+ list
+
+#### `get_local_nf_workflows(){:python}`
+
+Retrieves local Nextflow workflows.
+
+Local workflows are stored in `self.local_workflows` list.
+
+#### `get_remote_workflows(){:python}`
+
+Retrieves remote workflows from [nf-co.re](https://nf-co.re).
+
+Remote workflows are stored in `self.remote_workflows` list.
+
+#### `print_json(){:python}`
+
+Dump JSON of all parsed information
+
+#### `print_summary(){:python}`
+
+Prints a summary of all pipelines.
+
+### `nf_core.list.get_local_wf(workflow, revision=None){:python}`
+
+Check if this workflow has a local copy and use nextflow to pull it if not
+
+### `nf_core.list.list_workflows(filter_by=None, sort_by='release', as_json=False, show_archived=False){:python}`
+
+Prints out a list of all nf-core workflows.
+
+- **Parameters:**
+ - **filter_by** (_list_) – A list of strings that can be used for filtering.
+ - **sort_by** (_str_) – workflows can be sorted by keywords. Keyword must be one of
+ release (default), name, stars.
+ - **as_json** (_boolean_) – Set to true, if the lists should be printed in JSON.
+
+### `nf_core.list.pretty_date(time){:python}`
+
+Transforms a datetime object or an int() Epoch timestamp into a
+pretty string like ‘an hour ago’, ‘Yesterday’, ‘3 months ago’,
+‘just now’, etc
+
+Based on
+Adapted by sven1103
diff --git a/src/content/tools/docs/2.0.1/api/modules.md b/src/content/tools/docs/2.0.1/api/modules.md
new file mode 100644
index 0000000000..2dd47b6359
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/modules.md
@@ -0,0 +1 @@
+# nf_core.modules
diff --git a/src/content/tools/docs/2.0.1/api/schema.md b/src/content/tools/docs/2.0.1/api/schema.md
new file mode 100644
index 0000000000..bbb2b2e39b
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/schema.md
@@ -0,0 +1,113 @@
+# nf_core.schema
+
+Code to deal with pipeline JSON Schema
+
+### _`class{:python}`_`nf_core.schema.PipelineSchema{:python}`
+
+Bases: `object`
+
+Class to generate a schema object with
+functions to handle pipeline JSON Schema
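+
+An illustrative sketch of validating a parameters file against a pipeline schema, using only methods documented on this page (paths and file names are placeholders):
+
+```python
+import nf_core.schema
+
+schema_obj = nf_core.schema.PipelineSchema()
+schema_obj.get_schema_path("/path/to/nf-core-mypipeline")  # locate nextflow_schema.json
+schema_obj.load_lint_schema()                              # load and sanity-check the schema
+schema_obj.load_input_params("nf-params.json")             # params as used with -params-file
+schema_obj.validate_params()                               # check the params against the schema
+```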
+
+#### `add_schema_found_configs(){:python}`
+
+Add anything that’s found in the Nextflow params that’s missing in the pipeline schema
+
+#### `build_schema(pipeline_dir, no_prompts, web_only, url){:python}`
+
+Interactively build a new pipeline schema for a pipeline
+
+#### `build_schema_param(p_val){:python}`
+
+Build a pipeline schema dictionary for a param interactively
+
+#### `get_schema_defaults(){:python}`
+
+Generate set of default input parameters from schema.
+
+Saves defaults to self.schema_defaults
+Returns count of how many parameters were found (with or without a default value)
+
+#### `get_schema_path(path, local_only=False, revision=None){:python}`
+
+Given a pipeline name, directory, or path, set self.schema_filename
+
+#### `get_web_builder_response(){:python}`
+
+Given a URL for a Schema build response, recursively query it until results are ready.
+Once ready, validate Schema and write to disk.
+
+#### `get_wf_params(){:python}`
+
+Load the pipeline parameter defaults using nextflow config
+Strip out only the `params.` values and ignore anything that is not a flat variable
+
+#### `launch_web_builder(){:python}`
+
+Send pipeline schema to web builder and wait for response
+
+#### `load_input_params(params_path){:python}`
+
+Load a given path to a parameters file (JSON/YAML)
+
+These should be input parameters used to run a pipeline with
+the Nextflow -params-file option.
+
+#### `load_lint_schema(){:python}`
+
+Load and lint a given schema to see if it looks valid
+
+#### `load_schema(){:python}`
+
+Load a pipeline schema from a file
+
+#### `make_skeleton_schema(){:python}`
+
+Make a new pipeline schema from the template
+
+#### `prompt_remove_schema_notfound_config(p_key){:python}`
+
+Check if a given key is found in the nextflow config params and prompt to remove it if not
+
+Returns True if it should be removed, False if not.
+
+#### `remove_schema_notfound_configs(){:python}`
+
+Go through top-level schema and all definitions sub-schemas to remove
+anything that’s not in the nextflow config.
+
+#### `remove_schema_notfound_configs_single_schema(schema){:python}`
+
+Go through a single schema / set of properties and strip out
+anything that’s not in the nextflow config.
+
+Takes: Schema or sub-schema with properties key
+Returns: Cleaned schema / sub-schema
+
+#### `sanitise_param_default(param){:python}`
+
+Given a param, ensure that the default value is the correct variable type
+
+#### `save_schema(){:python}`
+
+Save a pipeline schema to a file
+
+#### `validate_default_params(){:python}`
+
+Check that all default parameters in the schema are valid
+Ignores ‘required’ flag, as required parameters might have no defaults
+
+#### `validate_params(){:python}`
+
+Check given parameters against a schema and validate
+
+#### `validate_schema(schema=None){:python}`
+
+Check that the Schema is valid
+
+Returns: Number of parameters found
+
+#### `validate_schema_title_description(schema=None){:python}`
+
+Extra validation command for linting.
+Checks that the schema “$id”, “title” and “description” attributes match the pipeline config.
diff --git a/src/content/tools/docs/2.0.1/api/sync.md b/src/content/tools/docs/2.0.1/api/sync.md
new file mode 100644
index 0000000000..e327553c3f
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/sync.md
@@ -0,0 +1,151 @@
+# nf_core.sync
+
+Synchronise a pipeline TEMPLATE branch with the template.
+
+### _`class{:python}`_`nf_core.sync.PipelineSync(pipeline_dir, from_branch=None, make_pr=False, gh_repo=None, gh_username=None){:python}`
+
+Bases: `object`
+
+Object to hold syncing information and results.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **from_branch** (_str_) – The branch to use to fetch config vars. If not set, will use current active branch
+ - **make_pr** (_bool_) – Set this to True to create a GitHub pull-request with the changes
+ - **gh_username** (_str_) – GitHub username
+ - **gh_repo** (_str_) – GitHub repository name
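+
+A hedged usage sketch (the path and GitHub details are placeholders):
+
+```python
+import nf_core.sync
+
+syncer = nf_core.sync.PipelineSync(
+    "/path/to/nf-core-mypipeline",
+    make_pr=True,
+    gh_username="jane-doe",
+    gh_repo="nf-core-mypipeline",
+)
+syncer.sync()  # rebuild the TEMPLATE branch and, if requested, open a pull request
+```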
+
+#### `pipeline_dir{:python}`
+
+Path to target pipeline directory
+
+- **Type:**
+ str
+
+#### `from_branch{:python}`
+
+Repo branch to use when collecting workflow variables. Default: active branch.
+
+- **Type:**
+ str
+
+#### `original_branch{:python}`
+
+Repo branch that was checked out before we started.
+
+- **Type:**
+ str
+
+#### `made_changes{:python}`
+
+Whether making the new template pipeline introduced any changes
+
+- **Type:**
+ bool
+
+#### `make_pr{:python}`
+
+Whether to try to automatically make a PR on GitHub.com
+
+- **Type:**
+ bool
+
+#### `required_config_vars{:python}`
+
+List of nextflow variables required to make template pipeline
+
+- **Type:**
+ list
+
+#### `gh_username{:python}`
+
+GitHub username
+
+- **Type:**
+ str
+
+#### `gh_repo{:python}`
+
+GitHub repository name
+
+- **Type:**
+ str
+
+#### `checkout_template_branch(){:python}`
+
+Try to check out the origin/TEMPLATE in a new TEMPLATE branch.
+If this fails, try to check out an existing local TEMPLATE branch.
+
+#### `close_open_pr(pr){:python}`
+
+Given a PR API response, add a comment and close.
+
+#### `close_open_template_merge_prs(){:python}`
+
+Get all template merging branches (starting with ‘nf-core-template-merge-‘)
+and check for any open PRs from these branches to the self.from_branch.
+If open PRs are found, add a comment and close them.
+
+#### `commit_template_changes(){:python}`
+
+If we have any changes with the new template files, make a git commit
+
+#### `create_merge_base_branch(){:python}`
+
+Create a new branch from the updated TEMPLATE branch
+This branch will then be used to create the PR
+
+#### `delete_template_branch_files(){:python}`
+
+Delete all files in the TEMPLATE branch
+
+#### `get_wf_config(){:python}`
+
+Check out the target branch if requested and fetch the nextflow config.
+Check that we have the required config variables.
+
+#### `inspect_sync_dir(){:python}`
+
+Takes a look at the target directory for syncing. Checks that it’s a git repo
+and makes sure that there are no uncommitted changes.
+
+#### `make_pull_request(){:python}`
+
+Create a pull request to a base branch (default: dev),
+from a head branch (default: TEMPLATE)
+
+Returns: An instance of class requests.Response
+
+#### `make_template_pipeline(){:python}`
+
+Delete all files and make a fresh template using the workflow variables
+
+#### `push_merge_branch(){:python}`
+
+Push the newly created merge branch to the remote repository
+
+#### `push_template_branch(){:python}`
+
+If we made any changes, push the TEMPLATE branch to the default remote
+and try to make a PR. If we don’t have the auth token, try to figure out a URL
+for the PR and print this to the console.
+
+#### `reset_target_dir(){:python}`
+
+Reset the target pipeline directory. Check out the original branch.
+
+#### `sync(){:python}`
+
+Find workflow attributes, create a new template pipeline on TEMPLATE
+
+### _`exception{:python}`_`nf_core.sync.PullRequestException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error creating a Pull-Request on GitHub.com
+
+### _`exception{:python}`_`nf_core.sync.SyncException{:python}`
+
+Bases: `Exception`
+
+Exception raised when there was an error with TEMPLATE branch synchronisation
diff --git a/src/content/tools/docs/2.0.1/api/utils.md b/src/content/tools/docs/2.0.1/api/utils.md
new file mode 100644
index 0000000000..1895d7da6f
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/api/utils.md
@@ -0,0 +1,264 @@
+# nf_core.utils
+
+Common utility functions for the nf-core python package.
+
+### _`class{:python}`_`nf_core.utils.Pipeline(wf_path){:python}`
+
+Bases: `object`
+
+Object to hold information about a local pipeline.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `conda_config{:python}`
+
+The parsed conda configuration file content (`environment.yml`).
+
+- **Type:**
+ dict
+
+#### `conda_package_info{:python}`
+
+The conda package(s) information, based on the API requests to Anaconda cloud.
+
+- **Type:**
+ dict
+
+#### `nf_config{:python}`
+
+The Nextflow pipeline configuration file content.
+
+- **Type:**
+ dict
+
+#### `files{:python}`
+
+A list of files found during the linting process.
+
+- **Type:**
+ list
+
+#### `git_sha{:python}`
+
+The git sha for the repo commit / current GitHub pull-request ($GITHUB_PR_COMMIT)
+
+- **Type:**
+ str
+
+#### `minNextflowVersion{:python}`
+
+The minimum required Nextflow version to run the pipeline.
+
+- **Type:**
+ str
+
+#### `wf_path{:python}`
+
+Path to the pipeline directory.
+
+- **Type:**
+ str
+
+#### `pipeline_name{:python}`
+
+The pipeline name, without the nf-core tag, for example hlatyping.
+
+- **Type:**
+ str
+
+#### `schema_obj{:python}`
+
+A `PipelineSchema` object
+
+- **Type:**
+ obj
+
+#### `_fp(fn){:python}`
+
+Convenience function to get full path to a file in the pipeline
+
+#### `_list_files(){:python}`
+
+Get a list of all files in the pipeline
+
+#### `_load(){:python}`
+
+Run core load functions
+
+#### `_load_conda_environment(){:python}`
+
+Try to load the pipeline environment.yml file, if it exists
+
+#### `_load_pipeline_config(){:python}`
+
+Get the nextflow config for this pipeline
+
+Once loaded, set a few convenience reference class attributes
+
+### `nf_core.utils.anaconda_package(dep, dep_channels=['conda-forge', 'bioconda', 'defaults']){:python}`
+
+Query conda package information.
+
+Sends a HTTP GET request to the Anaconda remote API.
+
+- **Parameters:**
+ - **dep** (_str_) – A conda package name.
+ - **dep_channels** (_list_) – list of conda channels to use
+- **Raises:**
+  - **LookupError** – if the connection fails, times out or gives an unexpected status code.
+  - **ValueError** – if the package name cannot be found (404).
+
+### `nf_core.utils.check_if_outdated(current_version=None, remote_version=None, source_url='https://nf-co.re/tools_version'){:python}`
+
+Check if the current version of nf-core is outdated
+
+### `nf_core.utils.custom_yaml_dumper(){:python}`
+
+Overwrite default PyYAML output to make Prettier YAML linting happy
+
+### `nf_core.utils.fetch_wf_config(wf_path){:python}`
+
+Uses Nextflow to retrieve the configuration variables
+from a Nextflow workflow.
+
+- **Parameters:**
+ **wf_path** (_str_) – Nextflow workflow file system path.
+- **Returns:**
+ Workflow configuration settings.
+- **Return type:**
+ dict
+
+### `nf_core.utils.get_biocontainer_tag(package, version){:python}`
+
+Given a bioconda package and version, looks for Docker and Singularity containers
+using the biocontainers API, e.g. `/{tool}/versions/{tool}-{version}`.
+Returns the most recent container versions by default.
+
+- **Parameters:**
+  - **package** (_str_) – A bioconda package name.
+  - **version** (_str_) – Version of the bioconda package.
+- **Raises:**
+  - **LookupError** – if the connection fails, times out or gives an unexpected status code.
+  - **ValueError** – if the package name cannot be found (404).
+
+### `nf_core.utils.get_repo_releases_branches(pipeline, wfs){:python}`
+
+Fetches details of a nf-core workflow to download.
+
+- **Parameters:**
+ - **pipeline** (_str_) – GitHub repo username/repo
+ - **wfs** – A nf_core.list.Workflows() object, where get_remote_workflows() has been called.
+- **Returns:**
+ Array of releases, Array of branches
+- **Return type:**
+ wf_releases, wf_branches (tuple)
+- **Raises:**
+  **LookupError** – if the pipeline cannot be found.
+
+### `nf_core.utils.github_api_auto_auth(){:python}`
+
+### `nf_core.utils.is_file_binary(path){:python}`
+
+Check file path to see if it is a binary file
+
+### `nf_core.utils.is_pipeline_directory(wf_path){:python}`
+
+Checks if the specified directory has the minimum required files
+(‘main.nf’, ‘nextflow.config’) for a pipeline directory
+
+- **Parameters:**
+ **wf_path** (_str_) – The directory to be inspected
+- **Raises:**
+  **UserWarning** – If one of the files is missing
+
+### `nf_core.utils.load_tools_config(dir='.'){:python}`
+
+Parse the nf-core.yml configuration file
+
+Look for a file called either .nf-core.yml or .nf-core.yaml
+
+Also looks for the deprecated file .nf-core-lint.yml/yaml and issues
+a warning that this file will be deprecated in the future
+
+Returns the loaded config dict or False, if the file couldn’t be loaded
+
+### `nf_core.utils.nextflow_cmd(cmd){:python}`
+
+Run a Nextflow command and capture the output. Handle errors nicely
+
+### `nf_core.utils.parse_anaconda_licence(anaconda_response, version=None){:python}`
+
+Given a response from the anaconda API using anaconda_package, parse the software licences.
+
+Returns: Set of licence types
+
+### `nf_core.utils.pip_package(dep){:python}`
+
+Query PyPI package information.
+
+Sends a HTTP GET request to the PyPI remote API.
+
+- **Parameters:**
+ **dep** (_str_) – A PyPI package name.
+- **Raises:**
+  - **LookupError** – if the connection fails or times out.
+  - **ValueError** – if the package name cannot be found.
+
+### `nf_core.utils.poll_nfcore_web_api(api_url, post_data=None){:python}`
+
+Poll the nf-core website API
+
+Takes argument api_url for URL
+
+Expects the API response to be valid JSON and contain a top-level ‘status’ key.
+
+### `nf_core.utils.prompt_pipeline_release_branch(wf_releases, wf_branches){:python}`
+
+Prompt for pipeline release / branch
+
+- **Parameters:**
+ - **wf_releases** (_array_) – Array of repo releases as returned by the GitHub API
+ - **wf_branches** (_array_) – Array of repo branches, as returned by the GitHub API
+- **Returns:**
+ Selected release / branch name
+- **Return type:**
+ choice (str)
+
+### `nf_core.utils.prompt_remote_pipeline_name(wfs){:python}`
+
+Prompt for the pipeline name with questionary
+
+- **Parameters:**
+ **wfs** – A nf_core.list.Workflows() object, where get_remote_workflows() has been called.
+- **Returns:**
+ GitHub repo - username/repo
+- **Return type:**
+ pipeline (str)
+- **Raises:**
+  **AssertionError** – if the pipeline cannot be found.
+
+### `nf_core.utils.rich_force_colors(){:python}`
+
+Check if any environment variables are set to force Rich to use coloured output
+
+### `nf_core.utils.setup_requests_cachedir(){:python}`
+
+Sets up local caching for faster remote HTTP requests.
+
+Caching directory will be set up in the user’s home directory under
+a .nfcore_cache subdir.
+
+### `nf_core.utils.wait_cli_function(poll_func, poll_every=20){:python}`
+
+Display a command-line spinner while calling a function repeatedly.
+
+Keep waiting until that function returns True
+
+- **Parameters:**
+ - **poll_func** (_function_) – Function to call
+ - **poll_every** (_int_) – How many tenths of a second to wait between function calls. Default: 20.
+- **Returns:**
+  None. Just sits in an infinite loop until the function returns True.
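+
+A tiny hedged illustration (the polling function below is invented for the example):
+
+```python
+import time
+
+import nf_core.utils
+
+start = time.time()
+
+def job_finished():
+    # Pretend some remote job completes after ~5 seconds
+    return time.time() - start > 5
+
+# Show a spinner, calling job_finished() every 2 seconds (20 tenths) until it returns True
+nf_core.utils.wait_cli_function(job_finished, poll_every=20)
+```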
diff --git a/src/content/tools/docs/2.0.1/index.md b/src/content/tools/docs/2.0.1/index.md
new file mode 100644
index 0000000000..c3d07a7628
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/index.md
@@ -0,0 +1,13 @@
+# nf-core/tools documentation
+
+This documentation is for the `nf-core/tools` package.
+
+Primarily, it describes the different [code lint tests](lint_tests/index.html)
+run by `nf-core lint` (typically visited by a developer when their pipeline fails a given
+test), and also provides a reference for the `nf_core` [Python package API](api/index.html).
+
+# Indices and tables
+
+- [Index](genindex)
+- [Module Index](py-modindex)
+- [Search Page](search)
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_awsfulltest.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_awsfulltest.md
new file mode 100644
index 0000000000..9973427a79
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_awsfulltest.md
@@ -0,0 +1,30 @@
+# actions_awsfulltest
+
+#### `PipelineLint.actions_awsfulltest(){:python}`
+
+Checks the GitHub Actions awsfulltest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on full size datasets on AWS.
+This should ensure that the pipeline runs as expected on AWS and provide a resource estimation.
+
+The GitHub Actions workflow is called `awsfulltest.yml`, and it can be found in the `.github/workflows/` directory.
+
+:::warning
+This workflow incurs AWS costs, therefore it should only be triggered for pipeline releases:
+`release` (after the pipeline release) and `workflow_dispatch`.
+:::
+
+:::note
+You can manually trigger the AWS tests by going to the Actions tab on the pipeline GitHub repository and selecting the
+nf-core AWS full size tests workflow on the left.
+:::
+
+:::note
+For tests on full data prior to release, the [Nextflow Tower](https://tower.nf) launch feature can be employed.
+:::
+
+The `.github/workflows/awsfulltest.yml` file is tested for the following:
+
+- Must be turned on `workflow_dispatch`.
+- Must be turned on for `release` with `types: [published]`
+- Should run the profile `test_full` that should be edited to provide the links to full-size datasets. If it runs the profile `test`, a warning is given.
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_awstest.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_awstest.md
new file mode 100644
index 0000000000..4e4698c4e9
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_awstest.md
@@ -0,0 +1,24 @@
+# actions_awstest
+
+#### `PipelineLint.actions_awstest(){:python}`
+
+Checks the GitHub Actions awstest is valid.
+
+In addition to small test datasets run on GitHub Actions, we provide the possibility of testing the pipeline on AWS.
+This should ensure that the pipeline runs as expected on AWS (which often has its own unique edge cases).
+
+:::warning
+Running tests on AWS incurs costs, so these tests are not triggered automatically.
+Instead, they use the `workflow_dispatch` trigger, which allows for manual triggering
+of the workflow when testing on AWS is desired.
+:::
+
+:::note
+You can trigger the tests by going to the Actions tab on the pipeline GitHub repository
+and selecting the nf-core AWS test workflow on the left.
+:::
+
+The `.github/workflows/awstest.yml` file is tested for the following:
+
+- Must _not_ be turned on for `push` or `pull_request`.
+- Must be turned on for `workflow_dispatch`.
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_ci.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_ci.md
new file mode 100644
index 0000000000..34f2096391
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_ci.md
@@ -0,0 +1,63 @@
+# actions_ci
+
+#### `PipelineLint.actions_ci(){:python}`
+
+Checks that the GitHub Actions pipeline CI (Continuous Integration) workflow is valid.
+
+The `.github/workflows/ci.yml` GitHub Actions workflow runs the pipeline on a minimal test
+dataset using `-profile test` to check that no breaking changes have been introduced.
+Final result files are not checked, just that the pipeline exits successfully.
+
+This lint test checks this GitHub Actions workflow file for the following:
+
+- Workflow must be triggered on the following events:
+ ```yaml
+ on:
+ push:
+ branches:
+ - dev
+ pull_request:
+ release:
+ types: [published]
+ ```
+- The minimum Nextflow version specified in the pipeline’s `nextflow.config` matches that defined by `nxf_ver` in the test matrix:
+
+ ```yaml
+ strategy:
+ matrix:
+ # Nextflow versions: check pipeline minimum and current latest
+ nxf_ver: ['19.10.0', '']
+ ```
+
+ :::note
+ These `matrix` variables run the test workflow twice, varying the `nxf_ver` variable each time.
+ This is used in the `nextflow run` commands to test the pipeline with both the latest available version
+ of the pipeline (`''`) and the stated minimum required version.
+ :::
+
+- The Docker container for the pipeline must use the correct pipeline version number:
+
+ > - Development pipelines:
+ > ```bash
+  >   docker tag nfcore/<pipeline>:dev nfcore/<pipeline>:dev
+ > ```
+ > - Released pipelines:
+ > ```bash
+  >   docker tag nfcore/<pipeline>:dev nfcore/<pipeline>:<tag>
+ > ```
+ > - Complete example for a released pipeline called _nf-core/example_ with version number `1.0.0`:
+ > ```yaml
+ > - name: Build new docker image
+ > if: env.GIT_DIFF
+ > run: docker build --no-cache . -t nfcore/example:1.0.0
+  >
+  >   - name: Pull docker image
+  >     if: ${{ !env.GIT_DIFF }}
+  >     run: |
+  >       docker pull nfcore/example:dev
+  >       docker tag nfcore/example:dev nfcore/example:1.0.0
+  >   ```
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_schema_validation.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_schema_validation.md
new file mode 100644
index 0000000000..518037bc74
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/actions_schema_validation.md
@@ -0,0 +1,12 @@
+# actions_schema_validation
+
+#### `PipelineLint.actions_schema_validation(){:python}`
+
+Checks that the GitHub Action workflow yml/yaml files adhere to the correct schema
+
+nf-core pipelines use GitHub actions workflows to run CI tests, check formatting and also linting, among others.
+These workflows are defined by `yml` scripts in `.github/workflows/`. This lint test verifies that these scripts are valid
+by comparing them against the [JSON schema for GitHub workflows](https://json.schemastore.org/github-workflow).
+
+To pass this test, make sure that all your workflows contain the required properties `on` and `jobs` and that
+all other properties are of the correct type, as specified in the schema (link above).
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/files_exist.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/files_exist.md
new file mode 100644
index 0000000000..e4e5cb0a57
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/files_exist.md
@@ -0,0 +1,85 @@
+# files_exist
+
+#### `PipelineLint.files_exist(){:python}`
+
+Checks a given pipeline directory for required files.
+
+Iterates through the pipeline’s directory content and checks that specified
+files are either present or absent, as required.
+
+:::note
+This test raises an `AssertionError` if neither `nextflow.config` nor `main.nf` is found.
+If these files are not found then this cannot be a Nextflow pipeline and something has gone badly wrong.
+All lint tests are stopped immediately with a critical error message.
+:::
+
+Files that _must_ be present:
+
+```bash
+.gitattributes
+.gitignore
+.markdownlint.yml
+.github/.dockstore.yml
+.github/CONTRIBUTING.md
+.github/ISSUE_TEMPLATE/bug_report.md
+.github/ISSUE_TEMPLATE/config.yml
+.github/ISSUE_TEMPLATE/feature_request.md
+.github/PULL_REQUEST_TEMPLATE.md
+.github/workflows/branch.yml
+.github/workflows/ci.yml
+.github/workflows/linting_comment.yml
+.github/workflows/linting.yml
+[LICENSE, LICENSE.md, LICENCE, LICENCE.md] # NB: British / American spelling
+assets/email_template.html
+assets/email_template.txt
+assets/nf-core-PIPELINE_logo.png
+assets/sendmail_template.txt
+bin/scrape_software_versions.py
+conf/modules.config
+conf/test.config
+conf/test_full.config
+CHANGELOG.md
+CITATIONS.md
+CODE_OF_CONDUCT.md
+docs/images/nf-core-PIPELINE_logo.png
+docs/output.md
+docs/README.md
+docs/usage.md
+lib/nfcore_external_java_deps.jar
+lib/NfcoreSchema.groovy
+lib/NfcoreTemplate.groovy
+lib/Utils.groovy
+lib/WorkflowMain.groovy
+modules/local/get_software_versions.nf
+nextflow_schema.json
+nextflow.config
+README.md
+```
+
+Files that _should_ be present:
+
+```bash
+main.nf
+assets/multiqc_config.yaml
+conf/base.config
+conf/igenomes.config
+.github/workflows/awstest.yml
+.github/workflows/awsfulltest.yml
+lib/WorkflowPIPELINE.groovy
+```
+
+Files that _must not_ be present:
+
+```bash
+Singularity
+parameters.settings.json
+bin/markdown_to_html.r
+conf/aws.config
+.github/workflows/push_dockerhub.yml
+```
+
+Files that _should not_ be present:
+
+```bash
+.travis.yml
+```
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/files_unchanged.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/files_unchanged.md
new file mode 100644
index 0000000000..9b7e597ec6
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/files_unchanged.md
@@ -0,0 +1,55 @@
+# files_unchanged
+
+#### `PipelineLint.files_unchanged(){:python}`
+
+Checks that certain pipeline files are not modified from template output.
+
+Iterates through the pipeline’s directory content and compares specified files
+against output from the template using the pipeline’s metadata. File content
+should not be modified / missing.
+
+Files that must be unchanged:
+
+```default
+.gitattributes
+.markdownlint.yml
+.github/.dockstore.yml
+.github/CONTRIBUTING.md
+.github/ISSUE_TEMPLATE/bug_report.md
+.github/ISSUE_TEMPLATE/config.yml
+.github/ISSUE_TEMPLATE/feature_request.md
+.github/PULL_REQUEST_TEMPLATE.md
+.github/workflows/branch.yml
+.github/workflows/linting_comment.yml
+.github/workflows/linting.yml
+assets/email_template.html
+assets/email_template.txt
+assets/nf-core-PIPELINE_logo.png
+assets/sendmail_template.txt
+CODE_OF_CONDUCT.md
+docs/images/nf-core-PIPELINE_logo.png
+docs/README.md
+lib/nfcore_external_java_deps.jar
+lib/NfcoreSchema.groovy
+lib/NfcoreTemplate.groovy
+[LICENSE, LICENSE.md, LICENCE, LICENCE.md] # NB: British / American spelling
+```
+
+Files that can have additional content but must include the template contents:
+
+```default
+.gitignore
+assets/multiqc_config.yaml
+```
+
+:::note
+You can configure the `nf-core lint` tests to ignore any of these checks by setting
+the `files_unchanged` key as follows in your linting config file. For example:
+
+```yaml
+files_unchanged:
+ - .github/workflows/branch.yml
+ - assets/multiqc_config.yaml
+```
+
+:::
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/index.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/index.md
new file mode 100644
index 0000000000..d73f5fbd15
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/index.md
@@ -0,0 +1,38 @@
+# Pipeline lint tests
+
+# Tests:
+
+- [actions_awsfulltest](actions_awsfulltest)
+ - [`PipelineLint.actions_awsfulltest()`](actions_awsfulltest#nf_core.lint.PipelineLint.actions_awsfulltest)
+- [actions_awstest](actions_awstest)
+ - [`PipelineLint.actions_awstest()`](actions_awstest#nf_core.lint.PipelineLint.actions_awstest)
+- [actions_ci](actions_ci)
+ - [`PipelineLint.actions_ci()`](actions_ci#nf_core.lint.PipelineLint.actions_ci)
+- [actions_schema_validation](actions_schema_validation)
+ - [`PipelineLint.actions_schema_validation()`](actions_schema_validation#nf_core.lint.PipelineLint.actions_schema_validation)
+- [files_exist](files_exist)
+ - [`PipelineLint.files_exist()`](files_exist#nf_core.lint.PipelineLint.files_exist)
+- [files_unchanged](files_unchanged)
+ - [`PipelineLint.files_unchanged()`](files_unchanged#nf_core.lint.PipelineLint.files_unchanged)
+- [merge_markers](merge_markers)
+ - [`PipelineLint.merge_markers()`](merge_markers#nf_core.lint.PipelineLint.merge_markers)
+- [modules_json](modules_json)
+ - [`PipelineLint.modules_json()`](modules_json#nf_core.lint.PipelineLint.modules_json)
+- [nextflow_config](nextflow_config)
+ - [`PipelineLint.nextflow_config()`](nextflow_config#nf_core.lint.PipelineLint.nextflow_config)
+- [pipeline_name_conventions](pipeline_name_conventions)
+ - [`PipelineLint.pipeline_name_conventions()`](pipeline_name_conventions#nf_core.lint.PipelineLint.pipeline_name_conventions)
+- [pipeline_todos](pipeline_todos)
+ - [`PipelineLint.pipeline_todos()`](pipeline_todos#nf_core.lint.PipelineLint.pipeline_todos)
+- [readme](readme)
+ - [`PipelineLint.readme()`](readme#nf_core.lint.PipelineLint.readme)
+- [schema_description](schema_description)
+ - [`PipelineLint.schema_description()`](schema_description#nf_core.lint.PipelineLint.schema_description)
+- [schema_lint](schema_lint)
+ - [`PipelineLint.schema_lint()`](schema_lint#nf_core.lint.PipelineLint.schema_lint)
+- [schema_params](schema_params)
+ - [`PipelineLint.schema_params()`](schema_params#nf_core.lint.PipelineLint.schema_params)
+- [template_strings](template_strings)
+ - [`PipelineLint.template_strings()`](template_strings#nf_core.lint.PipelineLint.template_strings)
+- [version_consistency](version_consistency)
+ - [`PipelineLint.version_consistency()`](version_consistency#nf_core.lint.PipelineLint.version_consistency)
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/merge_markers.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/merge_markers.md
new file mode 100644
index 0000000000..38076814b1
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/merge_markers.md
@@ -0,0 +1,8 @@
+# merge_markers
+
+#### `PipelineLint.merge_markers(){:python}`
+
+Check for remaining merge markers.
+
+This test looks for remaining merge markers in the code, e.g.:
+`>>>>>>>` or `<<<<<<<`
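+
+For example, an unresolved Git conflict left in a file typically looks like the block below and would be flagged by this test:
+
+```default
+<<<<<<< HEAD
+your version of the conflicting lines
+=======
+the incoming version of the conflicting lines
+>>>>>>> other-branch
+```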
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/modules_json.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/modules_json.md
new file mode 100644
index 0000000000..40c566e813
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/modules_json.md
@@ -0,0 +1,10 @@
+# modules_json
+
+#### `PipelineLint.modules_json(){:python}`
+
+Make sure all modules described in the `modules.json` file are actually installed
+
+Every module installed from `nf-core/modules` must have an entry in the `modules.json` file
+with an associated version git_sha hash.
+
+- Failure: If module entries are found in `modules.json` for modules that are not installed
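+
+As a rough sketch (the exact layout is managed by the `nf-core modules` commands and may differ between versions), each installed module is recorded together with a commit hash, along these lines:
+
+```json
+{
+  "name": "nf-core/example",
+  "repos": {
+    "nf-core/modules": {
+      "fastqc": {
+        "git_sha": "<commit hash of the installed module version>"
+      }
+    }
+  }
+}
+```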
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/nextflow_config.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/nextflow_config.md
new file mode 100644
index 0000000000..1e04ccd548
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/nextflow_config.md
@@ -0,0 +1,92 @@
+# nextflow_config
+
+#### `PipelineLint.nextflow_config(){:python}`
+
+Checks the pipeline configuration for required variables.
+
+All nf-core pipelines are required to be configured with a minimal set of variable
+names. This test fails or throws warnings if required variables are not set.
+
+:::note
+These config variables must be set in `nextflow.config` or another config
+file imported from there. Any variables set in nextflow script files (eg. `main.nf`)
+are not checked and will be assumed to be missing.
+:::
+
+**The following variables fail the test if missing:**
+
+- `params.outdir`: A directory in which all pipeline results should be saved
+- `manifest.name`: The pipeline name. Should begin with `nf-core/`
+- `manifest.description`: A description of the pipeline
+- `manifest.version`
+ - The version of this pipeline. This should correspond to a [GitHub release](https://help.github.com/articles/creating-releases/).
+ - If `--release` is set when running `nf-core lint`, the version number must not contain the string `dev`
+  - If `--release` is _not_ set, the version should end in `dev` (warning triggered if not)
+- `manifest.nextflowVersion`
+ - The minimum version of Nextflow required to run the pipeline.
+ - Should be `>=` or `!>=` and a version number, eg. `manifest.nextflowVersion = '>=0.31.0'` (see [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html#scope-manifest))
+ - `>=` warns about old versions but tries to run anyway, `!>=` fails for old versions. Only use the latter if you _know_ that the pipeline will certainly fail before this version.
+ - This should correspond to the `NXF_VER` version tested by GitHub Actions.
+- `manifest.homePage`
+ - The homepage for the pipeline. Should be the nf-core GitHub repository URL,
+ so beginning with `https://github.com/nf-core/`
+- `timeline.enabled`, `trace.enabled`, `report.enabled`, `dag.enabled`
+ - The nextflow timeline, trace, report and DAG should be enabled by default (set to `true`)
+- `process.cpus`, `process.memory`, `process.time`
+ - Default CPUs, memory and time limits for tasks
+- `params.input`
+ - Input parameter to specify input data, specify this to avoid a warning
+ - Typical usage:
+ - `params.input`: Input data that is not NGS sequencing data
+- `params.custom_config_version`
+  - Should always be set to the default value `master`
+- `params.custom_config_base`
+  - Should always be set to the default value:
+    `https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}`
+- `params.show_hidden_params`
+  - Determines whether boilerplate params are shown by the schema. Set to `false` by default
+- `params.schema_ignore_params`
+  - A comma-separated string of inputs that the schema validation should ignore.
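+
+For reference, a minimal `nextflow.config` excerpt setting several of the variables above might look like the following sketch (all values are illustrative, not the full nf-core template output):
+
+```groovy
+// Illustrative excerpt only
+params {
+    input                 = null
+    outdir                = './results'
+    custom_config_version = 'master'
+    custom_config_base    = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}"
+}
+
+timeline { enabled = true }
+report   { enabled = true }
+trace    { enabled = true }
+dag      { enabled = true }
+
+manifest {
+    name            = 'nf-core/example'
+    description     = 'An example pipeline'
+    homePage        = 'https://github.com/nf-core/example'
+    version         = '1.0dev'
+    nextflowVersion = '>=21.04.0'
+    mainScript      = 'main.nf'
+}
+```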
+
+**The following variables throw warnings if missing:**
+
+- `manifest.mainScript`: The filename of the main pipeline script (should be `main.nf`)
+- `timeline.file`, `trace.file`, `report.file`, `dag.file`
+ - Default filenames for the timeline, trace and report
+ - The DAG file path should end with `.svg` (If Graphviz is not installed, Nextflow will generate a `.dot` file instead)
+
+**The following variables are deprecated and fail the test if they are still present:**
+
+- `params.version`: The old method for specifying the pipeline version. Replaced by `manifest.version`
+- `params.nf_required_version`: The old method for specifying the minimum Nextflow version. Replaced by `manifest.nextflowVersion`
+- `params.container`: The old method for specifying the dockerhub container address. Replaced by `process.container`
+- `igenomesIgnore`: Changed to `igenomes_ignore`
+  :::note
+  The `snake_case` convention should now be used when defining pipeline parameters
+  :::
+
+**The following Nextflow syntax is deprecated and fails the test if present:**
+
+- Process-level configuration syntax still using the old Nextflow syntax, for example: `process.$fastqc` instead of `process withName:'fastqc'`.
+
+:::note
+You can choose to ignore tests for the presence or absence of specific config variables
+by creating a file called `.nf-core-lint.yml` in the root of your pipeline and listing
+the config variables that should be ignored. For example:
+
+```yaml
+nextflow_config:
+ - params.input
+```
+
+:::
+
+The other checks in this test (deprecated syntax etc.) cannot be individually identified,
+but you can skip the entire test block if you wish:
+
+```yaml
+nextflow_config: False
+```
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/pipeline_name_conventions.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/pipeline_name_conventions.md
new file mode 100644
index 0000000000..788d77e41a
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/pipeline_name_conventions.md
@@ -0,0 +1,13 @@
+# pipeline_name_conventions
+
+#### `PipelineLint.pipeline_name_conventions(){:python}`
+
+Checks that the pipeline name adheres to nf-core conventions.
+
+In order to ensure consistent naming, pipeline names should contain only lower case, alphanumeric characters.
+Otherwise a warning is displayed.
+
+:::warning
+DockerHub is very picky about image names and doesn’t even allow hyphens (we are `nfcore`).
+This is a large part of why we set this rule.
+:::
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/pipeline_todos.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/pipeline_todos.md
new file mode 100644
index 0000000000..e8a73d1663
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/pipeline_todos.md
@@ -0,0 +1,28 @@
+# pipeline_todos
+
+#### `PipelineLint.pipeline_todos(){:python}`
+
+Check for nf-core _TODO_ lines.
+
+The nf-core workflow template contains a number of comment lines to help developers
+of new pipelines know where they need to edit files and add content.
+They typically have the following format:
+
+```groovy
+// TODO nf-core: Make some kind of change to the workflow here
+```
+
+...or in markdown:
+
+```html
+<!-- TODO nf-core: Make some kind of change to the docs here -->
+```
+
+This lint test runs through all files in the pipeline and searches for these lines.
+If any are found they will throw a warning.
+
+:::note
+Note that many GUI code editors have plugins to list all instances of _TODO_
+in a given project directory. This is a very quick and convenient way to get
+started on your pipeline!
+:::
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/readme.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/readme.md
new file mode 100644
index 0000000000..51b8040c97
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/readme.md
@@ -0,0 +1,25 @@
+# readme
+
+#### `PipelineLint.readme(){:python}`
+
+Repository `README.md` tests
+
+The `README.md` files for a project are very important and must meet some requirements:
+
+- Nextflow badge
+ - If no Nextflow badge is found, a warning is given
+ - If a badge is found but the version doesn’t match the minimum version in the config file, the test fails
+ - Example badge code:
+ ```md
+ [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.27.6-brightgreen.svg)](https://www.nextflow.io/)
+ ```
+- Bioconda badge
+ - If your pipeline contains a file called `environment.yml` in the root directory, a bioconda badge is required
+ - Required badge code:
+ ```md
+ [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)
+ ```
+
+:::note
+These badges are a markdown image `![alt-text]()` _inside_ a markdown link `[markdown image]()`, so a bit fiddly to write.
+:::
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_description.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_description.md
new file mode 100644
index 0000000000..fafc6d6c59
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_description.md
@@ -0,0 +1,11 @@
+# schema_description
+
+#### `PipelineLint.schema_description(){:python}`
+
+Check that every parameter in the schema has a description
+
+The `nextflow_schema.json` pipeline schema should give a description for every flat parameter.
+The test also warns about parameters that are defined outside of a group.
+
+- Warning: Parameters in `nextflow_schema.json` without a description
+- Warning: Parameters in `nextflow_schema.json` that are defined outside of a group
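+
+For example, a parameter that passes both checks sits inside a group under `definitions` and carries a `description` (all names here are illustrative):
+
+```json
+{
+  "definitions": {
+    "input_output_options": {
+      "title": "Input/output options",
+      "type": "object",
+      "properties": {
+        "input": {
+          "type": "string",
+          "description": "Path to the input samplesheet"
+        }
+      }
+    }
+  }
+}
+```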
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_lint.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_lint.md
new file mode 100644
index 0000000000..b5e29febba
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_lint.md
@@ -0,0 +1,59 @@
+# schema_lint
+
+#### `PipelineLint.schema_lint(){:python}`
+
+Pipeline schema syntax
+
+Pipelines should have a `nextflow_schema.json` file that describes the different
+pipeline parameters (eg. `params.something`, `--something`).
+
+:::note
+Reminder: you should generally never need to edit this JSON file by hand.
+The `nf-core schema build` command can create _and edit_ the file for you
+to keep it up to date, with a friendly user-interface for customisation.
+:::
+
+The lint test checks the schema for the following:
+
+- Schema should be a valid JSON file
+- Schema should adhere to [JSONSchema](https://json-schema.org/), Draft 7.
+- Parameters can be described in two places:
+  - As `properties` in the top-level schema object
+  - As `properties` within subschemas listed in a top-level `definitions` object
+- The schema must describe at least one parameter
+- There must be no duplicate parameter IDs across the schema and definition subschema
+- All subschema in `definitions` must be referenced in the top-level `allOf` key
+- The top-level `allOf` key must not describe any non-existent definitions
+- Default parameters in the schema must be valid
+- Core top-level schema attributes should exist and be set as follows:
+  - `$schema`: `https://json-schema.org/draft-07/schema`
+  - `$id`: URL to the raw schema file, eg. `https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json`
+  - `title`: `YOURPIPELINE pipeline parameters`
+  - `description`: The pipeline config `manifest.description`
+
+For example, an _extremely_ minimal schema could look like this:
+
+```json
+{
+ "$schema": "https://json-schema.org/draft-07/schema",
+ "$id": "https://raw.githubusercontent.com/YOURPIPELINE/master/nextflow_schema.json",
+ "title": "YOURPIPELINE pipeline parameters",
+ "description": "This pipeline is for testing",
+ "properties": {
+ "first_param": { "type": "string" }
+ },
+ "definitions": {
+ "my_first_group": {
+ "properties": {
+ "second_param": { "type": "string" }
+ }
+ }
+ },
+ "allOf": [{ "$ref": "#/definitions/my_first_group" }]
+}
+```
+
+:::note
+You can check your pipeline schema without having to run the entire pipeline lint
+by running `nf-core schema lint` instead of `nf-core lint`
+:::
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_params.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_params.md
new file mode 100644
index 0000000000..7d9440fa15
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/schema_params.md
@@ -0,0 +1,11 @@
+# schema_params
+
+#### `PipelineLint.schema_params(){:python}`
+
+Check that the schema describes all flat params in the pipeline.
+
+The `nextflow_schema.json` pipeline schema should describe every flat parameter
+returned from the `nextflow config` command (params that are objects or more complex structures are ignored).
+
+- Failure: If parameters are returned by `nextflow config` but are not described in `nextflow_schema.json`
+- Warning: If parameters are described in `nextflow_schema.json` but are not returned by `nextflow config`
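+
+If this test fails, one way to see the flat parameters that are compared with the schema is to inspect the resolved configuration yourself, for example:
+
+```bash
+# List the flattened params that are compared against nextflow_schema.json
+nextflow config -flat . | grep '^params\.'
+```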
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/template_strings.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/template_strings.md
new file mode 100644
index 0000000000..23c06bb63d
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/template_strings.md
@@ -0,0 +1,17 @@
+# template_strings
+
+#### `PipelineLint.template_strings(){:python}`
+
+Check for template placeholders.
+
+The `nf-core create` pipeline template uses
+[Jinja](https://jinja.palletsprojects.com/en/2.11.x/) behind the scenes.
+
+This lint test fails if any Jinja template variables such as
+`{{ pipeline_name }}` are found in your pipeline code.
+
+Finding a placeholder like this means that something was probably copied and pasted
+from the template without being properly rendered for your pipeline.
+
+This test ignores any double-brackets prefixed with a dollar sign, such as
+`${{ secrets.AWS_ACCESS_KEY_ID }}` as these placeholders are used in GitHub Actions workflows.
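+
+As an illustration, the first line below would fail this test, while the second is ignored because the double brackets are prefixed with a dollar sign:
+
+```default
+{{ pipeline_name }}                 # fails: unrendered Jinja template placeholder
+${{ secrets.AWS_ACCESS_KEY_ID }}    # ignored: GitHub Actions expression syntax
+```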
diff --git a/src/content/tools/docs/2.0.1/pipeline_lint_tests/version_consistency.md b/src/content/tools/docs/2.0.1/pipeline_lint_tests/version_consistency.md
new file mode 100644
index 0000000000..040f5e57fc
--- /dev/null
+++ b/src/content/tools/docs/2.0.1/pipeline_lint_tests/version_consistency.md
@@ -0,0 +1,23 @@
+# version_consistency
+
+#### `PipelineLint.version_consistency(){:python}`
+
+Pipeline and container version number consistency.
+
+:::note
+This test only runs when the `--release` flag is set for `nf-core lint`,
+or `$GITHUB_REF` is equal to `master`.
+:::
+
+This lint test fetches the pipeline version number from three possible locations:
+
+- The pipeline config, `manifest.version`
+- The docker container in the pipeline config, `process.container`
+  - Some pipelines may not have this set at the pipeline level. If it is not found, it is ignored.
+- `$GITHUB_REF`, if it looks like a release tag (`refs/tags/`)
+
+The test then checks that:
+
+- The container name has a tag specified (eg. `nfcore/pipeline:version`)
+- The pipeline version number is numeric (contains only numbers and dots)
+- The version numbers all match one another
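+
+For example, for a release tagged `1.0.0` the following values would be considered consistent (illustrative only):
+
+```default
+manifest.version  = '1.0.0'                 # nextflow.config
+process.container = 'nfcore/example:1.0.0'  # nextflow.config
+$GITHUB_REF       = refs/tags/1.0.0         # set by GitHub Actions for the release
+```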
diff --git a/src/content/tools/docs/2.0/api/bump_version.md b/src/content/tools/docs/2.0/api/bump_version.md
new file mode 100644
index 0000000000..3f45f9fefb
--- /dev/null
+++ b/src/content/tools/docs/2.0/api/bump_version.md
@@ -0,0 +1,35 @@
+# nf_core.bump_version
+
+Bumps the version number in all appropriate files for
+a nf-core pipeline.
+
+### `nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version){:python}`
+
+Bumps the required Nextflow version number of a pipeline.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the required Nextflow version.
+
+### `nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version){:python}`
+
+Bumps a pipeline version number.
+
+- **Parameters:**
+ - **pipeline_obj** ([_nf_core.utils.Pipeline_](utils#nf_core.utils.Pipeline)) – A Pipeline object that holds information
+ about the pipeline contents and build files.
+ - **new_version** (_str_) – The new version tag for the pipeline. Semantic versioning only.
+
+### `nf_core.bump_version.update_file_version(filename, pipeline_obj, patterns){:python}`
+
+Updates the version number in a requested file.
+
+- **Parameters:**
+ - **filename** (_str_) – File to scan.
+ - **pipeline_obj** ([_nf_core.lint.PipelineLint_](lint#nf_core.lint.PipelineLint)) – A PipelineLint object that holds information
+ about the pipeline contents and build files.
+ - **pattern** (_str_) – Regex pattern to apply.
+ - **newstr** (_str_) – The replaced string.
+- **Raises:**
+  **ValueError** – if the version number cannot be found.
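+
+A minimal usage sketch based on the signatures above. It assumes that `nf_core.utils.Pipeline` can be constructed from a pipeline directory and populated with its `_load()` helper, as the nf-core CLI does internally:
+
+```python
+import nf_core.utils
+import nf_core.bump_version
+
+# Assumption: Pipeline(<dir>) plus _load() prepare the object used by the bump functions
+pipeline_obj = nf_core.utils.Pipeline("path/to/nf-core-example")
+pipeline_obj._load()
+
+# Bump the pipeline version and the minimum required Nextflow version
+nf_core.bump_version.bump_pipeline_version(pipeline_obj, "1.1.0")
+nf_core.bump_version.bump_nextflow_version(pipeline_obj, "21.04.0")
+```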
diff --git a/src/content/tools/docs/2.0/api/create.md b/src/content/tools/docs/2.0/api/create.md
new file mode 100644
index 0000000000..302bfd36a6
--- /dev/null
+++ b/src/content/tools/docs/2.0/api/create.md
@@ -0,0 +1,36 @@
+# nf_core.create
+
+Creates a nf-core pipeline matching the current
+organization’s specification based on a template.
+
+### _`class{:python}`_`nf_core.create.PipelineCreate(name, description, author, version='1.0dev', no_git=False, force=False, outdir=None){:python}`
+
+Bases: `object`
+
+Creates a nf-core pipeline a la carte from the nf-core best-practice template.
+
+- **Parameters:**
+ - **name** (_str_) – Name for the pipeline.
+ - **description** (_str_) – Description for the pipeline.
+  - **author** (_str_) – Name of the pipeline author.
+ - **version** (_str_) – Version flag. Semantic versioning only. Defaults to 1.0dev.
+ - **no_git** (_bool_) – Prevents the creation of a local Git repository for the pipeline. Defaults to False.
+ - **force** (_bool_) – Overwrites a given workflow directory with the same name. Defaults to False.
+ May the force be with you.
+ - **outdir** (_str_) – Path to the local output directory.
+
+#### `git_init_pipeline(){:python}`
+
+Initialises the new pipeline as a Git repository and submits first commit.
+
+#### `init_pipeline(){:python}`
+
+Creates the nf-core pipeline.
+
+#### `make_pipeline_logo(){:python}`
+
+Fetch a logo for the new pipeline from the nf-core website
+
+#### `render_template(){:python}`
+
+Runs Jinja to create a new nf-core pipeline.
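+
+A short usage sketch based on the constructor and methods documented above (all argument values are placeholders):
+
+```python
+from nf_core.create import PipelineCreate
+
+# Create a new pipeline skeleton from the nf-core template
+creator = PipelineCreate(
+    name="examplepipe",
+    description="An example analysis pipeline",
+    author="Jane Doe",
+    version="1.0dev",
+    no_git=False,
+    force=False,
+    outdir=None,
+)
+creator.init_pipeline()
+```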
diff --git a/src/content/tools/docs/2.0/api/download.md b/src/content/tools/docs/2.0/api/download.md
new file mode 100644
index 0000000000..7769825e08
--- /dev/null
+++ b/src/content/tools/docs/2.0/api/download.md
@@ -0,0 +1,142 @@
+# nf_core.download
+
+Downloads a nf-core pipeline to the local file system.
+
+### _`class{:python}`_`nf_core.download.DownloadProgress(*columns: str | ProgressColumn, console: Console | None = None, auto_refresh: bool = True, refresh_per_second: float = 10, speed_estimate_period: float = 30.0, transient: bool = False, redirect_stdout: bool = True, redirect_stderr: bool = True, get_time: Callable[[], float] | None = None, disable: bool = False, expand: bool = False){:python}`
+
+Bases: `Progress`
+
+Custom Progress bar class, allowing us to have two progress
+bars with different columns / layouts.
+
+#### `get_renderables(){:python}`
+
+Get a number of renderables for the progress display.
+
+### _`class{:python}`_`nf_core.download.DownloadWorkflow(pipeline=None, release=None, outdir=None, compress_type=None, force=False, container=None, singularity_cache_only=False, parallel_downloads=4){:python}`
+
+Bases: `object`
+
+Downloads a nf-core workflow from GitHub to the local file system.
+
+Can also download its Singularity container image if required.
+
+- **Parameters:**
+ - **pipeline** (_str_) – A nf-core pipeline name.
+ - **release** (_str_) – The workflow release version to download, like 1.0. Defaults to None.
+ - **singularity** (_bool_) – Flag, if the Singularity container should be downloaded as well. Defaults to False.
+ - **outdir** (_str_) – Path to the local download directory. Defaults to None.
+
+#### `compress_download(){:python}`
+
+Take the downloaded files and make a compressed .tar.gz archive.
+
+#### `download_configs(){:python}`
+
+Downloads the centralised config profiles from nf-core/configs to `self.outdir`.
+
+#### `download_wf_files(){:python}`
+
+Downloads workflow files from GitHub to the `self.outdir`.
+
+#### `download_workflow(){:python}`
+
+Starts a nf-core workflow download.
+
+#### `find_container_images(){:python}`
+
+Find container image names for workflow.
+
+Starts by using nextflow config to pull out any process.container
+declarations. This works for DSL1.
+
+Second, we look for DSL2 containers. These can’t be found with
+nextflow config at the time of writing, so we scrape the pipeline files.
+
+#### `get_release_hash(){:python}`
+
+Find specified release / branch hash
+
+#### `get_singularity_images(){:python}`
+
+Loop through container names and download Singularity images
+
+#### `prompt_compression_type(){:python}`
+
+Ask user if we should compress the downloaded files
+
+#### `prompt_container_download(){:python}`
+
+Prompt whether to download container images or not
+
+#### `prompt_pipeline_name(){:python}`
+
+Prompt for the pipeline name if not set with a flag
+
+#### `prompt_release(){:python}`
+
+Prompt for pipeline release / branch
+
+#### `prompt_singularity_cachedir_only(){:python}`
+
+Ask if we should _only_ use $NXF_SINGULARITY_CACHEDIR without copying into target
+
+#### `prompt_use_singularity_cachedir(){:python}`
+
+Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set
+
+#### `singularity_copy_cache_image(container, out_path, cache_path){:python}`
+
+Copy Singularity image from NXF_SINGULARITY_CACHEDIR to target folder.
+
+#### `singularity_download_image(container, out_path, cache_path, progress){:python}`
+
+Download a singularity image from the web.
+
+Use native Python to download the file.
+
+- **Parameters:**
+ - **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `https://depot.galaxyproject.org/singularity/name:version`
+ - **out_path** (_str_) – The final target output path
+  - **cache_path** (_str_ or _None_) – The NXF_SINGULARITY_CACHEDIR path if set, None if not
+ - **progress** (_Progress_) – Rich progress bar instance to add tasks to.
+
+#### `singularity_image_filenames(container){:python}`
+
+Check Singularity cache for image, copy to destination folder if found.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Can be direct download URL
+ or a Docker Hub repository ID.
+- **Returns:**
+  True if we have the image in the target location, or a download path if not.
+- **Return type:**
+ results (bool, str)
+
+#### `singularity_pull_image(container, out_path, cache_path, progress){:python}`
+
+Pull a singularity image using `singularity pull`
+
+Attempt to use a local installation of singularity to pull the image.
+
+- **Parameters:**
+ **container** (_str_) – A pipeline’s container name. Usually it is of similar format
+ to `nfcore/name:version`.
+- **Raises:**
+  **Various exceptions possible from subprocess execution of Singularity.**
+
+#### `validate_md5(fname, expected=None){:python}`
+
+Calculates the md5sum for a file on the disk and validate with expected.
+
+- **Parameters:**
+ - **fname** (_str_) – Path to a local file.
+ - **expected** (_str_) – The expected md5sum.
+- **Raises:**
+  **IOError** – if the md5sum does not match the remote sum.
+
+#### `wf_use_local_configs(){:python}`
+
+Edit the downloaded nextflow.config file to use the local config files
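+
+A short usage sketch based on the constructor and methods documented above (pipeline name, release, paths and option values are placeholders):
+
+```python
+from nf_core.download import DownloadWorkflow
+
+# Download a released pipeline, the shared configs and its Singularity images
+downloader = DownloadWorkflow(
+    pipeline="nf-core/example",
+    release="1.0.0",
+    outdir="nf-core-example-1.0.0",
+    compress_type="tar.gz",
+    container="singularity",
+)
+downloader.download_workflow()
+```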
diff --git a/src/content/tools/docs/2.0/api/index.md b/src/content/tools/docs/2.0/api/index.md
new file mode 100644
index 0000000000..958932acef
--- /dev/null
+++ b/src/content/tools/docs/2.0/api/index.md
@@ -0,0 +1,55 @@
+# API Reference
+
+# Tests:
+
+- [nf_core.bump_version](bump_version)
+ - [`bump_nextflow_version()`](bump_version#nf_core.bump_version.bump_nextflow_version)
+ - [`bump_pipeline_version()`](bump_version#nf_core.bump_version.bump_pipeline_version)
+ - [`update_file_version()`](bump_version#nf_core.bump_version.update_file_version)
+- [nf_core.create](create)
+ - [`PipelineCreate`](create#nf_core.create.PipelineCreate)
+- [nf_core.download](download)
+ - [`DownloadProgress`](download#nf_core.download.DownloadProgress)
+ - [`DownloadWorkflow`](download#nf_core.download.DownloadWorkflow)
+- [nf_core.launch](launch)
+ - [`Launch`](launch#nf_core.launch.Launch)
+- [nf_core.licences](licences)
+ - [`WorkflowLicences`](licences#nf_core.licences.WorkflowLicences)
+- [nf_core.lint](lint)
+ - [`run_linting()`](lint#nf_core.lint.run_linting)
+ - [`PipelineLint`](lint#nf_core.lint.PipelineLint)
+- [nf_core.list](list)
+ - [`LocalWorkflow`](list#nf_core.list.LocalWorkflow)
+ - [`RemoteWorkflow`](list#nf_core.list.RemoteWorkflow)
+ - [`Workflows`](list#nf_core.list.Workflows)
+ - [`get_local_wf()`](list#nf_core.list.get_local_wf)
+ - [`list_workflows()`](list#nf_core.list.list_workflows)
+ - [`pretty_date()`](list#nf_core.list.pretty_date)
+- [nf_core.modules](modules)
+- [nf_core.schema](schema)
+ - [`PipelineSchema`](schema#nf_core.schema.PipelineSchema)
+- [nf_core.sync](sync)
+ - [`PipelineSync`](sync#nf_core.sync.PipelineSync)
+ - [`PullRequestException`](sync#nf_core.sync.PullRequestException)
+ - [`SyncException`](sync#nf_core.sync.SyncException)
+- [nf_core.utils](utils)
+ - [`Pipeline`](utils#nf_core.utils.Pipeline)
+ - [`anaconda_package()`](utils#nf_core.utils.anaconda_package)
+ - [`check_if_outdated()`](utils#nf_core.utils.check_if_outdated)
+ - [`custom_yaml_dumper()`](utils#nf_core.utils.custom_yaml_dumper)
+ - [`fetch_wf_config()`](utils#nf_core.utils.fetch_wf_config)
+ - [`get_biocontainer_tag()`](utils#nf_core.utils.get_biocontainer_tag)
+ - [`get_repo_releases_branches()`](utils#nf_core.utils.get_repo_releases_branches)
+ - [`github_api_auto_auth()`](utils#nf_core.utils.github_api_auto_auth)
+ - [`is_file_binary()`](utils#nf_core.utils.is_file_binary)
+ - [`is_pipeline_directory()`](utils#nf_core.utils.is_pipeline_directory)
+ - [`load_tools_config()`](utils#nf_core.utils.load_tools_config)
+ - [`nextflow_cmd()`](utils#nf_core.utils.nextflow_cmd)
+ - [`parse_anaconda_licence()`](utils#nf_core.utils.parse_anaconda_licence)
+ - [`pip_package()`](utils#nf_core.utils.pip_package)
+ - [`poll_nfcore_web_api()`](utils#nf_core.utils.poll_nfcore_web_api)
+ - [`prompt_pipeline_release_branch()`](utils#nf_core.utils.prompt_pipeline_release_branch)
+ - [`prompt_remote_pipeline_name()`](utils#nf_core.utils.prompt_remote_pipeline_name)
+ - [`rich_force_colors()`](utils#nf_core.utils.rich_force_colors)
+ - [`setup_requests_cachedir()`](utils#nf_core.utils.setup_requests_cachedir)
+ - [`wait_cli_function()`](utils#nf_core.utils.wait_cli_function)
diff --git a/src/content/tools/docs/2.0/api/launch.md b/src/content/tools/docs/2.0/api/launch.md
new file mode 100644
index 0000000000..8affe0fa4d
--- /dev/null
+++ b/src/content/tools/docs/2.0/api/launch.md
@@ -0,0 +1,87 @@
+# nf_core.launch
+
+Launch a pipeline, interactively collecting params
+
+### _`class{:python}`_`nf_core.launch.Launch(pipeline=None, revision=None, command_only=False, params_in=None, params_out=None, save_all=False, show_hidden=False, url=None, web_id=None){:python}`
+
+Bases: `object`
+
+Class to hold config option to launch a pipeline
+
+#### `build_command(){:python}`
+
+Build the nextflow run command based on what we know
+
+#### `get_pipeline_schema(){:python}`
+
+Load and validate the schema from the supplied pipeline
+
+#### `get_web_launch_response(){:python}`
+
+Given a URL for a web-gui launch response, recursively query it until results are ready.
+
+#### `launch_pipeline(){:python}`
+
+#### `launch_web_gui(){:python}`
+
+Send schema to nf-core website and launch input GUI
+
+#### `launch_workflow(){:python}`
+
+Launch nextflow if required
+
+#### `merge_nxf_flag_schema(){:python}`
+
+Take the Nextflow flag schema and merge it with the pipeline schema
+
+#### `print_param_header(param_id, param_obj, is_group=False){:python}`
+
+#### `prompt_group(group_id, group_obj){:python}`
+
+Prompt for edits to a group of parameters (subschema in ‘definitions’)
+
+- **Parameters:**
+  - **group_id** – Parameter ID (string)
+ - **group_obj** – JSON Schema keys (dict)
+- **Returns:**
+  Dict of `param_id: val` answers
+
+#### `prompt_param(param_id, param_obj, is_required, answers){:python}`
+
+Prompt for a single parameter
+
+#### `prompt_schema(){:python}`
+
+Go through the pipeline schema and prompt user to change defaults
+
+#### `prompt_web_gui(){:python}`
+
+Ask whether to use the web-based or cli wizard to collect params
+
+#### `sanitise_web_response(){:python}`
+
+The web builder returns everything as strings.
+Use the functions defined in the cli wizard to convert to the correct types.
+
+#### `set_schema_inputs(){:python}`
+
+Take the loaded schema and set the defaults as the input parameters
+If a nf_params.json file is supplied, apply these over the top
+
+#### `single_param_to_questionary(param_id, param_obj, answers=None, print_help=True){:python}`
+
+Convert a JSONSchema param to a Questionary question
+
+- **Parameters:**
+ - **param_id** – Parameter ID (string)
+ - **param_obj** – JSON Schema keys (dict)
+ - **answers** – Optional preexisting answers (dict)
+ - **print_help** – If description and help_text should be printed (bool)
+- **Returns:**
+ Single Questionary dict, to be appended to questions list
+
+#### `strip_default_params(){:python}`
+
+Strip parameters if they have not changed from the default
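+
+A short usage sketch based on the constructor and methods documented above (pipeline name, revision and output path are placeholders):
+
+```python
+from nf_core.launch import Launch
+
+# Interactively collect parameters for a pipeline and write them to a JSON file
+launcher = Launch(
+    pipeline="nf-core/example",
+    revision="1.0.0",
+    params_out="nf-params.json",
+)
+launcher.launch_pipeline()
+```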
diff --git a/src/content/tools/docs/2.0/api/licences.md b/src/content/tools/docs/2.0/api/licences.md
new file mode 100644
index 0000000000..578ddaae1d
--- /dev/null
+++ b/src/content/tools/docs/2.0/api/licences.md
@@ -0,0 +1,37 @@
+# nf_core.licences
+
+Lists software licences for a given workflow.
+
+### _`class{:python}`_`nf_core.licences.WorkflowLicences(pipeline){:python}`
+
+Bases: `object`
+
+A nf-core workflow licenses collection.
+
+Tries to retrieve the license information from all dependencies
+of a given nf-core pipeline.
+
+A condensed overview with license per dependency can be printed out.
+
+- **Parameters:**
+ **pipeline** (_str_) – An existing nf-core pipeline name, like nf-core/hlatyping
+ or short hlatyping.
+
+#### `fetch_conda_licences(){:python}`
+
+Fetch package licences from Anaconda and PyPI.
+
+#### `get_environment_file(){:python}`
+
+Get the conda environment file for the pipeline
+
+#### `print_licences(){:python}`
+
+Prints the fetched license information.
+
+- **Parameters:**
+ **as_json** (_boolean_) – Prints the information in JSON. Defaults to False.
+
+#### `run_licences(){:python}`
+
+Run the nf-core licences action
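+
+A short usage sketch based on the class documented above (the pipeline name is a placeholder):
+
+```python
+from nf_core.licences import WorkflowLicences
+
+# Fetch and print the licences of a pipeline's conda dependencies
+licences = WorkflowLicences("hlatyping")
+licences.run_licences()
+```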
diff --git a/src/content/tools/docs/2.0/api/lint.md b/src/content/tools/docs/2.0/api/lint.md
new file mode 100644
index 0000000000..58da46f58a
--- /dev/null
+++ b/src/content/tools/docs/2.0/api/lint.md
@@ -0,0 +1,138 @@
+# nf_core.lint
+
+#### `SEE ALSO{:python}`
+
+See the [Lint Tests](../lint_tests/index.html) docs for information about specific linting functions.
+
+
+
+Linting policy for nf-core pipeline projects.
+
+Tests Nextflow-based pipelines to check that they adhere to
+the nf-core community guidelines.
+
+### `nf_core.lint.run_linting(pipeline_dir, release_mode=False, fix=(), key=(), show_passed=False, fail_ignored=False, md_fn=None, json_fn=None){:python}`
+
+Runs all nf-core linting checks on a given Nextflow pipeline project
+in either release mode or normal mode (default). Returns an object
+of type [`PipelineLint`](#nf_core.lint.PipelineLint) after finished.
+
+- **Parameters:**
+ - **pipeline_dir** (_str_) – The path to the Nextflow pipeline root directory
+ - **release_mode** (_bool_) – Set this to True, if the linting should be run in the release mode.
+ See [`PipelineLint`](#nf_core.lint.PipelineLint) for more information.
+- **Returns:**
+ An object of type [`PipelineLint`](#nf_core.lint.PipelineLint) that contains all the linting results.
+
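+A minimal programmatic call, following the signature and return value described above (the pipeline path is a placeholder):
+
+```python
+import nf_core.lint
+
+# Lint the pipeline in the current directory and inspect the results object
+lint_obj = nf_core.lint.run_linting(".", release_mode=False, show_passed=True)
+print(f"{len(lint_obj.passed)} passed, {len(lint_obj.warned)} warned, {len(lint_obj.failed)} failed")
+```
+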
+### _`class{:python}`_`nf_core.lint.PipelineLint(wf_path, release_mode=False, fix=(), key=(), fail_ignored=False){:python}`
+
+Bases: [`Pipeline`](utils#nf_core.utils.Pipeline)
+
+Object to hold linting information and results.
+
+Inherits [`nf_core.utils.Pipeline`](utils#nf_core.utils.Pipeline) class.
+
+Use the [`PipelineLint._lint_pipeline()`](#nf_core.lint.PipelineLint._lint_pipeline) function to run lint tests.
+
+- **Parameters:**
+ **path** (_str_) – The path to the nf-core pipeline directory.
+
+#### `failed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `ignored{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `lint_config{:python}`
+
+The parsed nf-core linting config for this pipeline
+
+- **Type:**
+ dict
+
+#### `passed{:python}`
+
+A list of tuples of the form: `(<test_name>, <reason>)`
+
+- **Type:**
+ list
+
+#### `release_mode{:python}`
+
+True if the linting was run in release mode, False otherwise.
+
+- **Type:**
+ bool
+
+#### `warned{:python}`
+
+A list of tuples of the form: `(