diff --git a/CHANGELOG.md b/CHANGELOG.md index 76fc1c5524..593e74be8f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,10 @@ ### Linting +- Update modules lint test to fail if enable_conda is found ([#2213](https://github.com/nf-core/tools/pull/2213)) +- Read module lint configuration from `.nf-core.yml`, not `.nf-core-lint.yml` ([#2221](https://github.com/nf-core/tools/pull/2221)) +- `nf-core schema lint` now defaults to linting `nextflow_schema.json` if no filename is provided ([#2225](https://github.com/nf-core/tools/pull/2225)) + ### Modules - Add an `--empty-template` option to create a module without TODO statements or examples ([#2175](https://github.com/nf-core/tools/pull/2175) & [#2177](https://github.com/nf-core/tools/pull/2177)) @@ -22,11 +26,13 @@ - Fixing problem when a module included in a subworkflow had a name change from TOOL to TOOL/SUBTOOL ([#2177](https://github.com/nf-core/tools/pull/2177)) - Fix `nf-core subworkflows test` not running subworkflow tests ([#2181](https://github.com/nf-core/tools/pull/2181)) +- Add tests for `nf-core subworkflows create-test-yml` ([#2219](https://github.com/nf-core/tools/pull/2219)) ### General - `nf-core modules/subworkflows info` now prints the include statement for the module/subworkflow ([#2182](https://github.com/nf-core/tools/pull/2182)). 
- Add a stale GHA wich stale + close issues and stale PRs with specific labels ([#2183](https://github.com/nf-core/tools/pull/2183)) +- Update minimum version of rich to 13.3.1 ([#2185](https://github.com/nf-core/tools/pull/2185)) - Add the Nextflow version to Gitpod container matching the minimal Nextflow version for nf-core (according to `nextflow.config`) ([#2196](https://github.com/nf-core/tools/pull/2196)) - Use `nfcore/gitpod:dev` container in the dev branch ([#2196](https://github.com/nf-core/tools/pull/2196)) - Replace requests_mock with responses in test mocks ([#2165](https://github.com/nf-core/tools/pull/2165)). diff --git a/README.md b/README.md index 2fe09e80d2..8235bf4386 100644 --- a/README.md +++ b/README.md @@ -634,13 +634,13 @@ It's important to change the default value of a parameter in the `nextflow.confi The pipeline schema is linted as part of the main pipeline `nf-core lint` command, however sometimes it can be useful to quickly check the syntax of the JSONSchema without running a full lint run. -Usage is `nf-core schema lint `, eg: +Usage is `nf-core schema lint ` (defaulting to `nextflow_schema.json`), eg: -![`nf-core schema lint nextflow_schema.json`](docs/images/nf-core-schema-lint.svg) +![`nf-core schema lint`](docs/images/nf-core-schema-lint.svg) ## Bumping a pipeline version number diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 841cb9f7e7..15bacde92f 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -341,7 +341,7 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma meets the nf-core guidelines. Documentation of all lint tests can be found on the nf-core website: [link=https://nf-co.re/tools-docs/]https://nf-co.re/tools-docs/[/] - You can ignore tests using a file called [blue].nf-core-lint.yaml[/] [i](if you have a good reason!)[/]. + You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. 
See the documentation for details. """ @@ -1398,7 +1398,9 @@ def build(dir, no_prompts, web_only, url): # nf-core schema lint @schema.command() -@click.argument("schema_path", type=click.Path(exists=True), required=True, metavar="") +@click.argument( + "schema_path", type=click.Path(exists=True), default="nextflow_schema.json", metavar="" +) def lint(schema_path): """ Check that a given pipeline schema is valid. @@ -1408,6 +1410,8 @@ def lint(schema_path): This function runs as part of the nf-core lint command, this is a convenience command that does just the schema linting nice and quickly. + + If no schema path is provided, "nextflow_schema.json" will be used (if it exists). """ schema_obj = nf_core.schema.PipelineSchema() try: diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 31ab1a71fb..775b205cf5 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -6,6 +6,7 @@ import yaml +import nf_core.utils from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo @@ -162,24 +163,13 @@ def install_component_files(self, component_name, component_version, modules_rep def load_lint_config(self): """Parse a pipeline lint config file. - Look for a file called either `.nf-core-lint.yml` or - `.nf-core-lint.yaml` in the pipeline root directory and parse it. - (`.yml` takes precedence). + Load the '.nf-core.yml' config file and extract + the lint config from it Add parsed config to the `self.lint_config` class attribute. 
""" - config_fn = os.path.join(self.dir, ".nf-core-lint.yml") - - # Pick up the file if it's .yaml instead of .yml - if not os.path.isfile(config_fn): - config_fn = os.path.join(self.dir, ".nf-core-lint.yaml") - - # Load the YAML - try: - with open(config_fn, "r") as fh: - self.lint_config = yaml.safe_load(fh) - except FileNotFoundError: - log.debug(f"No lint config file found: {config_fn}") + _, tools_config = nf_core.utils.load_tools_config(self.dir) + self.lint_config = tools_config.get("lint", {}) def check_modules_structure(self): """ diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 79bce3e7f1..af018331f0 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -92,20 +92,22 @@ def nextflow_config(self): * Process-level configuration syntax still using the old Nextflow syntax, for example: ``process.$fastqc`` instead of ``process withName:'fastqc'``. .. tip:: You can choose to ignore tests for the presence or absence of specific config variables - by creating a file called ``.nf-core-lint.yml`` in the root of your pipeline and creating + by creating a file called ``.nf-core.yml`` in the root of your pipeline and creating a list the config variables that should be ignored. For example: .. code-block:: yaml - nextflow_config: - - params.input + lint: + nextflow_config: + - params.input The other checks in this test (depreciated syntax etc) can not be individually identified, but you can skip the entire test block if you wish: .. 
code-block:: yaml - nextflow_config: False + lint: + nextflow_config: False """ passed = [] warned = [] diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index d44fe90f1e..dd565c9a4e 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -257,9 +257,26 @@ def check_process_section(self, lines, fix_version, progress_bar): self.warned.append(("process_standard_label", "Process label unspecified", self.main_nf)) for i, l in enumerate(lines): url = None - if _container_type(l) == "bioconda": - bioconda_packages = [b for b in l.split() if "bioconda::" in b] l = l.strip(" '\"") + if _container_type(l) == "conda": + bioconda_packages = [b for b in l.split() if "bioconda::" in b] + match = re.search(r"params\.enable_conda", l) + if match is None: + self.passed.append( + ( + "deprecated_enable_conda", + f"Deprecated parameter 'params.enable_conda' correctly not found in the conda definition", + self.main_nf, + ) + ) + else: + self.failed.append( + ( + "deprecated_enable_conda", + f"Found deprecated parameter 'params.enable_conda' in the conda definition", + self.main_nf, + ) + ) if _container_type(l) == "singularity": # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' :" -> v1.2.0_cv1 # e.g. 
"https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' :" -> 0.11.9--0 @@ -471,7 +488,7 @@ def _fix_module_version(self, current_version, latest_version, singularity_tag, for line in lines: l = line.strip(" '\"") build_type = _container_type(l) - if build_type == "bioconda": + if build_type == "conda": new_lines.append(re.sub(rf"{current_version}", f"{latest_version}", line)) elif build_type in ("singularity", "docker"): # Check that the new url is valid @@ -516,8 +533,8 @@ def _get_build(response): def _container_type(line): """Returns the container type of a build.""" - if re.search("bioconda::", line): - return "bioconda" + if line.startswith("conda"): + return "conda" if line.startswith("https://containers") or line.startswith("https://depot"): # Look for a http download URL. # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/3809435/713980 diff --git a/nf_core/subworkflow-template/tests/main.nf b/nf_core/subworkflow-template/tests/main.nf index 2bf63da2f3..f8c9b10dcb 100644 --- a/nf_core/subworkflow-template/tests/main.nf +++ b/nf_core/subworkflow-template/tests/main.nf @@ -4,7 +4,7 @@ nextflow.enable.dsl = 2 include { {{ subworkflow_name|upper }} } from '../../../../subworkflows/{{ org }}/{{ subworkflow_dir }}/main.nf' -workflow test_{{ subworkflow_name }} { +workflow test_{{ component_name_underscore }} { {% if has_meta %} input = [ [ id:'test' ], // meta map diff --git a/nf_core/subworkflows/test_yml_builder.py b/nf_core/subworkflows/test_yml_builder.py index 39877ca241..2ad50d4e25 100644 --- a/nf_core/subworkflows/test_yml_builder.py +++ b/nf_core/subworkflows/test_yml_builder.py @@ -139,7 +139,7 @@ def scrape_workflow_entry_points(self): if match: self.entry_points.append(match.group(1)) if len(self.entry_points) == 0: - raise UserWarning("No workflow entry points found in 'self.module_test_main'") + raise UserWarning(f"No workflow entry points found in '{self.subworkflow_test_main}'") def build_all_tests(self): """ @@ -195,7 
+195,7 @@ def build_single_test(self, entry_point): ).strip() ep_test["tags"] = [t.strip() for t in prompt_tags.split(",")] - ep_test["files"] = self.get_md5_sums(entry_point, ep_test["command"]) + ep_test["files"] = self.get_md5_sums(ep_test["command"]) return ep_test @@ -272,7 +272,7 @@ def create_test_file_dict(self, results_dir, is_repeat=False): return test_files - def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repeat=None): + def get_md5_sums(self, command, results_dir=None, results_dir_repeat=None): """ Recursively go through directories and subdirectories and generate tuples of (, ) diff --git a/requirements.txt b/requirements.txt index b3d1f251bf..9dda69b407 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,6 +15,6 @@ questionary>=1.8.0 refgenie requests requests_cache -rich-click>=1.0.0 -rich>=10.7.0 +rich-click>=1.6.1 +rich>=13.3.1 tabulate diff --git a/tests/modules/create_test_yml.py b/tests/modules/create_test_yml.py index d444ff841a..243378af78 100644 --- a/tests/modules/create_test_yml.py +++ b/tests/modules/create_test_yml.py @@ -11,19 +11,19 @@ @with_temporary_folder def test_modules_custom_yml_dumper(self, out_dir): """Try to create a yml file with the custom yml dumper""" - yml_output_path = os.path.join(out_dir, "test.yml") + yml_output_path = Path(out_dir, "test.yml") meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) meta_builder.test_yml_output_path = yml_output_path meta_builder.tests = [{"testname": "myname"}] meta_builder.print_test_yml() - assert os.path.isfile(yml_output_path) + assert Path(yml_output_path).is_file() @with_temporary_folder def test_modules_test_file_dict(self, test_file_dir): """Create dict of test files and create md5 sums""" meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) - with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: + with 
open(Path(test_file_dir, "test_file.txt"), "w") as fh: fh.write("this line is just for testing") test_files = meta_builder.create_test_file_dict(test_file_dir) assert len(test_files) == 1 @@ -34,7 +34,7 @@ def test_modules_test_file_dict(self, test_file_dir): def test_modules_create_test_yml_get_md5(self, test_file_dir): """Get md5 sums from a dummy output""" meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) - with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: + with open(Path(test_file_dir, "test_file.txt"), "w") as fh: fh.write("this line is just for testing") test_files = meta_builder.get_md5_sums(command="dummy", results_dir=test_file_dir, results_dir_repeat=test_file_dir) assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" @@ -43,9 +43,7 @@ def test_modules_create_test_yml_get_md5(self, test_file_dir): def test_modules_create_test_yml_entry_points(self): """Test extracting test entry points from a main.nf file""" meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", self.pipeline_dir, False, "./", False, True) - meta_builder.module_test_main = os.path.join( - self.nfcore_modules, "tests", "modules", "nf-core", "bpipe", "test", "main.nf" - ) + meta_builder.module_test_main = Path(self.nfcore_modules, "tests", "modules", "nf-core", "bpipe", "test", "main.nf") meta_builder.scrape_workflow_entry_points() assert meta_builder.entry_points[0] == "test_bpipe_test" @@ -55,7 +53,7 @@ def test_modules_create_test_yml_check_inputs(self): cwd = os.getcwd() os.chdir(self.nfcore_modules) meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", ".", False, "./", False, True) - meta_builder.module_test_main = os.path.join(self.nfcore_modules, "tests", "modules", "bpipe", "test", "main.nf") + meta_builder.module_test_main = Path(self.nfcore_modules, "tests", "modules", "bpipe", "test", "main.nf") with pytest.raises(UserWarning) as excinfo: 
meta_builder.check_inputs() os.chdir(cwd) diff --git a/tests/modules/lint.py b/tests/modules/lint.py index 476481a109..18c0dc4dab 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -66,7 +66,7 @@ def test_modules_lint_gitlab_modules(self): self.mods_install_gitlab.install("multiqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 0 + assert len(module_lint.failed) == 2 assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -77,7 +77,7 @@ def test_modules_lint_multiple_remotes(self): self.mods_install_gitlab.install("multiqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 0 + assert len(module_lint.failed) == 1 assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -103,6 +103,6 @@ def test_modules_lint_patched_modules(self): all_modules=True, ) - assert len(module_lint.failed) == 0 + assert len(module_lint.failed) == 1 assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 diff --git a/tests/subworkflows/create_test_yml.py b/tests/subworkflows/create_test_yml.py new file mode 100644 index 0000000000..40384b420f --- /dev/null +++ b/tests/subworkflows/create_test_yml.py @@ -0,0 +1,96 @@ +import os +from pathlib import Path +from unittest import mock + +import pytest + +import nf_core.subworkflows + +from ..utils import with_temporary_folder + + +@with_temporary_folder +def test_subworkflows_custom_yml_dumper(self, out_dir): + """Try to create a yml file with the custom yml dumper""" + yml_output_path = Path(out_dir, "test.yml") + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow="test/tool", + directory=self.pipeline_dir, + test_yml_output_path=yml_output_path, + no_prompts=True, + ) + 
meta_builder.test_yml_output_path = yml_output_path + meta_builder.tests = [{"testname": "myname"}] + meta_builder.print_test_yml() + assert Path(yml_output_path).is_file() + + +@with_temporary_folder +def test_subworkflows_test_file_dict(self, test_file_dir): + """Create dict of test files and create md5 sums""" + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow="test/tool", + directory=self.pipeline_dir, + test_yml_output_path="./", + no_prompts=True, + ) + with open(Path(test_file_dir, "test_file.txt"), "w") as fh: + fh.write("this line is just for testing") + test_files = meta_builder.create_test_file_dict(test_file_dir) + assert len(test_files) == 1 + assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" + + +@with_temporary_folder +def test_subworkflows_create_test_yml_get_md5(self, test_file_dir): + """Get md5 sums from a dummy output""" + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow="test/tool", + directory=self.pipeline_dir, + test_yml_output_path="./", + no_prompts=True, + ) + with open(Path(test_file_dir, "test_file.txt"), "w") as fh: + fh.write("this line is just for testing") + test_files = meta_builder.get_md5_sums( + command="dummy", + results_dir=test_file_dir, + results_dir_repeat=test_file_dir, + ) + assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" + + +def test_subworkflows_create_test_yml_entry_points(self): + """Test extracting test entry points from a main.nf file""" + subworkflow = "test_subworkflow" + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow=f"{subworkflow}/test", + directory=self.pipeline_dir, + test_yml_output_path="./", + no_prompts=True, + ) + meta_builder.subworkflow_test_main = Path( + self.nfcore_modules, "tests", "subworkflows", "nf-core", subworkflow, "main.nf" + ) + meta_builder.scrape_workflow_entry_points() + assert meta_builder.entry_points[0] == f"test_{subworkflow}" + + +def 
test_subworkflows_create_test_yml_check_inputs(self): + """Test the check_inputs() function - raise UserWarning because test.yml exists""" + cwd = os.getcwd() + os.chdir(self.nfcore_modules) + subworkflow = "test_subworkflow" + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow=f"{subworkflow}", + directory=self.pipeline_dir, + test_yml_output_path="./", + no_prompts=True, + ) + meta_builder.subworkflow_test_main = Path( + self.nfcore_modules, "tests", "subworkflows", "nf-core", subworkflow, "main.nf" + ) + with pytest.raises(UserWarning) as excinfo: + meta_builder.check_inputs() + os.chdir(cwd) + assert "Test YAML file already exists!" in str(excinfo.value) diff --git a/tests/test_cli.py b/tests/test_cli.py index 2bd8af5c59..58c4525a76 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -350,3 +350,20 @@ def test_lint_log_user_warning(self, mock_lint, mock_is_pipeline): assert result.exit_code == 1 assert error_txt in captured_logs.output[-1] assert captured_logs.records[-1].levelname == "ERROR" + + @mock.patch("nf_core.schema.PipelineSchema.get_schema_path") + def test_schema_lint(self, mock_get_schema_path): + """Test nf-core schema lint defaults to nextflow_schema.json""" + cmd = ["schema", "lint"] + result = self.invoke_cli(cmd) + assert mock_get_schema_path.called_with("nextflow_schema.json") + assert "nextflow_schema.json" in result.output + + @mock.patch("nf_core.schema.PipelineSchema.get_schema_path") + def test_schema_lint_filename(self, mock_get_schema_path): + """Test nf-core schema lint accepts a filename""" + cmd = ["schema", "lint", "some_other_filename"] + result = self.invoke_cli(cmd) + assert mock_get_schema_path.called_with("some_other_filename") + assert "some_other_filename" in result.output + assert "nextflow_schema.json" not in result.output diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index c5d768ab10..19a090f7f0 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py 
@@ -93,6 +93,13 @@ def tearDown(self): test_subworkflows_create_nfcore_modules, test_subworkflows_create_succeed, ) + from .subworkflows.create_test_yml import ( + test_subworkflows_create_test_yml_check_inputs, + test_subworkflows_create_test_yml_entry_points, + test_subworkflows_create_test_yml_get_md5, + test_subworkflows_custom_yml_dumper, + test_subworkflows_test_file_dict, + ) from .subworkflows.info import ( test_subworkflows_info_in_modules_repo, test_subworkflows_info_local,