diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index a15369485b..86ec88e1ee 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -454,7 +454,7 @@ def remote(ctx, keywords, json):
             ctx.obj["modules_repo_branch"],
             ctx.obj["modules_repo_no_pull"],
         )
-        stdout.print(module_list.list_modules(keywords, json))
+        stdout.print(module_list.list_components(keywords, json))
     except (UserWarning, LookupError) as e:
         log.critical(e)
         sys.exit(1)
@@ -484,7 +484,7 @@ def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
             ctx.obj["modules_repo_branch"],
             ctx.obj["modules_repo_no_pull"],
         )
-        stdout.print(module_list.list_modules(keywords, json))
+        stdout.print(module_list.list_components(keywords, json))
     except (UserWarning, LookupError) as e:
         log.error(e)
         sys.exit(1)
@@ -999,6 +999,70 @@ def install(ctx, subworkflow, dir, prompt, force, sha):
         sys.exit(1)
 
 
+# nf-core subworkflows list subcommands
+@subworkflows.group()
+@click.pass_context
+def list(ctx):
+    """
+    List modules in a local pipeline or remote repository.
+    """
+    pass
+
+
+# nf-core subworkflows list remote
+@list.command()
+@click.pass_context
+@click.argument("keywords", required=False, nargs=-1, metavar="")
+@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout")
+def remote(ctx, keywords, json):
+    """
+    List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/].
+    """
+    try:
+        subworkflow_list = nf_core.subworkflows.SubworkflowList(
+            None,
+            True,
+            ctx.obj["modules_repo_url"],
+            ctx.obj["modules_repo_branch"],
+            ctx.obj["modules_repo_no_pull"],
+        )
+
+        stdout.print(subworkflow_list.list_components(keywords, json))
+    except (UserWarning, LookupError) as e:
+        log.critical(e)
+        sys.exit(1)
+
+
+# nf-core subworkflows list local
+@list.command()
+@click.pass_context
+@click.argument("keywords", required=False, nargs=-1, metavar="")
+@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout")
+@click.option(
+    "-d",
+    "--dir",
+    type=click.Path(exists=True),
+    default=".",
+    help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
+)
+def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
+    """
+    List subworkflows installed locally in a pipeline
+    """
+    try:
+        subworkflow_list = nf_core.subworkflows.SubworkflowList(
+            dir,
+            False,
+            ctx.obj["modules_repo_url"],
+            ctx.obj["modules_repo_branch"],
+            ctx.obj["modules_repo_no_pull"],
+        )
+        stdout.print(subworkflow_list.list_components(keywords, json))
+    except (UserWarning, LookupError) as e:
+        log.error(e)
+        sys.exit(1)
+
+
 # nf-core schema subcommands
 @nf_core_cli.group()
 def schema():
diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py
index 6eabb960f3..d89fb4652e 100644
--- a/nf_core/components/components_command.py
+++ b/nf_core/components/components_command.py
@@ -157,3 +157,40 @@ def load_lint_config(self):
                 self.lint_config = yaml.safe_load(fh)
         except FileNotFoundError:
             log.debug(f"No lint config file found: {config_fn}")
+
+    def check_component_structure(self, component_name):
+        """
+        Check that the structure of the modules/subworkflow directory in a pipeline is the correct one:
+        modules/nf-core/TOOL/SUBTOOL | subworkflows/nf-core/SUBWORKFLOW
+
+        Prior to nf-core/tools release 2.6 the directory structure had an additional level of nesting:
+        modules/nf-core/modules/TOOL/SUBTOOL
+        """
+        if self.repo_type == "pipeline":
+            wrong_location_modules = []
+            for directory, _, files in os.walk(Path(self.dir, component_name)):
+                if "main.nf" in files:
+                    module_path = Path(directory).relative_to(Path(self.dir, component_name))
+                    parts = module_path.parts
+                    # Check that there are modules installed directly under the 'modules' directory
+                    if parts[1] == component_name:
+                        wrong_location_modules.append(module_path)
+            # If there are modules installed in the wrong location
+            if len(wrong_location_modules) > 0:
+                log.info("The modules folder structure is outdated. Reinstalling modules.")
+                # Remove the local copy of the modules repository
+                log.info(f"Updating '{self.modules_repo.local_repo_dir}'")
+                self.modules_repo.setup_local_repo(
+                    self.modules_repo.remote_url, self.modules_repo.branch, self.hide_progress
+                )
+                # Move wrong modules to the right directory
+                for module in wrong_location_modules:
+                    modules_dir = Path(component_name).resolve()
+                    correct_dir = Path(modules_dir, self.modules_repo.repo_path, Path(*module.parts[2:]))
+                    wrong_dir = Path(modules_dir, module)
+                    shutil.move(wrong_dir, correct_dir)
+                    log.info(f"Moved {wrong_dir} to {correct_dir}.")
+                shutil.rmtree(Path(self.dir, component_name, self.modules_repo.repo_path, component_name))
+                # Regenerate modules.json file
+                modules_json = ModulesJson(self.dir)
+                modules_json.check_up_to_date()
diff --git a/nf_core/components/list.py b/nf_core/components/list.py
new file mode 100644
index 0000000000..eeacb9c39b
--- /dev/null
+++ b/nf_core/components/list.py
@@ -0,0 +1,144 @@
+import json
+import logging
+
+import rich
+
+import nf_core.modules.modules_utils
+from nf_core.components.components_command import ComponentCommand
+from nf_core.modules.modules_json import ModulesJson
+
+# from .modules_command import ModulesRepo
+from nf_core.modules.modules_repo import ModulesRepo
+
+log = logging.getLogger(__name__)
+
+
+class ComponentList(ComponentCommand):
+    def __init__(self, component_type, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False):
+        super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull)
+        self.remote = remote
+
+    def list_components(self, keywords=None, print_json=False):
+        keywords = keywords or []
+        """
+        Get available modules/subworkflows names from GitHub tree for repo
+        and print as list to stdout
+        """
+        # Check modules directory structure
+        # self.check_component_structure(self.component_type)
+
+        # Initialise rich table
+        table = rich.table.Table()
+        table.add_column(f"{self.component_type[:-1].capitalize()} Name")
+        components = []
+
+        def pattern_msg(keywords):
+            if len(keywords) == 0:
+                return ""
+            if len(keywords) == 1:
+                return f" matching pattern '{keywords[0]}'"
+            else:
+                quoted_keywords = (f"'{key}'" for key in keywords)
+                return f" matching patterns {', '.join(quoted_keywords)}"
+
+        # No pipeline given - show all remote
+        if self.remote:
+            # Filter the modules/subworkflows by keywords
+            components = [
+                comp
+                for comp in self.modules_repo.get_avail_components(self.component_type)
+                if all(k in comp for k in keywords)
+            ]
+
+            # Nothing found
+            if len(components) == 0:
+                log.info(
+                    f"No available {self.component_type} found in {self.modules_repo.remote_url} ({self.modules_repo.branch})"
+                    f"{pattern_msg(keywords)}"
+                )
+                return ""
+
+            for comp in sorted(components):
+                table.add_row(comp)
+
+        # We have a pipeline - list what's installed
+        else:
+            # Check that we are in a pipeline directory
+
+            try:
+                _, repo_type = nf_core.modules.modules_utils.get_repo_type(self.dir)
+                if repo_type != "pipeline":
+                    raise UserWarning(
+                        f"The command 'nf-core {self.component_type} list local' must be run from a pipeline directory.",
+                    )
+            except UserWarning as e:
+                log.error(e)
+                return ""
+            # Check whether pipelines is valid
+            try:
+                self.has_valid_directory()
+            except UserWarning as e:
+                log.error(e)
+                return ""
+
+            # Verify that 'modules.json' is consistent with the installed modules
+            modules_json = ModulesJson(self.dir)
+            modules_json.check_up_to_date()
+
+            # Filter by keywords
+            repos_with_comps = {
+                repo_url: [comp for comp in components if all(k in comp[1] for k in keywords)]
+                for repo_url, components in modules_json.get_all_components(self.component_type).items()
+            }
+
+            # Nothing found
+            if sum(map(len, repos_with_comps)) == 0:
+                log.info(f"No nf-core {self.component_type} found in '{self.dir}'{pattern_msg(keywords)}")
+                return ""
+
+            table.add_column("Repository")
+            table.add_column("Version SHA")
+            table.add_column("Message")
+            table.add_column("Date")
+
+            # Load 'modules.json'
+            modules_json = modules_json.modules_json
+
+            for repo_url, component_with_dir in sorted(repos_with_comps.items()):
+                repo_entry = modules_json["repos"].get(repo_url, {})
+                for install_dir, component in sorted(component_with_dir):
+                    repo_modules = repo_entry.get(self.component_type)
+                    component_entry = repo_modules.get(install_dir).get(component)
+
+                    if component_entry:
+                        version_sha = component_entry["git_sha"]
+                        try:
+                            # pass repo_name to get info on modules even outside nf-core/modules
+                            message, date = ModulesRepo(
+                                remote_url=repo_url,
+                                branch=component_entry["branch"],
+                            ).get_commit_info(version_sha)
+                        except LookupError as e:
+                            log.warning(e)
+                            date = "[red]Not Available"
+                            message = "[red]Not Available"
+                    else:
+                        log.warning(
+                            f"Commit SHA for {self.component_type[:-1]} '{install_dir}/{self.component_type}' is missing from 'modules.json'"
+                        )
+                        version_sha = "[red]Not Available"
+                        date = "[red]Not Available"
+                        message = "[red]Not Available"
+                    table.add_row(component, repo_url, version_sha, message, date)
+
+            if print_json:
+                return json.dumps(components, sort_keys=True, indent=4)
+
+        if self.remote:
+            log.info(
+                f"{self.component_type.capitalize()} available from {self.modules_repo.remote_url} ({self.modules_repo.branch})"
+                f"{pattern_msg(keywords)}:\n"
+            )
+        else:
+            log.info(f"{self.component_type.capitalize()} installed in '{self.dir}'{pattern_msg(keywords)}:\n")
+        return table
diff --git a/nf_core/modules/list.py b/nf_core/modules/list.py
index ea9c69c4a6..c7dc943f9e 100644
--- a/nf_core/modules/list.py
+++ b/nf_core/modules/list.py
@@ -1,143 +1,10 @@
-import json
 import logging
 
-import rich
-
-import nf_core.modules.modules_utils
-
-from .modules_command import ModuleCommand
-from .modules_json import ModulesJson
-from .modules_repo import ModulesRepo
+from nf_core.components.list import ComponentList
 
 log = logging.getLogger(__name__)
 
 
-class ModuleList(ModuleCommand):
+class ModuleList(ComponentList):
     def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False):
-        super().__init__(pipeline_dir, remote_url, branch, no_pull)
-        self.remote = remote
-
-    def list_modules(self, keywords=None, print_json=False):
-        """
-        Get available module names from GitHub tree for repo
-        and print as list to stdout
-        """
-        # Check modules directory structure
-        self.check_modules_structure()
-
-        # Initialise rich table
-        table = rich.table.Table()
-        table.add_column("Module Name")
-        modules = []
-
-        if keywords is None:
-            keywords = []
-
-        def pattern_msg(keywords):
-            if len(keywords) == 0:
-                return ""
-            if len(keywords) == 1:
-                return f" matching pattern '{keywords[0]}'"
-            else:
-                quoted_keywords = (f"'{key}'" for key in keywords)
-                return f" matching patterns {', '.join(quoted_keywords)}"
-
-        # No pipeline given - show all remote
-        if self.remote:
-
-            # Filter the modules by keywords
-            modules = [
-                mod
-                for mod in self.modules_repo.get_avail_components(self.component_type)
-                if all(k in mod for k in keywords)
-            ]
-
-            # Nothing found
-            if len(modules) == 0:
-                log.info(
-                    f"No available modules found in {self.modules_repo.remote_url} ({self.modules_repo.branch})"
-                    f"{pattern_msg(keywords)}"
-                )
-                return ""
-
-            for mod in sorted(modules):
-                table.add_row(mod)
-
-        # We have a pipeline - list what's installed
-        else:
-            # Check that we are in a pipeline directory
-            try:
-                _, repo_type = nf_core.modules.modules_utils.get_repo_type(self.dir)
-                if repo_type != "pipeline":
-                    raise UserWarning(
-                        "The command 'nf-core modules list local' must be run from a pipeline directory.",
-                    )
-            except UserWarning as e:
-                log.error(e)
-                return ""
-            # Check whether pipelines is valid
-            try:
-                self.has_valid_directory()
-            except UserWarning as e:
-                log.error(e)
-                return ""
-
-            # Verify that 'modules.json' is consistent with the installed modules
-            modules_json = ModulesJson(self.dir)
-            modules_json.check_up_to_date()
-
-            # Filter by keywords
-            repos_with_mods = {
-                repo_url: [mod for mod in modules if all(k in mod[1] for k in keywords)]
-                for repo_url, modules in modules_json.get_all_modules().items()
-            }
-
-            # Nothing found
-            if sum(map(len, repos_with_mods)) == 0:
-                log.info(f"No nf-core modules found in '{self.dir}'{pattern_msg(keywords)}")
-                return ""
-
-            table.add_column("Repository")
-            table.add_column("Version SHA")
-            table.add_column("Message")
-            table.add_column("Date")
-
-            # Load 'modules.json'
-            modules_json = modules_json.modules_json
-
-            for repo_url, module_with_dir in sorted(repos_with_mods.items()):
-                repo_entry = modules_json["repos"].get(repo_url, {})
-                for install_dir, module in sorted(module_with_dir):
-                    repo_modules = repo_entry.get("modules")
-                    module_entry = repo_modules.get(install_dir).get(module)
-
-                    if module_entry:
-                        version_sha = module_entry["git_sha"]
-                        try:
-                            # pass repo_name to get info on modules even outside nf-core/modules
-                            message, date = ModulesRepo(
-                                remote_url=repo_url,
-                                branch=module_entry["branch"],
-                            ).get_commit_info(version_sha)
-                        except LookupError as e:
-                            log.warning(e)
-                            date = "[red]Not Available"
-                            message = "[red]Not Available"
-                    else:
-                        log.warning(f"Commit SHA for module '{install_dir}/{module}' is missing from 'modules.json'")
-                        version_sha = "[red]Not Available"
-                        date = "[red]Not Available"
-                        message = "[red]Not Available"
-                    table.add_row(module, repo_url, version_sha, message, date)
-
-            if print_json:
-                return json.dumps(modules, sort_keys=True, indent=4)
-
-        if self.remote:
-            log.info(
-                f"Modules available from {self.modules_repo.remote_url} ({self.modules_repo.branch})"
-                f"{pattern_msg(keywords)}:\n"
-            )
-        else:
-            log.info(f"Modules installed in '{self.dir}'{pattern_msg(keywords)}:\n")
-        return table
+        super().__init__("modules", pipeline_dir, remote, remote_url, branch, no_pull)
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index 96a62dd2ef..1b8ea36687 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -38,6 +38,7 @@ def __init__(self, pipeline_dir):
         self.modules_json = None
         self.pipeline_modules = None
         self.pipeline_subworkflows = None
+        self.pipeline_components = None
 
     def create(self):
         """
@@ -329,15 +330,15 @@ def move_module_to_local(self, module, repo_name):
             to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}"
         shutil.move(current_path, local_modules_dir / to_name)
 
-    def unsynced_modules(self):
+    def unsynced_components(self):
         """
-        Compute the difference between the modules in the directory and the
-        modules in the 'modules.json' file. This is done by looking at all
+        Compute the difference between the modules/subworkflows in the directory and the
+        modules/subworkflows in the 'modules.json' file. This is done by looking at all
         directories containing a 'main.nf' file
 
         Returns:
             (untrack_dirs ([ Path ]), missing_installation (dict)): Directories that are not tracked
-            by the modules.json file, and modules in the modules.json where
+            by the modules.json file, and modules/subworkflows in the modules.json where
             the installation directory is missing
         """
         # Add all modules from modules.json to missing_installation
@@ -380,6 +381,7 @@ def parse_dirs(self, dirs, missing_installation, component_type):
             if not component_in_file:
                 # If it is not, add it to the list of missing subworkflow
                 untracked_dirs.append(component)
+
             else:
                 # If it does, remove the subworkflow from missing_installation
                 module_repo = missing_installation[git_url]
@@ -392,6 +394,7 @@ def parse_dirs(self, dirs, missing_installation, component_type):
                 if len(module_repo[component_type][install_dir]) == 0:
                     # If no modules/subworkflows with missing installation left, remove the git_url from missing_installation
                     missing_installation.pop(git_url)
+
         return untracked_dirs, missing_installation
 
     def has_git_url_and_modules(self):
@@ -479,7 +482,7 @@ def check_up_to_date(self):
             modules_missing_from_modules_json,
             subworkflows_missing_from_modules_json,
             missing_installation,
-        ) = self.unsynced_modules()
+        ) = self.unsynced_components()
 
         # If there are any modules/subworkflows left in 'modules.json' after all installed are removed,
         # we try to reinstall them
@@ -797,11 +800,11 @@ def get_all_components(self, component_type):
         if self.pipeline_components is None:
             self.pipeline_components = {}
             for repo, repo_entry in self.modules_json.get("repos", {}).items():
-                if "modules" in repo_entry:
-                    for dir, modules in repo_entry[component_type].items():
-                        self.pipeline_components[repo] = [(dir, m) for m in modules]
+                if component_type in repo_entry:
+                    for dir, components in repo_entry[component_type].items():
+                        self.pipeline_components[repo] = [(dir, m) for m in components]
 
-        return self.pipeline_modules
+        return self.pipeline_components
 
     def get_module_branch(self, module, repo_url, install_dir):
         """
diff --git a/nf_core/subworkflows/__init__.py b/nf_core/subworkflows/__init__.py
index a57bd9686c..f10c850d8d 100644
--- a/nf_core/subworkflows/__init__.py
+++ b/nf_core/subworkflows/__init__.py
@@ -1,4 +1,5 @@
 from .create import SubworkflowCreate
 from .install import SubworkflowInstall
+from .list import SubworkflowList
 from .subworkflows_test import SubworkflowsTest
 from .test_yml_builder import SubworkflowTestYmlBuilder
diff --git a/nf_core/subworkflows/install.py b/nf_core/subworkflows/install.py
index 439cf187b7..d087bd1abd 100644
--- a/nf_core/subworkflows/install.py
+++ b/nf_core/subworkflows/install.py
@@ -79,8 +79,10 @@ def install(self, subworkflow, silent=False):
         if not version:
             return False
 
-        # Remove subworkflow if force is set
-        if self.force:
+        # Remove subworkflow if force is set and component is installed
+        if self.force and nf_core.components.components_install.check_component_installed(
+            self.component_type, subworkflow, current_version, subworkflow_dir, self.modules_repo, self.force
+        ):
             log.info(f"Removing installed version of '{self.modules_repo.repo_path}/{subworkflow}'")
             self.clear_component_dir(subworkflow, subworkflow_dir)
             nf_core.components.components_install.clean_modules_json(
diff --git a/nf_core/subworkflows/list.py b/nf_core/subworkflows/list.py
new file mode 100644
index 0000000000..ddf144ee00
--- /dev/null
+++ b/nf_core/subworkflows/list.py
@@ -0,0 +1,10 @@
+import logging
+
+from nf_core.components.list import ComponentList
+
+log = logging.getLogger(__name__)
+
+
+class SubworkflowList(ComponentList):
+    def __init__(self, pipeline_dir, remote=True, remote_url=None, branch=None, no_pull=False):
+        super().__init__("subworkflows", pipeline_dir, remote, remote_url, branch, no_pull)
diff --git a/tests/modules/list.py b/tests/modules/list.py
index 1f2b39abf9..2cd1333faf 100644
--- a/tests/modules/list.py
+++ b/tests/modules/list.py
@@ -8,7 +8,7 @@ def test_modules_list_remote(self):
     """Test listing available modules"""
     mods_list = nf_core.modules.ModuleList(None, remote=True)
-    listed_mods = mods_list.list_modules()
+    listed_mods = mods_list.list_components()
     console = Console(record=True)
     console.print(listed_mods)
     output = console.export_text()
@@ -18,7 +18,7 @@ def test_modules_list_remote_gitlab(self):
     """Test listing the modules in the remote gitlab repo"""
     mods_list = nf_core.modules.ModuleList(None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH)
-    listed_mods = mods_list.list_modules()
+    listed_mods = mods_list.list_components()
     print(f"listed modules are {listed_mods}")
     console = Console(record=True)
     console.print(listed_mods)
     output = console.export_text()
@@ -29,7 +29,7 @@ def test_modules_list_pipeline(self):
     """Test listing locally installed modules"""
     mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False)
-    listed_mods = mods_list.list_modules()
+    listed_mods = mods_list.list_components()
     console = Console(record=True)
     console.print(listed_mods)
     output = console.export_text()
@@ -41,7 +41,7 @@ def test_modules_install_and_list_pipeline(self):
     """Test listing locally installed modules"""
     self.mods_install.install("trimgalore")
     mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False)
-    listed_mods = mods_list.list_modules()
+    listed_mods = mods_list.list_components()
     console = Console(record=True)
     console.print(listed_mods)
     output = console.export_text()
@@ -52,7 +52,7 @@ def test_modules_install_gitlab_and_list_pipeline(self):
     """Test listing locally installed modules"""
     self.mods_install_gitlab.install("fastqc")
     mods_list = nf_core.modules.ModuleList(self.pipeline_dir, remote=False)
-    listed_mods = mods_list.list_modules()
+    listed_mods = mods_list.list_components()
     console = Console(record=True)
     console.print(listed_mods)
     output = console.export_text()
diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py
index ac421064e8..eac4929136 100644
--- a/tests/subworkflows/create.py
+++ b/tests/subworkflows/create.py
@@ -7,14 +7,18 @@
 def test_subworkflows_create_succeed(self):
     """Succeed at creating a subworkflow from the template inside a pipeline"""
-    subworkflow_create = nf_core.subworkflows.SubworkflowCreate(self.pipeline_dir, "test_subworkflow", "@author", True)
+    subworkflow_create = nf_core.subworkflows.SubworkflowCreate(
+        self.pipeline_dir, "test_subworkflow_local", "@author", True
+    )
     subworkflow_create.create()
-    assert os.path.exists(os.path.join(self.pipeline_dir, "subworkflows", "local", "test_subworkflow.nf"))
+    assert os.path.exists(os.path.join(self.pipeline_dir, "subworkflows", "local", "test_subworkflow_local.nf"))
 
 
 def test_subworkflows_create_fail_exists(self):
     """Fail at creating the same subworkflow twice"""
-    subworkflow_create = nf_core.subworkflows.SubworkflowCreate(self.pipeline_dir, "test_subworkflow", "@author", False)
+    subworkflow_create = nf_core.subworkflows.SubworkflowCreate(
+        self.pipeline_dir, "test_subworkflow2", "@author", False
+    )
     subworkflow_create.create()
     with pytest.raises(UserWarning) as excinfo:
         subworkflow_create.create()
@@ -24,7 +28,7 @@
 def test_subworkflows_create_nfcore_modules(self):
     """Create a subworkflow in nf-core/modules clone"""
     subworkflow_create = nf_core.subworkflows.SubworkflowCreate(
-        self.nfcore_modules, "test_subworkflow", "@author", False
+        self.nfcore_modules, "test_subworkflow", "@author", True
     )
     subworkflow_create.create()
     assert os.path.exists(os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf"))
diff --git a/tests/subworkflows/list.py b/tests/subworkflows/list.py
new file mode 100644
index 0000000000..8daf5fb599
--- /dev/null
+++ b/tests/subworkflows/list.py
@@ -0,0 +1,49 @@
+from rich.console import Console
+
+import nf_core.subworkflows
+
+from ..utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL
+
+
+def test_subworkflows_list_remote(self):
+    """Test listing available subworkflows"""
+    subworkflows_list = nf_core.subworkflows.SubworkflowList(None, remote=True)
+    listed_subworkflows = subworkflows_list.list_components()
+    console = Console(record=True)
+    console.print(listed_subworkflows)
+    output = console.export_text()
+    assert "bam_stats" in output
+
+
+def test_subworkflows_list_remote_gitlab(self):
+    """Test listing the subworkflows in the remote gitlab repo"""
+    subworkflows_list = nf_core.subworkflows.SubworkflowList(
+        None, remote=True, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+    )
+    listed_subworkflows = subworkflows_list.list_components()
+    console = Console(record=True)
+    console.print(listed_subworkflows)
+    output = console.export_text()
+    assert "bam_stats" in output
+
+
+def test_subworkflows_install_and_list_subworkflows(self):
+    """Test listing locally installed subworkflows"""
+    self.sw_install.install("bam_sort_stats_samtools")
+    subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False)
+    listed_subworkflows = subworkflows_list.list_components()
+    console = Console(record=True)
+    console.print(listed_subworkflows)
+    output = console.export_text()
+    assert "bam_stats" in output
+
+
+def test_subworkflows_install_gitlab_and_list_subworkflows(self):
+    """Test listing locally installed subworkflows"""
+    self.sw_install_gitlab.install("bam_sort_stats_samtools")
+    subworkflows_list = nf_core.subworkflows.SubworkflowList(self.pipeline_dir, remote=False)
+    listed_subworkflows = subworkflows_list.list_components()
+    console = Console(record=True)
+    console.print(listed_subworkflows)
+    output = console.export_text()
+    assert "bam_stats" in output
diff --git a/tests/test_modules.py b/tests/test_modules.py
index e1b9609699..d52386665c 100644
--- a/tests/test_modules.py
+++ b/tests/test_modules.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 """ Tests covering the modules commands
 """
 
diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py
index ea4fa986dd..87e3e5ca28 100644
--- a/tests/test_subworkflows.py
+++ b/tests/test_subworkflows.py
@@ -1,9 +1,7 @@
-#!/usr/bin/env python
 """ Tests covering the subworkflows commands
 """
 
 import os
-import shutil
 import tempfile
 import unittest
 
@@ -13,7 +11,7 @@
 import nf_core.modules
 import nf_core.subworkflows
 
-from .utils import GITLAB_URL, mock_api_calls
+from .utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL, mock_api_calls
 
 
 def create_modules_repo_dummy(tmp_dir):
@@ -32,10 +30,8 @@ def create_modules_repo_dummy(tmp_dir):
         fh.writelines(["repository_type: modules", "\n"])
 
     with requests_mock.Mocker() as mock:
-        mock_api_calls(mock, "bpipe", "0.9.11")
-        # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules
-        module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_medium", False, False)
-        module_create.create()
+        subworkflow_create = nf_core.subworkflows.SubworkflowCreate(root_dir, "test_subworkflow", "@author", True)
+        subworkflow_create.create()
 
     return root_dir
 
@@ -58,6 +54,12 @@ def setUp(self):
         # Set up the nf-core/modules repo dummy
         self.nfcore_modules = create_modules_repo_dummy(self.tmp_dir)
 
+        # Set up install objects
+        self.sw_install = nf_core.subworkflows.SubworkflowInstall(self.pipeline_dir, prompt=False, force=False)
+        self.sw_install_gitlab = nf_core.subworkflows.SubworkflowInstall(
+            self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+        )
+
     ############################################
     # Test of the individual modules commands. #
     ############################################
@@ -67,6 +69,12 @@ def setUp(self):
     from .subworkflows.create import (
         test_subworkflows_create_nfcore_modules,
         test_subworkflows_create_succeed,
     )
+    from .subworkflows.list import (
+        test_subworkflows_install_and_list_subworkflows,
+        test_subworkflows_install_gitlab_and_list_subworkflows,
+        test_subworkflows_list_remote,
+        test_subworkflows_list_remote_gitlab,
+    )
     from .subworkflows.subworkflows_test import (
         test_subworkflows_test_check_inputs,
         test_subworkflows_test_no_installed_subworkflows,
diff --git a/tests/utils.py b/tests/utils.py
index 483f817f20..65c7d48758 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -15,6 +15,7 @@
 GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git"
 GITLAB_REPO = "nf-core"
 GITLAB_DEFAULT_BRANCH = "main-restructure"
+GITLAB_SUBWORKFLOWS_BRANCH = "subworkflows"
 # Branch test stuff
 GITLAB_BRANCH_TEST_BRANCH = "branch-tester-restructure"
 GITLAB_BRANCH_TEST_OLD_SHA = "bce3f17980b8d1beae5e917cfd3c65c0c69e04b5"
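Usage sketch (not part of the patch above): the new list commands are invoked from the CLI as `nf-core subworkflows list remote` or `nf-core subworkflows list local`. Programmatically, the snippet below mirrors how the new SubworkflowList class is called in tests/subworkflows/list.py; the "bam" keyword filter and printing the result through rich's Console are illustrative assumptions, not values taken from this diff.

# Sketch: list remote subworkflows via the new ComponentList-based API.
# Assumptions noted above; calls mirror tests/subworkflows/list.py.
from rich.console import Console

import nf_core.subworkflows

# None as pipeline_dir plus remote=True lists against the default nf-core/modules remote
sw_list = nf_core.subworkflows.SubworkflowList(None, remote=True)

# Returns a rich Table (or a JSON string when print_json=True)
table = sw_list.list_components(keywords=["bam"], print_json=False)
Console().print(table)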