From d257da75e15ef5d0a3267fe26b22126805ad419b Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 18 Dec 2023 17:59:19 +0000
Subject: [PATCH 1/2] chore: pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0)
- [github.com/astral-sh/ruff-pre-commit: v0.0.277 → v0.1.8](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.277...v0.1.8)
- [github.com/psf/black: 23.3.0 → 23.12.0](https://github.com/psf/black/compare/23.3.0...23.12.0)
- [github.com/adrienverge/yamllint.git: v1.32.0 → v1.33.0](https://github.com/adrienverge/yamllint.git/compare/v1.32.0...v1.33.0)
- [github.com/codespell-project/codespell: v2.2.5 → v2.2.6](https://github.com/codespell-project/codespell/compare/v2.2.5...v2.2.6)
- [github.com/compilerla/conventional-pre-commit: v2.3.0 → v3.0.0](https://github.com/compilerla/conventional-pre-commit/compare/v2.3.0...v3.0.0)
---
 .pre-commit-config.yaml | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f245d0ac3..7595d2e84 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ ci:
   autoupdate_commit_msg: "chore: pre-commit autoupdate"
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
@@ -14,21 +14,21 @@ repos:
         exclude: template
       - id: check-added-large-files
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: 'v0.0.277'
+    rev: 'v0.1.8'
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
   - repo: https://github.com/psf/black
-    rev: 23.3.0
+    rev: 23.12.0
     hooks:
       - id: black
   - repo: https://github.com/adrienverge/yamllint.git
-    rev: v1.32.0
+    rev: v1.33.0
     hooks:
       - id: yamllint
         exclude: template
   - repo: https://github.com/codespell-project/codespell
-    rev: v2.2.5
+    rev: v2.2.6
    hooks:
       - id: codespell
         entry: codespell -I .codespell-ignore-words.txt
@@ -37,7 +37,7 @@ repos:
             test/test_util_path.py
           )$
   - repo: https://github.com/compilerla/conventional-pre-commit
-    rev: v2.3.0
+    rev: v3.0.0
     hooks:
       - id: conventional-pre-commit
         stages: [commit-msg]

From 7ccb6e95c52dab18b996b71967a4353740c7f690 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 18 Dec 2023 17:59:28 +0000
Subject: [PATCH 2/2] style: pre-commit.ci auto fixes [...]

---
 src/taskgraph/actions/cancel.py          |  4 +---
 src/taskgraph/actions/cancel_all.py      |  4 +---
 src/taskgraph/actions/registry.py        |  8 ++------
 src/taskgraph/actions/retrigger.py       | 12 ++++--------
 src/taskgraph/decision.py                |  6 +++---
 src/taskgraph/parameters.py              |  2 +-
 src/taskgraph/transforms/docker_image.py | 12 +++---------
 src/taskgraph/transforms/run/__init__.py | 12 ++++--------
 src/taskgraph/util/keyed_by.py           | 12 ++++++------
 src/taskgraph/util/parameterization.py   |  4 +---
 src/taskgraph/util/schema.py             |  6 ++----
 src/taskgraph/util/taskcluster.py        |  6 +-----
 src/taskgraph/util/time.py               |  4 +---
 src/taskgraph/util/verify.py             | 16 ++++------------
 14 files changed, 34 insertions(+), 74 deletions(-)

diff --git a/src/taskgraph/actions/cancel.py b/src/taskgraph/actions/cancel.py
index 03788c653..33a5394e6 100644
--- a/src/taskgraph/actions/cancel.py
+++ b/src/taskgraph/actions/cancel.py
@@ -34,9 +34,7 @@ def cancel_action(parameters, graph_config, input, task_group_id, task_id):
             # cannot be cancelled at this time, but it's also not running
             # anymore, so we can ignore this error.
             logger.info(
-                'Task "{}" is past its deadline and cannot be cancelled.'.format(
-                    task_id
-                )
+                f'Task "{task_id}" is past its deadline and cannot be cancelled.'
             )
             return
         raise
diff --git a/src/taskgraph/actions/cancel_all.py b/src/taskgraph/actions/cancel_all.py
index d3e044083..55453b762 100644
--- a/src/taskgraph/actions/cancel_all.py
+++ b/src/taskgraph/actions/cancel_all.py
@@ -43,9 +43,7 @@ def do_cancel_task(task_id):
                 # cannot be cancelled at this time, but it's also not running
                 # anymore, so we can ignore this error.
                 logger.info(
-                    "Task {} is past its deadline and cannot be cancelled.".format(
-                        task_id
-                    )
+                    f"Task {task_id} is past its deadline and cannot be cancelled."
                 )
                 return
             raise
diff --git a/src/taskgraph/actions/registry.py b/src/taskgraph/actions/registry.py
index 1e909d30c..cec76c890 100644
--- a/src/taskgraph/actions/registry.py
+++ b/src/taskgraph/actions/registry.py
@@ -154,9 +154,7 @@ def register_callback(cb, cb_name=cb_name):
         ], "register_callback_action must be used as decorator"
         if not cb_name:
             cb_name = name
-        assert cb_name not in callbacks, "callback name {} is not unique".format(
-            cb_name
-        )
+        assert cb_name not in callbacks, f"callback name {cb_name} is not unique"
 
         def action_builder(parameters, graph_config, decision_task_id):
             if not available(parameters):
@@ -219,9 +217,7 @@ def action_builder(parameters, graph_config, decision_task_id):
                 {
                     "kind": "hook",
                     "hookGroupId": f"project-{trustDomain}",
-                    "hookId": "in-tree-action-{}-{}/{}".format(
-                        level, actionPerm, tcyml_hash
-                    ),
+                    "hookId": f"in-tree-action-{level}-{actionPerm}/{tcyml_hash}",
                     "hookPayload": {
                         # provide the decision-task parameters as context for triggerHook
                         "decision": {
diff --git a/src/taskgraph/actions/retrigger.py b/src/taskgraph/actions/retrigger.py
index fd488b35f..01fba8557 100644
--- a/src/taskgraph/actions/retrigger.py
+++ b/src/taskgraph/actions/retrigger.py
@@ -33,9 +33,7 @@ def _should_retrigger(task_graph, label):
     """
     if label not in task_graph:
         logger.info(
-            "Task {} not in full taskgraph, assuming task should not be retriggered.".format(
-                label
-            )
+            f"Task {label} not in full taskgraph, assuming task should not be retriggered."
         )
         return False
     return task_graph[label].attributes.get("retrigger", False)
@@ -155,8 +153,8 @@ def retrigger_action(parameters, graph_config, input, task_group_id, task_id):
 
     if not input.get("force", None) and not _should_retrigger(full_task_graph, label):
         logger.info(
-            "Not retriggering task {}, task should not be retrigged "
-            "and force not specified.".format(label)
+            f"Not retriggering task {label}, task should not be retrigged "
+            "and force not specified."
         )
         sys.exit(1)
 
@@ -218,9 +216,7 @@ def _rerun_task(task_id, label):
     state = taskcluster.state_task(task_id)
     if state not in RERUN_STATES:
         logger.warning(
-            "No need to rerun {}: state '{}' not in {}!".format(
-                label, state, RERUN_STATES
-            )
+            f"No need to rerun {label}: state '{state}' not in {RERUN_STATES}!"
         )
         return
     taskcluster.rerun_task(task_id)
diff --git a/src/taskgraph/decision.py b/src/taskgraph/decision.py
index 71b727a0c..522e37645 100644
--- a/src/taskgraph/decision.py
+++ b/src/taskgraph/decision.py
@@ -214,9 +214,9 @@ def get_decision_parameters(graph_config, options):
         parameters.update(PER_PROJECT_PARAMETERS[project])
     except KeyError:
         logger.warning(
-            "using default project parameters; add {} to "
-            "PER_PROJECT_PARAMETERS in {} to customize behavior "
-            "for this project".format(project, __file__)
+            f"using default project parameters; add {project} to "
+            f"PER_PROJECT_PARAMETERS in {__file__} to customize behavior "
+            "for this project"
         )
         parameters.update(PER_PROJECT_PARAMETERS["default"])
 
diff --git a/src/taskgraph/parameters.py b/src/taskgraph/parameters.py
index 48571d97a..30b723d18 100644
--- a/src/taskgraph/parameters.py
+++ b/src/taskgraph/parameters.py
@@ -284,7 +284,7 @@ def file_url(self, path, pretty=False):
             else:
                 raise ParameterMismatch(
                     "Don't know how to determine file URL for non-github"
-                    "repo: {}".format(repo)
+                    f"repo: {repo}"
                 )
         else:
             raise RuntimeError(
diff --git a/src/taskgraph/transforms/docker_image.py b/src/taskgraph/transforms/docker_image.py
index d0c5b9c97..05fe68e5b 100644
--- a/src/taskgraph/transforms/docker_image.py
+++ b/src/taskgraph/transforms/docker_image.py
@@ -92,9 +92,7 @@ def fill_template(config, tasks):
         for p in packages:
             if p not in available_packages:
                 raise Exception(
-                    "Missing package job for {}-{}: {}".format(
-                        config.kind, image_name, p
-                    )
+                    f"Missing package job for {config.kind}-{image_name}: {p}"
                 )
 
         if not taskgraph.fast:
@@ -119,9 +117,7 @@ def fill_template(config, tasks):
                 digest_data += [json.dumps(args, sort_keys=True)]
             context_hashes[image_name] = context_hash
 
-        description = "Build the docker image {} for use by dependent tasks".format(
-            image_name
-        )
+        description = f"Build the docker image {image_name} for use by dependent tasks"
 
         args["DOCKER_IMAGE_PACKAGES"] = " ".join(f"<{p}>" for p in packages)
 
@@ -158,9 +154,7 @@ def fill_template(config, tasks):
             ],
             "env": {
                 "CONTEXT_TASK_ID": {"task-reference": ""},
-                "CONTEXT_PATH": "public/docker-contexts/{}.tar.gz".format(
-                    image_name
-                ),
+                "CONTEXT_PATH": f"public/docker-contexts/{image_name}.tar.gz",
                 "HASH": context_hash,
                 "PROJECT": config.params["project"],
                 "IMAGE_NAME": image_name,
diff --git a/src/taskgraph/transforms/run/__init__.py b/src/taskgraph/transforms/run/__init__.py
index d221e817b..4a1e146d4 100644
--- a/src/taskgraph/transforms/run/__init__.py
+++ b/src/taskgraph/transforms/run/__init__.py
@@ -255,9 +255,7 @@ def use_fetches(config, tasks):
                 label = aliases.get(label, label)
                 if label not in artifact_names:
                     raise Exception(
-                        "Missing fetch task for {kind}-{name}: {fetch}".format(
-                            kind=config.kind, name=name, fetch=fetch_name
-                        )
+                        f"Missing fetch task for {config.kind}-{name}: {fetch_name}"
                     )
                 if label in extra_env:
                     env.update(extra_env[label])
@@ -275,8 +273,8 @@ def use_fetches(config, tasks):
             else:
                 if kind not in dependencies:
                     raise Exception(
-                        "{name} can't fetch {kind} artifacts because "
-                        "it has no {kind} dependencies!".format(name=name, kind=kind)
+                        f"{name} can't fetch {kind} artifacts because "
+                        f"it has no {kind} dependencies!"
                     )
                 dep_label = dependencies[kind]
                 if dep_label in artifact_prefixes:
@@ -436,9 +434,7 @@ def configure_taskdesc_for_run(config, task, taskdesc, worker_implementation):
 
     if worker_implementation not in registry[run_using]:
         raise Exception(
-            "no functions for run.using {!r} on {!r}".format(
-                run_using, worker_implementation
-            )
+            f"no functions for run.using {run_using!r} on {worker_implementation!r}"
         )
 
     func, schema, defaults = registry[run_using][worker_implementation]
diff --git a/src/taskgraph/util/keyed_by.py b/src/taskgraph/util/keyed_by.py
index 9b0c5a44f..9eae63b5d 100644
--- a/src/taskgraph/util/keyed_by.py
+++ b/src/taskgraph/util/keyed_by.py
@@ -66,8 +66,8 @@ def evaluate_keyed_by(
             # Error out when only 'default' is specified as only alternatives,
             # because we don't need to by-{keyed_by} there.
            raise Exception(
-                "Keyed-by '{}' unnecessary with only value 'default' "
-                "found, when determining item {}".format(keyed_by, item_name)
+                f"Keyed-by '{keyed_by}' unnecessary with only value 'default' "
+                f"found, when determining item {item_name}"
             )
 
         if key is None:
@@ -76,15 +76,15 @@ def evaluate_keyed_by(
                 continue
             else:
                 raise Exception(
-                    "No attribute {} and no value for 'default' found "
-                    "while determining item {}".format(keyed_by, item_name)
+                    f"No attribute {keyed_by} and no value for 'default' found "
+                    f"while determining item {item_name}"
                 )
 
         matches = keymatch(alternatives, key)
         if enforce_single_match and len(matches) > 1:
             raise Exception(
-                "Multiple matching values for {} {!r} found while "
-                "determining item {}".format(keyed_by, key, item_name)
+                f"Multiple matching values for {keyed_by} {key!r} found while "
+                f"determining item {item_name}"
             )
         elif matches:
             value = matches[0]
diff --git a/src/taskgraph/util/parameterization.py b/src/taskgraph/util/parameterization.py
index b96ae1f75..61a3eb77e 100644
--- a/src/taskgraph/util/parameterization.py
+++ b/src/taskgraph/util/parameterization.py
@@ -80,9 +80,7 @@ def repl(match):
                     task_id = dependencies[dependency]
                 except KeyError:
                     raise KeyError(
-                        "task '{}' has no dependency named '{}'".format(
-                            label, dependency
-                        )
+                        f"task '{label}' has no dependency named '{dependency}'"
                     )
 
             assert artifact_name.startswith(
diff --git a/src/taskgraph/util/schema.py b/src/taskgraph/util/schema.py
index 816f60695..02e79a3a2 100644
--- a/src/taskgraph/util/schema.py
+++ b/src/taskgraph/util/schema.py
@@ -182,7 +182,7 @@ def check_identifier(path, k):
             if not identifier_re.match(k) and not excepted(path):
                 raise RuntimeError(
                     "YAML schemas should use dashed lower-case identifiers, "
-                    "not {!r} @ {}".format(k, path)
+                    f"not {k!r} @ {path}"
                 )
         elif isinstance(k, (voluptuous.Optional, voluptuous.Required)):
             check_identifier(path, k.schema)
@@ -191,9 +191,7 @@ def check_identifier(path, k):
                 check_identifier(path, v)
         elif not excepted(path):
             raise RuntimeError(
-                "Unexpected type in YAML schema: {} @ {}".format(
-                    type(k).__name__, path
-                )
+                f"Unexpected type in YAML schema: {type(k).__name__} @ {path}"
             )
 
     if isinstance(sch, collections.abc.Mapping):
diff --git a/src/taskgraph/util/taskcluster.py b/src/taskgraph/util/taskcluster.py
index 1c0f44d65..2fc92a3d6 100644
--- a/src/taskgraph/util/taskcluster.py
+++ b/src/taskgraph/util/taskcluster.py
@@ -327,11 +327,7 @@ def get_purge_cache_url(provisioner_id, worker_type, use_proxy=False):
 def purge_cache(provisioner_id, worker_type, cache_name, use_proxy=False):
     """Requests a cache purge from the purge-caches service."""
     if testing:
-        logger.info(
-            "Would have purged {}/{}/{}.".format(
-                provisioner_id, worker_type, cache_name
-            )
-        )
+        logger.info(f"Would have purged {provisioner_id}/{worker_type}/{cache_name}.")
     else:
         logger.info(f"Purging {provisioner_id}/{worker_type}/{cache_name}.")
         purge_cache_url = get_purge_cache_url(provisioner_id, worker_type, use_proxy)
diff --git a/src/taskgraph/util/time.py b/src/taskgraph/util/time.py
index e511978b5..6639e5ddd 100644
--- a/src/taskgraph/util/time.py
+++ b/src/taskgraph/util/time.py
@@ -73,9 +73,7 @@ def value_of(input_str):
 
     if unit not in ALIASES:
         raise UnknownTimeMeasurement(
-            "{} is not a valid time measure use one of {}".format(
-                unit, sorted(ALIASES.keys())
-            )
+            f"{unit} is not a valid time measure use one of {sorted(ALIASES.keys())}"
         )
     return ALIASES[unit](value)
 
diff --git a/src/taskgraph/util/verify.py b/src/taskgraph/util/verify.py
index e6705c16c..68260e0dd 100644
--- a/src/taskgraph/util/verify.py
+++ b/src/taskgraph/util/verify.py
@@ -134,10 +134,8 @@ def verify_task_graph_symbol(task, taskgraph, scratch_pad, graph_config, paramet
         collection_keys = tuple(sorted(treeherder.get("collection", {}).keys()))
         if len(collection_keys) != 1:
             raise Exception(
-                "Task {} can't be in multiple treeherder collections "
-                "(the part of the platform after `/`): {}".format(
-                    task.label, collection_keys
-                )
+                f"Task {task.label} can't be in multiple treeherder collections "
+                f"(the part of the platform after `/`): {collection_keys}"
             )
         platform = treeherder.get("machine", {}).get("platform")
         group_symbol = treeherder.get("groupSymbol")
@@ -206,9 +204,7 @@ def verify_routes_notification_filters(
             route_filter = route.split(".")[-1]
             if route_filter not in valid_filters:
                 raise Exception(
-                    "{} has invalid notification filter ({})".format(
-                        task.label, route_filter
-                    )
+                    f"{task.label} has invalid notification filter ({route_filter})"
                 )
 
 
@@ -262,11 +258,7 @@ def verify_toolchain_alias(task, taskgraph, scratch_pad, graph_config, parameter
         if key in scratch_pad:
             raise Exception(
                 "Duplicate toolchain-alias in tasks "
-                "`{}`and `{}`: {}".format(
-                    task.label,
-                    scratch_pad[key],
-                    key,
-                )
+                f"`{task.label}`and `{scratch_pad[key]}`: {key}"
             )
         else:
             scratch_pad[key] = task.label