Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: pre-commit autoupdate #396

Merged
merged 2 commits into from
Dec 19, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,29 +6,29 @@ ci:
autoupdate_commit_msg: "chore: pre-commit autoupdate"
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
exclude: template
- id: check-added-large-files
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.0.277'
rev: 'v0.1.8'
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/psf/black
rev: 23.3.0
rev: 23.12.0
hooks:
- id: black
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.32.0
rev: v1.33.0
hooks:
- id: yamllint
exclude: template
- repo: https://github.com/codespell-project/codespell
rev: v2.2.5
rev: v2.2.6
hooks:
- id: codespell
entry: codespell -I .codespell-ignore-words.txt
Expand All @@ -37,7 +37,7 @@ repos:
test/test_util_path.py
)$
- repo: https://github.com/compilerla/conventional-pre-commit
rev: v2.3.0
rev: v3.0.0
hooks:
- id: conventional-pre-commit
stages: [commit-msg]
Expand Down
4 changes: 1 addition & 3 deletions src/taskgraph/actions/cancel.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,7 @@ def cancel_action(parameters, graph_config, input, task_group_id, task_id):
# cannot be cancelled at this time, but it's also not running
# anymore, so we can ignore this error.
logger.info(
'Task "{}" is past its deadline and cannot be cancelled.'.format(
task_id
)
f'Task "{task_id}" is past its deadline and cannot be cancelled.'
)
return
raise
4 changes: 1 addition & 3 deletions src/taskgraph/actions/cancel_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,7 @@ def do_cancel_task(task_id):
# cannot be cancelled at this time, but it's also not running
# anymore, so we can ignore this error.
logger.info(
"Task {} is past its deadline and cannot be cancelled.".format(
task_id
)
f"Task {task_id} is past its deadline and cannot be cancelled."
)
return
raise
Expand Down
8 changes: 2 additions & 6 deletions src/taskgraph/actions/registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,9 +154,7 @@ def register_callback(cb, cb_name=cb_name):
], "register_callback_action must be used as decorator"
if not cb_name:
cb_name = name
assert cb_name not in callbacks, "callback name {} is not unique".format(
cb_name
)
assert cb_name not in callbacks, f"callback name {cb_name} is not unique"

def action_builder(parameters, graph_config, decision_task_id):
if not available(parameters):
Expand Down Expand Up @@ -219,9 +217,7 @@ def action_builder(parameters, graph_config, decision_task_id):
{
"kind": "hook",
"hookGroupId": f"project-{trustDomain}",
"hookId": "in-tree-action-{}-{}/{}".format(
level, actionPerm, tcyml_hash
),
"hookId": f"in-tree-action-{level}-{actionPerm}/{tcyml_hash}",
"hookPayload": {
# provide the decision-task parameters as context for triggerHook
"decision": {
Expand Down
12 changes: 4 additions & 8 deletions src/taskgraph/actions/retrigger.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,7 @@ def _should_retrigger(task_graph, label):
"""
if label not in task_graph:
logger.info(
"Task {} not in full taskgraph, assuming task should not be retriggered.".format(
label
)
f"Task {label} not in full taskgraph, assuming task should not be retriggered."
)
return False
return task_graph[label].attributes.get("retrigger", False)
Expand Down Expand Up @@ -155,8 +153,8 @@ def retrigger_action(parameters, graph_config, input, task_group_id, task_id):

if not input.get("force", None) and not _should_retrigger(full_task_graph, label):
logger.info(
"Not retriggering task {}, task should not be retriggered "
"and force not specified.".format(label)
f"Not retriggering task {label}, task should not be retriggered "
"and force not specified."
)
sys.exit(1)

Expand Down Expand Up @@ -218,9 +216,7 @@ def _rerun_task(task_id, label):
state = taskcluster.state_task(task_id)
if state not in RERUN_STATES:
logger.warning(
"No need to rerun {}: state '{}' not in {}!".format(
label, state, RERUN_STATES
)
f"No need to rerun {label}: state '{state}' not in {RERUN_STATES}!"
)
return
taskcluster.rerun_task(task_id)
Expand Down
6 changes: 3 additions & 3 deletions src/taskgraph/decision.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,9 +214,9 @@ def get_decision_parameters(graph_config, options):
parameters.update(PER_PROJECT_PARAMETERS[project])
except KeyError:
logger.warning(
"using default project parameters; add {} to "
"PER_PROJECT_PARAMETERS in {} to customize behavior "
"for this project".format(project, __file__)
f"using default project parameters; add {project} to "
f"PER_PROJECT_PARAMETERS in {__file__} to customize behavior "
"for this project"
)
parameters.update(PER_PROJECT_PARAMETERS["default"])

Expand Down
2 changes: 1 addition & 1 deletion src/taskgraph/parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -284,7 +284,7 @@ def file_url(self, path, pretty=False):
else:
raise ParameterMismatch(
"Don't know how to determine file URL for non-github"
"repo: {}".format(repo)
f"repo: {repo}"
)
else:
raise RuntimeError(
Expand Down
12 changes: 3 additions & 9 deletions src/taskgraph/transforms/docker_image.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,9 +92,7 @@ def fill_template(config, tasks):
for p in packages:
if p not in available_packages:
raise Exception(
"Missing package job for {}-{}: {}".format(
config.kind, image_name, p
)
f"Missing package job for {config.kind}-{image_name}: {p}"
)

if not taskgraph.fast:
Expand All @@ -119,9 +117,7 @@ def fill_template(config, tasks):
digest_data += [json.dumps(args, sort_keys=True)]
context_hashes[image_name] = context_hash

description = "Build the docker image {} for use by dependent tasks".format(
image_name
)
description = f"Build the docker image {image_name} for use by dependent tasks"

args["DOCKER_IMAGE_PACKAGES"] = " ".join(f"<{p}>" for p in packages)

Expand Down Expand Up @@ -158,9 +154,7 @@ def fill_template(config, tasks):
],
"env": {
"CONTEXT_TASK_ID": {"task-reference": "<decision>"},
"CONTEXT_PATH": "public/docker-contexts/{}.tar.gz".format(
image_name
),
"CONTEXT_PATH": f"public/docker-contexts/{image_name}.tar.gz",
"HASH": context_hash,
"PROJECT": config.params["project"],
"IMAGE_NAME": image_name,
Expand Down
12 changes: 4 additions & 8 deletions src/taskgraph/transforms/run/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,9 +255,7 @@ def use_fetches(config, tasks):
label = aliases.get(label, label)
if label not in artifact_names:
raise Exception(
"Missing fetch task for {kind}-{name}: {fetch}".format(
kind=config.kind, name=name, fetch=fetch_name
)
f"Missing fetch task for {config.kind}-{name}: {fetch_name}"
)
if label in extra_env:
env.update(extra_env[label])
Expand All @@ -275,8 +273,8 @@ def use_fetches(config, tasks):
else:
if kind not in dependencies:
raise Exception(
"{name} can't fetch {kind} artifacts because "
"it has no {kind} dependencies!".format(name=name, kind=kind)
f"{name} can't fetch {kind} artifacts because "
f"it has no {kind} dependencies!"
)
dep_label = dependencies[kind]
if dep_label in artifact_prefixes:
Expand Down Expand Up @@ -436,9 +434,7 @@ def configure_taskdesc_for_run(config, task, taskdesc, worker_implementation):

if worker_implementation not in registry[run_using]:
raise Exception(
"no functions for run.using {!r} on {!r}".format(
run_using, worker_implementation
)
f"no functions for run.using {run_using!r} on {worker_implementation!r}"
)

func, schema, defaults = registry[run_using][worker_implementation]
Expand Down
12 changes: 6 additions & 6 deletions src/taskgraph/util/keyed_by.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,8 @@ def evaluate_keyed_by(
# Error out when only 'default' is specified as only alternatives,
# because we don't need to by-{keyed_by} there.
raise Exception(
"Keyed-by '{}' unnecessary with only value 'default' "
"found, when determining item {}".format(keyed_by, item_name)
f"Keyed-by '{keyed_by}' unnecessary with only value 'default' "
f"found, when determining item {item_name}"
)

if key is None:
Expand All @@ -76,15 +76,15 @@ def evaluate_keyed_by(
continue
else:
raise Exception(
"No attribute {} and no value for 'default' found "
"while determining item {}".format(keyed_by, item_name)
f"No attribute {keyed_by} and no value for 'default' found "
f"while determining item {item_name}"
)

matches = keymatch(alternatives, key)
if enforce_single_match and len(matches) > 1:
raise Exception(
"Multiple matching values for {} {!r} found while "
"determining item {}".format(keyed_by, key, item_name)
f"Multiple matching values for {keyed_by} {key!r} found while "
f"determining item {item_name}"
)
elif matches:
value = matches[0]
Expand Down
4 changes: 1 addition & 3 deletions src/taskgraph/util/parameterization.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,7 @@ def repl(match):
task_id = dependencies[dependency]
except KeyError:
raise KeyError(
"task '{}' has no dependency named '{}'".format(
label, dependency
)
f"task '{label}' has no dependency named '{dependency}'"
)

assert artifact_name.startswith(
Expand Down
6 changes: 2 additions & 4 deletions src/taskgraph/util/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ def check_identifier(path, k):
if not identifier_re.match(k) and not excepted(path):
raise RuntimeError(
"YAML schemas should use dashed lower-case identifiers, "
"not {!r} @ {}".format(k, path)
f"not {k!r} @ {path}"
)
elif isinstance(k, (voluptuous.Optional, voluptuous.Required)):
check_identifier(path, k.schema)
Expand All @@ -191,9 +191,7 @@ def check_identifier(path, k):
check_identifier(path, v)
elif not excepted(path):
raise RuntimeError(
"Unexpected type in YAML schema: {} @ {}".format(
type(k).__name__, path
)
f"Unexpected type in YAML schema: {type(k).__name__} @ {path}"
)

if isinstance(sch, collections.abc.Mapping):
Expand Down
6 changes: 1 addition & 5 deletions src/taskgraph/util/taskcluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,11 +327,7 @@ def get_purge_cache_url(provisioner_id, worker_type, use_proxy=False):
def purge_cache(provisioner_id, worker_type, cache_name, use_proxy=False):
"""Requests a cache purge from the purge-caches service."""
if testing:
logger.info(
"Would have purged {}/{}/{}.".format(
provisioner_id, worker_type, cache_name
)
)
logger.info(f"Would have purged {provisioner_id}/{worker_type}/{cache_name}.")
else:
logger.info(f"Purging {provisioner_id}/{worker_type}/{cache_name}.")
purge_cache_url = get_purge_cache_url(provisioner_id, worker_type, use_proxy)
Expand Down
4 changes: 1 addition & 3 deletions src/taskgraph/util/time.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,9 +73,7 @@ def value_of(input_str):

if unit not in ALIASES:
raise UnknownTimeMeasurement(
"{} is not a valid time measure use one of {}".format(
unit, sorted(ALIASES.keys())
)
f"{unit} is not a valid time measure use one of {sorted(ALIASES.keys())}"
)

return ALIASES[unit](value)
Expand Down
16 changes: 4 additions & 12 deletions src/taskgraph/util/verify.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,10 +134,8 @@ def verify_task_graph_symbol(task, taskgraph, scratch_pad, graph_config, paramet
collection_keys = tuple(sorted(treeherder.get("collection", {}).keys()))
if len(collection_keys) != 1:
raise Exception(
"Task {} can't be in multiple treeherder collections "
"(the part of the platform after `/`): {}".format(
task.label, collection_keys
)
f"Task {task.label} can't be in multiple treeherder collections "
f"(the part of the platform after `/`): {collection_keys}"
)
platform = treeherder.get("machine", {}).get("platform")
group_symbol = treeherder.get("groupSymbol")
Expand Down Expand Up @@ -206,9 +204,7 @@ def verify_routes_notification_filters(
route_filter = route.split(".")[-1]
if route_filter not in valid_filters:
raise Exception(
"{} has invalid notification filter ({})".format(
task.label, route_filter
)
f"{task.label} has invalid notification filter ({route_filter})"
)


Expand Down Expand Up @@ -262,11 +258,7 @@ def verify_toolchain_alias(task, taskgraph, scratch_pad, graph_config, parameter
if key in scratch_pad:
raise Exception(
"Duplicate toolchain-alias in tasks "
"`{}`and `{}`: {}".format(
task.label,
scratch_pad[key],
key,
)
f"`{task.label}`and `{scratch_pad[key]}`: {key}"
)
else:
scratch_pad[key] = task.label
Expand Down