Skip to content

Commit

Permalink
chore: eliminate calls at import time (#5889)
Browse files Browse the repository at this point in the history
It is not best practice, and it often adds unnecessary overhead.

Fixes GH-5344
  • Loading branch information
holmanb authored Dec 2, 2024
1 parent 13ffd19 commit 3f82153
Show file tree
Hide file tree
Showing 15 changed files with 85 additions and 106 deletions.
46 changes: 19 additions & 27 deletions cloudinit/config/cc_chef.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,21 +23,15 @@

RUBY_VERSION_DEFAULT = "1.8"

CHEF_DIRS = tuple(
[
"/etc/chef",
"/var/log/chef",
"/var/lib/chef",
"/var/cache/chef",
"/var/backups/chef",
"/var/run/chef",
]
)
REQUIRED_CHEF_DIRS = tuple(
[
"/etc/chef",
]
# Well-known chef directory layout; presumably created by this module
# before chef runs — confirm against the handler code below.
CHEF_DIRS = (
    "/etc/chef",
    "/var/log/chef",
    "/var/lib/chef",
    "/var/cache/chef",
    "/var/backups/chef",
    "/var/run/chef",
)
# Subset of CHEF_DIRS treated as required.
REQUIRED_CHEF_DIRS = ("/etc/chef",)

# Used if fetching chef from an omnibus-style package
OMNIBUS_URL = "https://www.chef.io/chef/install.sh"
Expand Down Expand Up @@ -74,22 +68,20 @@
]
)
CHEF_RB_TPL_KEYS = frozenset(
itertools.chain(
CHEF_RB_TPL_DEFAULTS.keys(),
CHEF_RB_TPL_BOOL_KEYS,
CHEF_RB_TPL_PATH_KEYS,
[
"server_url",
"node_name",
"environment",
"validation_name",
"chef_license",
],
)
[
*CHEF_RB_TPL_DEFAULTS.keys(),
*CHEF_RB_TPL_BOOL_KEYS,
*CHEF_RB_TPL_PATH_KEYS,
"server_url",
"node_name",
"environment",
"validation_name",
"chef_license",
]
)
CHEF_RB_PATH = "/etc/chef/client.rb"
CHEF_EXEC_PATH = "/usr/bin/chef-client"
CHEF_EXEC_DEF_ARGS = tuple(["-d", "-i", "1800", "-s", "20"])
CHEF_EXEC_DEF_ARGS = ("-d", "-i", "1800", "-s", "20")


LOG = logging.getLogger(__name__)
Expand Down
6 changes: 2 additions & 4 deletions cloudinit/config/cc_mounts.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,10 +31,8 @@

# Shortname matches 'sda', 'sda1', 'xvda', 'hda', 'sdb', xvdb, vda, vdd1, sr0
DEVICE_NAME_FILTER = r"^([x]{0,1}[shv]d[a-z][0-9]*|sr[0-9]+)$"
DEVICE_NAME_RE = re.compile(DEVICE_NAME_FILTER)
# Name matches 'server:/path'
NETWORK_NAME_FILTER = r"^.+:.*"
NETWORK_NAME_RE = re.compile(NETWORK_NAME_FILTER)
FSTAB_PATH = "/etc/fstab"
MNT_COMMENT = "comment=cloudconfig"
MB = 2**20
Expand All @@ -57,7 +55,7 @@ def is_meta_device_name(name):

def is_network_device(name):
# return true if this is a network device
if NETWORK_NAME_RE.match(name):
if re.match(NETWORK_NAME_FILTER, name):
return True
return False

Expand Down Expand Up @@ -114,7 +112,7 @@ def sanitize_devname(startname, transformer, aliases=None):
device_path = "/dev/%s" % (device_path,)
LOG.debug("Mapped metadata name %s to %s", orig, device_path)
else:
if DEVICE_NAME_RE.match(startname):
if re.match(DEVICE_NAME_FILTER, startname):
device_path = "/dev/%s" % (device_path,)

partition_path = None
Expand Down
16 changes: 7 additions & 9 deletions cloudinit/config/cc_rsyslog.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,13 +59,6 @@

LOG = logging.getLogger(__name__)

COMMENT_RE = re.compile(r"[ ]*[#]+[ ]*")
HOST_PORT_RE = re.compile(
r"^(?P<proto>[@]{0,2})"
r"(([\[](?P<bracket_addr>[^\]]*)[\]])|(?P<addr>[^:]*))"
r"([:](?P<port>[0-9]+))?$"
)


def distro_default_rsyslog_config(distro: Distro):
"""Construct a distro-specific rsyslog config dictionary by merging
Expand Down Expand Up @@ -195,7 +188,7 @@ def apply_rsyslog_changes(configs, def_fname, cfg_dir):

def parse_remotes_line(line, name=None):
try:
data, comment = COMMENT_RE.split(line)
data, comment = re.split(r"[ ]*[#]+[ ]*", line)
comment = comment.strip()
except ValueError:
data, comment = (line, None)
Expand All @@ -209,7 +202,12 @@ def parse_remotes_line(line, name=None):
else:
raise ValueError("line had multiple spaces: %s" % data)

toks = HOST_PORT_RE.match(host_port)
toks = re.match(
r"^(?P<proto>[@]{0,2})"
r"(([\[](?P<bracket_addr>[^\]]*)[\]])|(?P<addr>[^:]*))"
r"([:](?P<port>[0-9]+))?$",
host_port,
)

if not toks:
raise ValueError("Invalid host specification '%s'" % host_port)
Expand Down
7 changes: 3 additions & 4 deletions cloudinit/config/cc_ssh.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,6 @@
GENERATE_KEY_NAMES = ["rsa", "ecdsa", "ed25519"]
FIPS_UNSUPPORTED_KEY_NAMES = ["ed25519"]

pattern_unsupported_config_keys = re.compile(
"^(ecdsa-sk|ed25519-sk)_(private|public|certificate)$"
)
KEY_FILE_TPL = "/etc/ssh/ssh_host_%s_key"
PUBLISH_HOST_KEYS = True
# By default publish all supported hostkey types.
Expand Down Expand Up @@ -113,7 +110,9 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None:
cert_config = []
for key, val in cfg["ssh_keys"].items():
if key not in CONFIG_KEY_TO_FILE:
if pattern_unsupported_config_keys.match(key):
if re.match(
"^(ecdsa-sk|ed25519-sk)_(private|public|certificate)$", key
):
reason = "unsupported"
else:
reason = "unrecognized"
Expand Down
8 changes: 2 additions & 6 deletions cloudinit/config/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -325,10 +325,6 @@ def _validator(
yield error_type(msg, schema.get("deprecated_version", "devel"))


_validator_deprecated = partial(_validator, filter_key="deprecated")
_validator_changed = partial(_validator, filter_key="changed")


def _anyOf(
validator,
anyOf,
Expand Down Expand Up @@ -474,8 +470,8 @@ def get_jsonschema_validator():

# Add deprecation handling
validators = dict(Draft4Validator.VALIDATORS)
validators[DEPRECATED_KEY] = _validator_deprecated
validators["changed"] = _validator_changed
validators[DEPRECATED_KEY] = partial(_validator, filter_key="deprecated")
validators["changed"] = partial(_validator, filter_key="changed")
validators["oneOf"] = _oneOf
validators["anyOf"] = _anyOf

Expand Down
11 changes: 6 additions & 5 deletions cloudinit/distros/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,10 +100,6 @@

LOG = logging.getLogger(__name__)

# This is a best guess regex, based on current EC2 AZs on 2017-12-11.
# It could break when Amazon adds new regions and new AZs.
_EC2_AZ_RE = re.compile("^[a-z][a-z]-(?:[a-z]+-)+[0-9][a-z]$")

# Default NTP Client Configurations
PREFERRED_NTP_CLIENTS = ["chrony", "systemd-timesyncd", "ntp", "ntpdate"]

Expand Down Expand Up @@ -1707,7 +1703,12 @@ def _get_package_mirror_info(

# ec2 availability zones are named cc-direction-[0-9][a-d] (us-east-1b)
# the region is us-east-1. so region = az[0:-1]
if _EC2_AZ_RE.match(data_source.availability_zone):
# This is a best guess regex, based on current EC2 AZs on 2017-12-11.
# It could break when Amazon adds new regions and new AZs.
if re.match(
"^[a-z][a-z]-(?:[a-z]+-)+[0-9][a-z]$",
data_source.availability_zone,
):
ec2_region = data_source.availability_zone[0:-1]

if ALLOW_EC2_MIRRORS_ON_NON_AWS_INSTANCE_TYPES:
Expand Down
21 changes: 10 additions & 11 deletions cloudinit/distros/parsers/sys_conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,20 +21,19 @@
# or look at the 'param_expand()' function in the subst.c file in the bash
# source tarball...
SHELL_VAR_RULE = r"[a-zA-Z_]+[a-zA-Z0-9_]*"
SHELL_VAR_REGEXES = [
# Basic variables
re.compile(r"\$" + SHELL_VAR_RULE),
# Things like $?, $0, $-, $@
re.compile(r"\$[0-9#\?\-@\*]"),
# Things like ${blah:1} - but this one
# gets very complex so just try the
# simple path
re.compile(r"\$\{.+\}"),
]


def _contains_shell_variable(text):
for r in SHELL_VAR_REGEXES:
for r in [
# Basic variables
re.compile(r"\$" + SHELL_VAR_RULE),
# Things like $?, $0, $-, $@
re.compile(r"\$[0-9#\?\-@\*]"),
# Things like ${blah:1} - but this one
# gets very complex so just try the
# simple path
re.compile(r"\$\{.+\}"),
]:
if r.search(text):
return True
return False
Expand Down
11 changes: 7 additions & 4 deletions cloudinit/dmi.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@
import logging
import os
import re
from collections import namedtuple
from typing import Optional
from typing import NamedTuple, Optional

from cloudinit import performance, subp
from cloudinit.util import (
Expand All @@ -18,8 +17,12 @@
# Path for DMI Data
DMI_SYS_PATH = "/sys/class/dmi/id"

KernelNames = namedtuple("KernelNames", ["linux", "freebsd", "openbsd"])
KernelNames.__new__.__defaults__ = (None, None, None)

class KernelNames(NamedTuple):
    """Per-kernel DMI key names for one logical DMI field.

    ``linux`` is always present; ``freebsd``/``openbsd`` default to
    ``None`` for fields with no equivalent on those platforms. The
    defaults mirror the ``namedtuple`` this class replaced, which set
    ``__new__.__defaults__ = (None, None, None)`` — without them, any
    construction that omits the BSD names would raise ``TypeError``.
    """

    linux: str
    freebsd: Optional[str] = None
    openbsd: Optional[str] = None


# FreeBSD's kenv(1) and Linux /sys/class/dmi/id/* both use different names from
# dmidecode. The values are the same, and ultimately what we're interested in.
Expand Down
5 changes: 3 additions & 2 deletions cloudinit/handlers/cloud_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@
# a: 22
#
# This gets loaded into yaml with final result {'a': 22}
DEF_MERGERS = mergers.string_extract_mergers("dict(replace)+list()+str()")
CLOUD_PREFIX = "#cloud-config"
JSONP_PREFIX = "#cloud-config-jsonp"

Expand Down Expand Up @@ -103,7 +102,9 @@ def _extract_mergers(self, payload, headers):
all_mergers.extend(mergers_yaml)
all_mergers.extend(mergers_header)
if not all_mergers:
all_mergers = DEF_MERGERS
all_mergers = mergers.string_extract_mergers(
"dict(replace)+list()+str()"
)
return (payload_yaml, all_mergers)

def _merge_patch(self, payload):
Expand Down
4 changes: 1 addition & 3 deletions cloudinit/mergers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,6 @@

from cloudinit import importer, type_utils

NAME_MTCH = re.compile(r"(^[a-zA-Z_][A-Za-z0-9_]*)\((.*?)\)$")

DEF_MERGE_TYPE = "list()+dict()+str()"
MERGER_PREFIX = "m_"
MERGER_ATTR = "Merger"
Expand Down Expand Up @@ -108,7 +106,7 @@ def string_extract_mergers(merge_how):
m_name = m_name.replace("-", "_")
if not m_name:
continue
match = NAME_MTCH.match(m_name)
match = re.match(r"(^[a-zA-Z_][A-Za-z0-9_]*)\((.*?)\)$", m_name)
if not match:
msg = "Matcher identifier '%s' is not in the right format" % (
m_name
Expand Down
5 changes: 1 addition & 4 deletions cloudinit/net/renderer.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,6 @@ def filter_by_attr(match_name):
return lambda iface: (match_name in iface and iface[match_name])


filter_by_physical = filter_by_type("physical")


class Renderer(abc.ABC):
def __init__(self, config=None):
pass
Expand All @@ -34,7 +31,7 @@ def _render_persistent_net(network_state: NetworkState):
# TODO(harlowja): this seems shared between eni renderer and
# this, so move it to a shared location.
content = io.StringIO()
for iface in network_state.iter_interfaces(filter_by_physical):
for iface in network_state.iter_interfaces(filter_by_type("physical")):
# for physical interfaces write out a persist net udev rule
if "name" in iface and iface.get("mac_address"):
driver = iface.get("driver", None)
Expand Down
5 changes: 3 additions & 2 deletions cloudinit/net/sysconfig.py
Original file line number Diff line number Diff line change
Expand Up @@ -718,8 +718,9 @@ def _render_bonding_opts(cls, iface_cfg, iface, flavor):
def _render_physical_interfaces(
cls, network_state, iface_contents, flavor
):
physical_filter = renderer.filter_by_physical
for iface in network_state.iter_interfaces(physical_filter):
for iface in network_state.iter_interfaces(
renderer.filter_by_type("physical")
):
iface_name = iface.get("config_id") or iface["name"]
iface_subnets = iface.get("subnets", [])
iface_cfg = iface_contents[iface_name]
Expand Down
27 changes: 11 additions & 16 deletions cloudinit/sources/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,8 @@
import os
import pickle
import re
from collections import namedtuple
from enum import Enum, unique
from typing import Any, Dict, List, Optional, Tuple, Union
from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Union

from cloudinit import (
atomic_helper,
Expand Down Expand Up @@ -177,20 +176,16 @@ def redact_sensitive_keys(metadata, redact_value=REDACT_SENSITIVE_VALUE):
return md_copy


URLParams = namedtuple(
"URLParams",
[
"max_wait_seconds",
"timeout_seconds",
"num_retries",
"sec_between_retries",
],
)
class URLParams(NamedTuple):
    """Retry/timeout parameters for fetching datasource metadata URLs.

    NOTE(review): field semantics inferred from the names — confirm
    against the callers that build this tuple.
    """

    max_wait_seconds: int  # overall time budget across all attempts
    timeout_seconds: int  # per-request timeout
    num_retries: int  # number of retry attempts
    sec_between_retries: int  # sleep between attempts

DataSourceHostname = namedtuple(
"DataSourceHostname",
["hostname", "is_default"],
)

class DataSourceHostname(NamedTuple):
    """Result of a DataSource hostname lookup.

    Per the ``get_hostname`` docstring in this module, ``is_default``
    is True only when ``hostname`` is the fallback value (localhost)
    rather than one supplied by instance metadata.
    """

    hostname: Optional[str]
    is_default: bool


class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
Expand Down Expand Up @@ -827,7 +822,7 @@ def get_hostname(self, fqdn=False, resolve_ip=False, metadata_only=False):
@param metadata_only: Boolean, set True to avoid looking up hostname
if meta-data doesn't have local-hostname present.
@return: a DataSourceHostname namedtuple
@return: a DataSourceHostname NamedTuple
<hostname or qualified hostname>, <is_default> (str, bool).
is_default is a bool and
it's true only if hostname is localhost and was
Expand Down
Loading

0 comments on commit 3f82153

Please sign in to comment.