D401 Support - Root Files (#33352)
(cherry picked from commit 5e1e5fa)
ferruzzi authored and ephraimbuddy committed Aug 28, 2023
1 parent 5988ea6 commit 14c91db
Showing 7 changed files with 40 additions and 40 deletions.
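D401 is the pydocstyle check "First line should be in imperative mood": a docstring summary should start with "Return ..." rather than "Returns ...". Every hunk in this commit makes that one-word change. A minimal before/after illustration (the function names here are invented; the docstrings are the actual pair changed in airflow/configuration.py below):

```python
def run_command_before(command: str) -> str:
    """Runs command and returns stdout."""  # flagged by D401: summary is not imperative
    ...


def run_command_after(command: str) -> str:
    """Run command and returns stdout."""  # imperative first word, passes D401
    ...
```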
1 change: 0 additions & 1 deletion airflow/__main__.py
@@ -38,7 +38,6 @@


def main():
-     """Main executable function."""
conf = configuration.conf
if conf.get("core", "security") == "kerberos":
os.environ["KRB5CCNAME"] = conf.get("kerberos", "ccache")
45 changes: 23 additions & 22 deletions airflow/configuration.py
@@ -88,7 +88,7 @@ def expand_env_var(env_var: str) -> str:

def expand_env_var(env_var: str | None) -> str | None:
"""
- Expands (potentially nested) env vars.
+ Expand (potentially nested) env vars.
Repeat and apply `expandvars` and `expanduser` until
interpolation stops having any effect.
@@ -104,7 +104,7 @@ def expand_env_var(env_var: str | None) -> str | None:


def run_command(command: str) -> str:
- """Runs command and returns stdout."""
+ """Run command and returns stdout."""
process = subprocess.Popen(
shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True
)
@@ -239,7 +239,7 @@ def is_template(self, section: str, key) -> bool:

def _update_defaults_from_string(self, config_string: str):
"""
- The defaults in _default_values are updated based on values in config_string ("ini" format).
+ Update the defaults in _default_values based on values in config_string ("ini" format).
Note that those values are not validated and cannot contain variables because we are using
regular config parser to load them. This method is used to test the config parser in unit tests.
@@ -268,7 +268,7 @@ def _update_defaults_from_string(self, config_string: str):

def get_default_value(self, section: str, key: str, fallback: Any = None, raw=False, **kwargs) -> Any:
"""
- Retrieves default value from default config parser.
+ Retrieve default value from default config parser.
This will retrieve the default value from the default config parser. Optionally a raw, stored
value can be retrieved by setting skip_interpolation to True. This is useful for example when
@@ -470,15 +470,15 @@ def inversed_deprecated_sections(self):

def get_sections_including_defaults(self) -> list[str]:
"""
- Retrieves all sections from the configuration parser, including sections defined by built-in defaults.
+ Retrieve all sections from the configuration parser, including sections defined by built-in defaults.
:return: list of section names
"""
return list(dict.fromkeys(it.chain(self.configuration_description, self.sections())))

def get_options_including_defaults(self, section: str) -> list[str]:
"""
- Retrieves all possible option from the configuration parser for the section given.
+ Retrieve all possible option from the configuration parser for the section given.
Includes options defined by built-in defaults.
@@ -490,7 +490,7 @@ def get_options_including_defaults(self, section: str) -> list[str]:

def optionxform(self, optionstr: str) -> str:
"""
- This method transforms option names on every read, get, or set operation.
+ Transform option names on every read, get, or set operation.
This changes from the default behaviour of ConfigParser from lower-casing
to instead be case-preserving.
@@ -532,7 +532,7 @@ def _write_section_header(
section_config_description: dict[str, str],
section_to_write: str,
) -> None:
- """Writes header for configuration section."""
+ """Write header for configuration section."""
file.write(f"[{section_to_write}]\n")
section_description = section_config_description.get("description")
if section_description and include_descriptions:
@@ -553,7 +553,8 @@ def _write_option_header(
section_to_write: str,
sources_dict: ConfigSourcesType,
) -> tuple[bool, bool]:
- """Writes header for configuration option.
+ """
+ Write header for configuration option.
Returns tuple of (should_continue, needs_separation) where needs_separation should be
set if the option needs additional separation to visually separate it from the next option.
@@ -641,7 +642,7 @@ def write( # type: ignore[override]
**kwargs: Any,
) -> None:
"""
- Writes configuration with comments and examples to a file.
+ Write configuration with comments and examples to a file.
:param file: file to write to
:param section: section of the config to write, defaults to all sections
@@ -1191,7 +1192,7 @@ def getfloat(self, section: str, key: str, **kwargs) -> float: # type: ignore[o

def getimport(self, section: str, key: str, **kwargs) -> Any:
"""
- Reads options, imports the full qualified name, and returns the object.
+ Read options, import the full qualified name, and return the object.
In case of failure, it throws an exception with the key and section names
@@ -1235,7 +1236,7 @@ def gettimedelta(
self, section: str, key: str, fallback: Any = None, **kwargs
) -> datetime.timedelta | None:
"""
- Gets the config value for the given section and key, and converts it into datetime.timedelta object.
+ Get the config value for the given section and key, and convert it into datetime.timedelta object.
If the key is missing, then it is considered as `None`.
@@ -1321,7 +1322,7 @@ def remove_option(self, section: str, option: str, remove_default: bool = True):

def getsection(self, section: str) -> ConfigOptionsDictType | None:
"""
- Returns the section as a dict.
+ Return the section as a dict.
Values are converted to int, float, bool as required.
@@ -1374,7 +1375,7 @@ def as_dict(
include_secret: bool = True,
) -> ConfigSourcesType:
"""
- Returns the current configuration as an OrderedDict of OrderedDicts.
+ Return the current configuration as an OrderedDict of OrderedDicts.
When materializing current configuration Airflow defaults are
materialized along with user set configs. If any of the `include_*`
@@ -1556,7 +1557,7 @@ def _filter_by_source(
getter_func,
):
"""
- Deletes default configs from current configuration.
+ Delete default configs from current configuration.
An OrderedDict of OrderedDicts, if it would conflict with special sensitive_config_values.
@@ -1761,7 +1762,7 @@ def _replace_section_config_with_display_sources(

def load_test_config(self):
"""
- Uses test configuration rather than the configuration coming from airflow defaults.
+ Use test configuration rather than the configuration coming from airflow defaults.
When running tests we use special the unit_test configuration to avoid accidental modifications and
different behaviours when running the tests. Values for those test configuration are stored in
@@ -1785,7 +1786,7 @@ def load_test_config(self):
log.info("Unit test configuration loaded from 'config_unit_tests.cfg'")

def expand_all_configuration_values(self):
- """Expands all configuration values using global and local variables defined in this module."""
+ """Expand all configuration values using global and local variables defined in this module."""
all_vars = get_all_expansion_variables()
for section in self.sections():
for key, value in self.items(section):
@@ -1798,7 +1799,7 @@ def expand_all_configuration_values(self):
self.set(section, key, value.format(**all_vars))

def remove_all_read_configurations(self):
- """Removes all read configurations, leaving only default values in the config."""
+ """Remove all read configurations, leaving only default values in the config."""
for section in self.sections():
self.remove_section(section)

@@ -1809,7 +1810,7 @@ def providers_configuration_loaded(self) -> bool:

def load_providers_configuration(self):
"""
- Loads configuration for providers.
+ Load configuration for providers.
This should be done after initial configuration have been performed. Initializing and discovering
providers is an expensive operation and cannot be performed when we load configuration for the first
@@ -1920,7 +1921,7 @@ def _generate_fernet_key() -> str:

def create_default_config_parser(configuration_description: dict[str, dict[str, Any]]) -> ConfigParser:
"""
- Creates default config parser based on configuration description.
+ Create default config parser based on configuration description.
It creates ConfigParser with all default values retrieved from the configuration description and
expands all the variables from the global and local variables defined in this module.
@@ -1947,7 +1948,7 @@ def create_default_config_parser(configuration_description: dict[str,

def create_pre_2_7_defaults() -> ConfigParser:
"""
- Creates parser using the old defaults from Airflow < 2.7.0.
+ Create parser using the old defaults from Airflow < 2.7.0.
This is used in order to be able to fall-back to those defaults when old version of provider,
not supporting "config contribution" is installed with Airflow 2.7.0+. This "default"
@@ -1985,7 +1986,7 @@ def write_default_airflow_configuration_if_needed() -> AirflowConfigParser:

def load_standard_airflow_configuration(airflow_config_parser: AirflowConfigParser):
"""
- Loads standard airflow configuration.
+ Load standard airflow configuration.
In case it finds that the configuration file is missing, it will create it and write the default
configuration values there, based on defaults passed, and will add the comments and examples
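The expand_env_var hunk above changes only the docstring, but the docstring also describes the algorithm: apply `expandvars` and `expanduser` repeatedly until another pass no longer changes the value. A rough sketch of that fixed-point loop (an illustration consistent with the docstring, not necessarily Airflow's exact implementation):

```python
import os


def expand_env_var(env_var: str | None) -> str | None:
    """Expand (potentially nested) env vars until expansion stops having any effect."""
    if not env_var:
        return env_var
    while True:
        interpolated = os.path.expanduser(os.path.expandvars(str(env_var)))
        if interpolated == env_var:  # reached a fixed point, nothing left to expand
            return interpolated
        env_var = interpolated
```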
8 changes: 4 additions & 4 deletions airflow/policies.py
@@ -35,7 +35,7 @@
@local_settings_hookspec
def task_policy(task: BaseOperator) -> None:
"""
- This policy setting allows altering tasks after they are loaded in the DagBag.
+ Allow altering tasks after they are loaded in the DagBag.
It allows administrator to rewire some task's parameters. Alternatively you can raise
``AirflowClusterPolicyViolation`` exception to stop DAG from being executed.
@@ -53,7 +53,7 @@ def task_policy(task: BaseOperator) -> None:
@local_settings_hookspec
def dag_policy(dag: DAG) -> None:
"""
- This policy setting allows altering DAGs after they are loaded in the DagBag.
+ Allow altering DAGs after they are loaded in the DagBag.
It allows administrator to rewire some DAG's parameters.
Alternatively you can raise ``AirflowClusterPolicyViolation`` exception
@@ -71,7 +71,7 @@ def dag_policy(dag: DAG) -> None:
@local_settings_hookspec
def task_instance_mutation_hook(task_instance: TaskInstance) -> None:
"""
- This setting allows altering task instances before being queued by the Airflow scheduler.
+ Allow altering task instances before being queued by the Airflow scheduler.
This could be used, for instance, to modify the task instance during retries.
@@ -108,7 +108,7 @@ def get_airflow_context_vars(context) -> dict[str, str]: # type: ignore[empty-b
@local_settings_hookspec(firstresult=True)
def get_dagbag_import_timeout(dag_file_path: str) -> int | float: # type: ignore[empty-body]
"""
- This setting allows for dynamic control of the DAG file parsing timeout based on the DAG file path.
+ Allow for dynamic control of the DAG file parsing timeout based on the DAG file path.
It is useful when there are a few DAG files requiring longer parsing times, while others do not.
You can control them separately instead of having one value for all DAG files.
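The task_policy and dag_policy docstrings above describe cluster policies: hooks, typically defined in `airflow_local_settings.py`, that may rewire a task's parameters or raise `AirflowClusterPolicyViolation` to block execution. A hedged sketch of such a policy (the specific owner/retry checks are invented for illustration):

```python
# airflow_local_settings.py -- illustrative only; the rules below are made up
from airflow.exceptions import AirflowClusterPolicyViolation
from airflow.models.baseoperator import BaseOperator


def task_policy(task: BaseOperator) -> None:
    """Reject unowned tasks and cap retries before they are scheduled."""
    if task.owner == "airflow":  # the stock default owner means nobody claimed the task
        raise AirflowClusterPolicyViolation(
            f"Task {task.task_id} must set an explicit 'owner'."
        )
    task.retries = min(task.retries, 5)  # rewire a task parameter, as the hook allows
```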
4 changes: 2 additions & 2 deletions airflow/sentry.py
@@ -122,7 +122,7 @@ def __init__(self):
sentry_sdk.init(integrations=integrations, **sentry_config_opts)

def add_tagging(self, task_instance):
- """Function to add tagging for a task_instance."""
+ """Add tagging for a task_instance."""
dag_run = task_instance.dag_run
task = task_instance.task

@@ -141,7 +141,7 @@ def add_breadcrumbs(
task_instance: TaskInstance,
session: Session | None = None,
) -> None:
- """Function to add breadcrumbs inside of a task_instance."""
+ """Add breadcrumbs inside of a task_instance."""
if session is None:
return
dr = task_instance.get_dagrun(session)
4 changes: 2 additions & 2 deletions airflow/settings.py
@@ -118,7 +118,7 @@ def _get_rich_console(file):


def custom_show_warning(message, category, filename, lineno, file=None, line=None):
- """Custom function to print rich and visible warnings."""
+ """Print rich and visible warnings."""
# Delay imports until we need it
from rich.markup import escape

@@ -432,7 +432,7 @@ def prepare_syspath():


def get_session_lifetime_config():
- """Gets session timeout configs and handles outdated configs gracefully."""
+ """Get session timeout configs and handle outdated configs gracefully."""
session_lifetime_minutes = conf.get("webserver", "session_lifetime_minutes", fallback=None)
session_lifetime_days = conf.get("webserver", "session_lifetime_days", fallback=None)
uses_deprecated_lifetime_configs = session_lifetime_days or conf.get(
2 changes: 1 addition & 1 deletion helm_tests/other/test_keda.py
@@ -85,7 +85,7 @@ def test_keda_advanced(self, executor):

@staticmethod
def build_query(executor, concurrency=16, queue=None):
- """Builds the query used by KEDA autoscaler to determine how many workers there should be."""
+ """Build the query used by KEDA autoscaler to determine how many workers there should be."""
query = (
f"SELECT ceil(COUNT(*)::decimal / {concurrency}) "
"FROM task_instance WHERE (state='running' OR state='queued')"
16 changes: 8 additions & 8 deletions setup.py
@@ -704,7 +704,7 @@ def get_all_db_dependencies() -> list[str]:

def is_package_excluded(package: str, exclusion_list: list[str]) -> bool:
"""
- Checks if package should be excluded.
+ Check if package should be excluded.
:param package: package name (beginning of it)
:param exclusion_list: list of excluded packages
@@ -715,7 +715,7 @@ def is_package_excluded(package: str, exclusion_list: list[str]) -> bool:

def remove_provider_limits(package: str) -> str:
"""
- Removes the limit for providers in devel_all to account for pre-release and development packages.
+ Remove the limit for providers in devel_all to account for pre-release and development packages.
:param package: package name (beginning of it)
:return: true if package should be excluded
@@ -749,7 +749,7 @@ def remove_provider_limits(package: str) -> str:

def sort_extras_dependencies() -> dict[str, list[str]]:
"""
- The dictionary order remains when keys() are retrieved.
+ Sort dependencies; the dictionary order remains when keys() are retrieved.
Sort both: extras and list of dependencies to make it easier to analyse problems
external packages will be first, then if providers are added they are added at the end of the lists.
@@ -777,7 +777,7 @@ def sort_extras_dependencies() -> dict[str, list[str]]:

def get_provider_package_name_from_package_id(package_id: str) -> str:
"""
- Builds the name of provider package out of the package id provided/.
+ Build the name of provider package out of the package id provided.
:param package_id: id of the package (like amazon or microsoft.azure)
:return: full name of package in PyPI
@@ -796,12 +796,12 @@ def get_provider_package_name_from_package_id(package_id: str) -> str:


def get_excluded_providers() -> list[str]:
- """Returns packages excluded for the current python version."""
+ """Return packages excluded for the current python version."""
return []


def get_all_provider_packages() -> str:
- """Returns all provider packages configured in setup.py."""
+ """Return all provider packages configured in setup.py."""
excluded_providers = get_excluded_providers()
return " ".join(
get_provider_package_name_from_package_id(package)
@@ -844,7 +844,7 @@ def parse_config_files(self, *args, **kwargs) -> None:

def replace_extra_dependencies_with_provider_packages(extra: str, providers: list[str]) -> None:
"""
- Replaces extra dependencies with provider package.
+ Replace extra dependencies with provider package.
The intention here is that when the provider is added as dependency of extra, there is no
need to add the dependencies separately. This is not needed and even harmful, because in
@@ -897,7 +897,7 @@ def replace_extra_dependencies_with_provider_packages(extra: str, providers: lis

def add_provider_packages_to_extra_dependencies(extra: str, providers: list[str]) -> None:
"""
- Adds provider packages as dependencies to extra.
+ Add provider packages as dependencies to extra.
This is used to add provider packages as dependencies to the "bulk" kind of extras.
Those bulk extras do not have the detailed 'extra' dependencies as initial values,
