Commit

Refactor use of app instance (#4478)
ssbarnea authored Jan 20, 2025
1 parent ca3a76f commit 4eaedaa
Showing 9 changed files with 86 additions and 79 deletions.
1 change: 1 addition & 0 deletions .vault_pass
@@ -0,0 +1 @@
secret123
2 changes: 2 additions & 0 deletions ansible.cfg
@@ -1,2 +1,4 @@
[defaults]
collections_path = collections:examples/playbooks/collections
# to avoid accidental use of vault from user environment:
vault_password_file = .vault_pass
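
Note: the new vault_password_file entry points at the .vault_pass file added above, so vault operations in this repository never reach for a password from the user's own environment. A quick, throwaway way to confirm the setting is read from the repo-local ansible.cfg (not part of the commit):

import configparser

cfg = configparser.ConfigParser()
cfg.read("ansible.cfg")  # run from the repository root
# Expect ".vault_pass"; the fallback only fires if the key is missing.
print(cfg.get("defaults", "vault_password_file", fallback="<unset>"))
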
5 changes: 4 additions & 1 deletion src/ansiblelint/rules/__init__.py
@@ -396,7 +396,10 @@ def __init__( # pylint: disable=too-many-arguments
else:
self.options = options
self.profile = []
self.app = app or get_app(cached=True)
# app should be defined on normal run logic, but for testing we might
# not pass it, and in this case we assume offline mode for performance
# reasons.
self.app = app or get_app(offline=True)

if profile_name:
self.profile = PROFILES[profile_name]
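
The net effect: callers may still inject an app, and when they do not, the collection now builds an offline one instead of a cached one. A minimal usage sketch based only on the calls visible in this diff (any other RulesCollection arguments are assumed to keep their defaults):

from ansiblelint.app import get_app
from ansiblelint.rules import RulesCollection

app = get_app(offline=True)        # offline: no galaxy/network lookups
rules = RulesCollection(app=app)   # explicit injection, as the tests below now do
implicit = RulesCollection()       # falls back to get_app(offline=True) internally
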
4 changes: 2 additions & 2 deletions src/ansiblelint/runner.py
@@ -27,7 +27,6 @@

import ansiblelint.skip_utils
import ansiblelint.utils
from ansiblelint.app import App, get_app
from ansiblelint.constants import States
from ansiblelint.errors import LintWarning, MatchError, WarnSource
from ansiblelint.file_utils import (
@@ -50,6 +49,7 @@
from collections.abc import Callable, Generator

from ansiblelint._internal.rules import BaseRule
from ansiblelint.app import App
from ansiblelint.config import Options
from ansiblelint.constants import FileType
from ansiblelint.rules import RulesCollection
@@ -111,7 +111,7 @@ def __init__(
checked_files = set()
self.checked_files = checked_files

self.app = get_app(cached=True)
self.app = self.rules.app

def _update_exclude_paths(self, exclude_paths: list[str]) -> None:
if exclude_paths:
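
Runner no longer resolves its own app; it reuses whatever instance its RulesCollection carries, so the whole run shares one app object. A self-contained sketch of that sharing pattern (the class names here are stand-ins; only the rules.app attribute comes from the diff):

class FakeApp:
    """Stand-in for ansiblelint.app.App."""
    offline = True

class FakeRules:
    """Stand-in for RulesCollection: owns the app it was given."""
    def __init__(self, app: FakeApp) -> None:
        self.app = app

class FakeRunner:
    """Stand-in for Runner: borrows the app instead of resolving another one."""
    def __init__(self, rules: FakeRules) -> None:
        self.rules = rules
        self.app = self.rules.app  # same object, no second lookup

app = FakeApp()
runner = FakeRunner(FakeRules(app))
assert runner.app is app
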
136 changes: 67 additions & 69 deletions src/ansiblelint/utils.py
@@ -439,7 +439,7 @@ def roles_children(
if "role" in role or "name" in role:
if "tags" not in role or "skip_ansible_lint" not in role["tags"]:
results.extend(
_look_for_role_files(
self._look_for_role_files(
basedir,
role.get("role", role.get("name")),
),
@@ -448,7 +448,7 @@
msg = f'role dict {role} does not contain a "role" or "name" key'
raise SystemExit(msg)
else:
results.extend(_look_for_role_files(basedir, role))
results.extend(self._look_for_role_files(basedir, role))
return results

def import_playbook_children(
@@ -483,7 +483,7 @@ def append_playbook_path(loc: str, playbook_path: list[str]) -> None:
possible_paths = []
namespace_name, collection_name, *playbook_path = parse_fqcn(v)
if namespace_name and collection_name:
for loc in get_app(cached=True).runtime.config.collections_paths:
for loc in self.app.runtime.config.collections_paths:
append_playbook_path(
loc,
playbook_path[:-1] + [f"{playbook_path[-1]}.yml"],
@@ -512,6 +512,70 @@ def append_playbook_path(loc: str, playbook_path: list[str]) -> None:
_logger.error(msg)
return []

def _look_for_role_files(self, basedir: str, role: str) -> list[Lintable]:
role_path = self._rolepath(basedir, role)
if not role_path: # pragma: no branch
return []

results = []

for kind in ["tasks", "meta", "handlers", "vars", "defaults"]:
current_path = os.path.join(role_path, kind)
for folder, _, files in os.walk(current_path):
for file in files:
file_ignorecase = file.lower()
if file_ignorecase.endswith((".yml", ".yaml")):
results.append(Lintable(os.path.join(folder, file)))

return results

def _rolepath(self, basedir: str, role: str) -> str | None:
role_path = None
namespace_name, collection_name, role_name = parse_fqcn(role)

possible_paths = [
# if included from a playbook
path_dwim(basedir, os.path.join("roles", role_name)),
path_dwim(basedir, role_name),
# if included from roles/[role]/meta/main.yml
path_dwim(basedir, os.path.join("..", "..", "..", "roles", role_name)),
path_dwim(basedir, os.path.join("..", "..", role_name)),
# if checking a role in the current directory
path_dwim(basedir, os.path.join("..", role_name)),
]

for loc in self.app.runtime.config.default_roles_path:
loc = os.path.expanduser(loc)
possible_paths.append(path_dwim(loc, role_name))

if namespace_name and collection_name:
for loc in get_app(cached=True).runtime.config.collections_paths:
loc = os.path.expanduser(loc)
possible_paths.append(
path_dwim(
loc,
os.path.join(
"ansible_collections",
namespace_name,
collection_name,
"roles",
role_name,
),
),
)

possible_paths.append(path_dwim(basedir, ""))

for path_option in possible_paths: # pragma: no branch
if os.path.isdir(path_option):
role_path = path_option
break

if role_path: # pragma: no branch
add_all_plugin_dirs(role_path)

return role_path


def _get_task_handler_children_for_tasks_or_playbooks(
task_handler: dict[str, Any],
@@ -558,72 +622,6 @@ def _get_task_handler_children_for_tasks_or_playbooks(
raise LookupError(msg)


def _rolepath(basedir: str, role: str) -> str | None:
role_path = None
namespace_name, collection_name, role_name = parse_fqcn(role)

possible_paths = [
# if included from a playbook
path_dwim(basedir, os.path.join("roles", role_name)),
path_dwim(basedir, role_name),
# if included from roles/[role]/meta/main.yml
path_dwim(basedir, os.path.join("..", "..", "..", "roles", role_name)),
path_dwim(basedir, os.path.join("..", "..", role_name)),
# if checking a role in the current directory
path_dwim(basedir, os.path.join("..", role_name)),
]

for loc in get_app(cached=True).runtime.config.default_roles_path:
loc = os.path.expanduser(loc)
possible_paths.append(path_dwim(loc, role_name))

if namespace_name and collection_name:
for loc in get_app(cached=True).runtime.config.collections_paths:
loc = os.path.expanduser(loc)
possible_paths.append(
path_dwim(
loc,
os.path.join(
"ansible_collections",
namespace_name,
collection_name,
"roles",
role_name,
),
),
)

possible_paths.append(path_dwim(basedir, ""))

for path_option in possible_paths: # pragma: no branch
if os.path.isdir(path_option):
role_path = path_option
break

if role_path: # pragma: no branch
add_all_plugin_dirs(role_path)

return role_path


def _look_for_role_files(basedir: str, role: str) -> list[Lintable]:
role_path = _rolepath(basedir, role)
if not role_path: # pragma: no branch
return []

results = []

for kind in ["tasks", "meta", "handlers", "vars", "defaults"]:
current_path = os.path.join(role_path, kind)
for folder, _, files in os.walk(current_path):
for file in files:
file_ignorecase = file.lower()
if file_ignorecase.endswith((".yml", ".yaml")):
results.append(Lintable(os.path.join(folder, file)))

return results


def _sanitize_task(task: dict[str, Any]) -> dict[str, Any]:
"""Return a stripped-off task structure compatible with new Ansible.
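
In utils.py the former module-level helpers _rolepath and _look_for_role_files become methods of the class that owns roles_children and import_playbook_children, giving them access to the shared self.app. The resolution order itself is unchanged: candidate directories are collected and the first one that exists wins. A condensed, standalone sketch of that lookup idea (the candidate paths below are illustrative, not ansible-lint's real configuration):

import os

def first_existing_dir(candidates: list[str]) -> str | None:
    # Mirrors the final loop of _rolepath: the first existing directory wins.
    for path in candidates:
        if os.path.isdir(path):
            return path
    return None

role = "myrole"
candidates = [
    os.path.join("roles", role),                                  # next to the playbook
    os.path.join("..", "..", role),                               # from roles/*/meta/main.yml
    os.path.expanduser(os.path.join("~/.ansible/roles", role)),   # a configured roles path
]
print(first_existing_dir(candidates))
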
3 changes: 2 additions & 1 deletion test/test_formatter.py
@@ -21,12 +21,13 @@
# THE SOFTWARE.
import pathlib

from ansiblelint.app import get_app
from ansiblelint.errors import MatchError
from ansiblelint.file_utils import Lintable
from ansiblelint.formatters import Formatter
from ansiblelint.rules import AnsibleLintRule, RulesCollection

collection = RulesCollection()
collection = RulesCollection(app=get_app(offline=True))
rule = AnsibleLintRule()
rule.id = "TCF0001"
collection.register(rule)
3 changes: 2 additions & 1 deletion test/test_formatter_json.py
@@ -9,6 +9,7 @@

import pytest

from ansiblelint.app import get_app
from ansiblelint.errors import MatchError
from ansiblelint.file_utils import Lintable
from ansiblelint.formatters import CodeclimateJSONFormatter
@@ -21,7 +22,7 @@ class TestCodeclimateJSONFormatter:
rule = AnsibleLintRule()
matches: list[MatchError] = []
formatter: CodeclimateJSONFormatter | None = None
collection = RulesCollection()
collection = RulesCollection(app=get_app(offline=True))

def setup_class(self) -> None:
"""Set up few MatchError objects."""
3 changes: 2 additions & 1 deletion test/test_formatter_sarif.py
@@ -11,6 +11,7 @@

import pytest

from ansiblelint.app import get_app
from ansiblelint.errors import MatchError
from ansiblelint.file_utils import Lintable
from ansiblelint.formatters import SarifFormatter
@@ -24,7 +25,7 @@ class TestSarifFormatter:
rule2 = AnsibleLintRule()
matches: list[MatchError] = []
formatter: SarifFormatter | None = None
collection = RulesCollection()
collection = RulesCollection(app=get_app(offline=True))
collection.register(rule1)
collection.register(rule2)

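
All three formatter test modules make the same one-line change: the shared RulesCollection is now built with an explicit offline app. If the same app were needed across several test modules, it could also be hoisted into a fixture; a hypothetical conftest.py sketch, not part of this commit:

# conftest.py (hypothetical)
import pytest

from ansiblelint.app import get_app
from ansiblelint.rules import RulesCollection

@pytest.fixture(scope="session")
def offline_rules() -> RulesCollection:
    """One offline app and rules collection shared by the whole test session."""
    return RulesCollection(app=get_app(offline=True))
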
8 changes: 4 additions & 4 deletions test/test_schemas.py
@@ -96,17 +96,17 @@ def test_spdx() -> None:
schema = json.load(f)
spx_enum = schema["$defs"]["SPDXLicenseEnum"]["enum"]
if set(spx_enum) != license_ids:
# In absence of a
if os.environ.get("PIP_CONSTRAINT", "/dev/null") == "/dev/null":
constraints = os.environ.get("PIP_CONSTRAINT", "/dev/null")
if constraints.endswith(".config/constraints.txt"):
with galaxy_json.open("w", encoding="utf-8") as f:
schema["$defs"]["SPDXLicenseEnum"]["enum"] = sorted(license_ids)
json.dump(schema, f, indent=2)
pytest.fail(
"SPDX license list inside galaxy.json JSON Schema file was updated.",
f"SPDX license list inside galaxy.json JSON Schema file was updated. {constraints}",
)
else:
warnings.warn(
"test_spdx failure was ignored because constraints were not pinned (PIP_CONSTRAINTS). This is expected for py310 and py-devel jobs.",
f"test_spdx failure was ignored because constraints were not pinned (PIP_CONSTRAINT={constraints}). This is expected for py310 and py-devel, lower jobs.",
category=pytest.PytestWarning,
stacklevel=1,
)

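The test_spdx guard in test_schemas.py is inverted: instead of regenerating the schema whenever PIP_CONSTRAINT is unset, it now regenerates only when PIP_CONSTRAINT points at the repository's pinned .config/constraints.txt, and the constraint value is echoed into both the failure and the warning messages. The decision reduces to roughly this (a standalone sketch, not the real test):

import os

def should_rewrite_schema() -> bool:
    # Only rewrite galaxy.json when running against the repo's pinned constraints.
    constraints = os.environ.get("PIP_CONSTRAINT", "/dev/null")
    return constraints.endswith(".config/constraints.txt")

print(should_rewrite_schema())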