Skip to content

Commit

Permalink
Support Python checks defined by a pyproject.toml file (#11233)
Browse files Browse the repository at this point in the history
* Support Python checks defined by a `pyproject.toml` file

* Apply suggestions from code review

Co-authored-by: Sarah Witt <sarah.witt@datadoghq.com>

* address

Co-authored-by: Sarah Witt <sarah.witt@datadoghq.com>
  • Loading branch information
ofek and sarah-witt authored Feb 3, 2022
1 parent 2b88d38 commit 0263d56
Show file tree
Hide file tree
Showing 7 changed files with 279 additions and 74 deletions.
51 changes: 34 additions & 17 deletions datadog_checks_dev/datadog_checks/dev/tooling/commands/dep.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,15 @@
from packaging.markers import InvalidMarker, Marker
from packaging.specifiers import SpecifierSet

from ...fs import read_file_lines, write_file_lines
from ...fs import basepath, read_file_lines, write_file_lines
from ..constants import get_agent_requirements
from ..dependencies import read_agent_dependencies, read_check_dependencies
from ..utils import get_check_req_file, get_valid_checks
from ..dependencies import (
read_agent_dependencies,
read_check_dependencies,
update_check_dependencies,
update_check_dependencies_at,
)
from ..utils import get_check_req_file, get_valid_checks, has_project_file
from .console import CONTEXT_SETTINGS, abort, echo_failure, echo_info
from .validate.licenses import extract_classifier_value

Expand Down Expand Up @@ -78,21 +83,27 @@ def pin(package, version, marker):
files_to_update[dependency_definition.file_path].append(dependency_definition)

for file_path, dependency_definitions in sorted(files_to_update.items()):
old_lines = read_file_lines(file_path)

new_lines = old_lines.copy()

for dependency_definition in dependency_definitions:
requirement = dependency_definition.requirement
if marker != requirement.marker:
continue

requirement.specifier = SpecifierSet(f'=={version}')
new_lines[dependency_definition.line_number] = f'{requirement}\n'

if new_lines != old_lines:
files_updated += 1
write_file_lines(file_path, new_lines)
if basepath(file_path) == 'pyproject.toml':
if update_check_dependencies_at(file_path, dependency_definitions):
files_updated += 1
else:
old_lines = read_file_lines(file_path)

new_lines = old_lines.copy()

for dependency_definition in dependency_definitions:
new_lines[dependency_definition.line_number] = f'{dependency_definition.requirement}\n'

if new_lines != old_lines:
files_updated += 1
write_file_lines(file_path, new_lines)

if not files_updated:
abort('No dependency definitions to update')
Expand Down Expand Up @@ -167,15 +178,21 @@ def sync():

if deps_to_update:
files_updated += 1
check_req_file = get_check_req_file(check_name)
old_lines = read_file_lines(check_req_file)
new_lines = old_lines.copy()

for dependency_definition, new_version in deps_to_update.items():
dependency_definition.requirement.specifier = new_version
new_lines[dependency_definition.line_number] = f'{dependency_definition.requirement}\n'

write_file_lines(check_req_file, new_lines)
if has_project_file(check_name):
update_check_dependencies(check_name, list(deps_to_update))
else:
check_req_file = get_check_req_file(check_name)
old_lines = read_file_lines(check_req_file)
new_lines = old_lines.copy()

for dependency_definition, new_version in deps_to_update.items():
dependency_definition.requirement.specifier = new_version
new_lines[dependency_definition.line_number] = f'{dependency_definition.requirement}\n'

write_file_lines(check_req_file, new_lines)

if not files_updated:
echo_info('All dependencies synced.')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from ...constants import get_agent_requirements, get_root
from ...dependencies import read_agent_dependencies, read_check_base_dependencies, read_check_dependencies
from ...testing import process_checks_option
from ...utils import complete_valid_checks
from ...utils import complete_valid_checks, get_project_file, has_project_file
from ..console import CONTEXT_SETTINGS, abort, annotate_error, annotate_errors, echo_failure


Expand Down Expand Up @@ -46,7 +46,13 @@ def verify_base_dependency(source, name, base_versions, force_pinned=True, min_b
failed = False
for specifier_set, dependency_definitions in base_versions.items():
checks = sorted(dep.check_name for dep in dependency_definitions)
file = os.path.join(get_root(), format_check_usage(checks, source), 'setup.py')
files = []
for check_name in checks:
if has_project_file(check_name):
files.append(get_project_file(check_name))
else:
files.append(os.path.join(get_root(), check_name, 'setup.py'))
file = ','.join(files)
if not specifier_set and force_pinned:
message = f'Unspecified version found for dependency `{name}`: {format_check_usage(checks, source)}'
echo_failure(message)
Expand Down Expand Up @@ -170,31 +176,36 @@ def dep(check, require_base_check_version, min_base_check_version):
abort()

for check_name in checks:
req_file = os.path.join(root, check_name, 'requirements.in')
if has_project_file(check_name):
req_source = get_project_file(check_name)
base_req_source = req_source
else:
req_source = os.path.join(root, check_name, 'requirements.in')
base_req_source = os.path.join(root, check_name, 'setup.py')

check_dependencies, check_errors = read_check_dependencies(check_name)
annotate_errors(req_file, check_errors)
annotate_errors(req_source, check_errors)
if check_errors:
for check_error in check_errors:
echo_failure(check_error)
abort()

check_base_dependencies, check_base_errors = read_check_base_dependencies(check_name)
base_req_file = os.path.join(root, check_name, 'setup.py')
annotate_errors(base_req_file, check_base_errors)
annotate_errors(base_req_source, check_base_errors)
if check_base_errors:
for check_error in check_base_errors:
echo_failure(check_error)
abort()

for name, versions in sorted(check_dependencies.items()):
if not verify_dependency('Checks', name, versions, req_file):
if not verify_dependency('Checks', name, versions, req_source):
failed = True

if name not in agent_dependencies:
failed = True
message = f'Dependency needs to be synced: {name}'
echo_failure(message)
annotate_error(req_file, message)
annotate_error(req_source, message)

check_base_dependencies, check_base_errors = read_check_base_dependencies(checks)
check_dependencies, check_errors = read_check_dependencies(checks)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,58 +5,91 @@

import click

from ....fs import basepath
from ...testing import process_checks_option
from ...utils import complete_valid_checks, get_setup_file, normalize_package_name, read_setup_file
from ...utils import (
complete_valid_checks,
get_package_name,
get_project_file,
get_setup_file,
has_project_file,
load_project_file_cached,
normalize_package_name,
normalize_project_name,
read_setup_file,
)
from ..console import CONTEXT_SETTINGS, abort, annotate_display_queue, echo_failure, echo_info, echo_success

# Some integrations aren't installable via the integration install command, so exclude them from the name requirements
EXCLUDE_CHECKS = ["datadog_checks_downloader", "datadog_checks_dev", "datadog_checks_base"]


@click.command('package', context_settings=CONTEXT_SETTINGS, short_help='Validate `setup.py` files')
def read_project_name(check_name):
    """Return a ``(metadata_file, declared_name)`` pair for a check.

    Prefers the check's ``pyproject.toml`` when one exists, reading the
    PEP 621 ``project.name`` field; otherwise falls back to scanning the
    check's ``setup.py`` for the ``name=...`` keyword argument. Returns
    ``None`` implicitly if neither source declares a name.
    """
    if has_project_file(check_name):
        declared_name = load_project_file_cached(check_name)['project']['name']
        return get_project_file(check_name), declared_name

    for _, setup_line in read_setup_file(check_name):
        name_match = re.search("name=['\"](.*)['\"]", setup_line)
        if name_match is not None:
            return get_setup_file(check_name), name_match.group(1)


@click.command('package', context_settings=CONTEXT_SETTINGS, short_help='Validate Python package metadata')
@click.argument('check', autocompletion=complete_valid_checks, required=False)
def package(check):
"""Validate all `setup.py` files.
"""Validate all files for Python package metadata.
If `check` is specified, only the check will be validated, if check value is 'changed' will only apply to changed
checks, an 'all' or empty `check` value will validate all README files.
checks, an 'all' or empty `check` value will validate all files.
"""

checks = process_checks_option(check, source='valid_checks', validate=True)
echo_info(f"Validating setup.py files for {len(checks)} checks ...")
echo_info(f'Validating files for {len(checks)} checks ...')

failed_checks = 0
ok_checks = 0

for check in checks:
display_queue = []
file_failed = False
setup_file_path = get_setup_file(check)
if check in EXCLUDE_CHECKS:
continue

lines = read_setup_file(check)
for _, line in lines:
# The name field must match the pattern: `datadog-<folder_name>`
match = re.search("name=['\"](.*)['\"]", line)
if match:
group = match.group(1)
# Following PEP 503, lets normalize the groups and validate those
# https://www.python.org/dev/peps/pep-0503/#normalized-names
group = normalize_package_name(group)
normalized_package_name = normalize_package_name(f"datadog-{check}")
if group != normalized_package_name:
file_failed = True
display_queue.append(
(echo_failure, f" The name in setup.py: {group} must be: `{normalized_package_name}`")
source, project_name = read_project_name(check)
normalization_function = normalize_project_name if has_project_file(check) else normalize_package_name
project_name = normalization_function(project_name)
normalized_project_name = normalization_function(f'datadog-{check}')
# The name field must match the pattern: `datadog-<folder_name>`
if project_name != normalized_project_name:
file_failed = True
display_queue.append(
(
echo_failure,
f' The name in {basepath(source)}: {project_name} must be: `{normalized_project_name}`',
)
)

if has_project_file(check):
project_data = load_project_file_cached(check)
version_file = project_data.get('tool', {}).get('hatch', {}).get('version', {}).get('path', '')
expected_version_file = f'datadog_checks/{get_package_name(check)}/__about__.py'
if version_file != expected_version_file:
file_failed = True
display_queue.append(
(
echo_failure,
f' The field `tool.hatch.version.path` in {check}/pyproject.toml '
f'must be set to: {expected_version_file}',
)
)

if file_failed:
failed_checks += 1
# Display detailed info if file is invalid
echo_info(f'{check}... ', nl=False)
echo_failure(' FAILED')
annotate_display_queue(setup_file_path, display_queue)
annotate_display_queue(source, display_queue)
for display_func, message in display_queue:
display_func(message)
else:
Expand Down
Loading

0 comments on commit 0263d56

Please sign in to comment.