From 8ec93326732eb3b216bfe84b781ba48897f61c3c Mon Sep 17 00:00:00 2001
From: Andre Brisco
Date: Sun, 14 Jan 2024 09:49:34 -0800
Subject: [PATCH] Added `py.typed` file and disabled `implicit_optional`

---
 .github/workflows/build.yml       |  41 ++++++++++--
 pylintrc                          |  11 ----
 pyproject.toml                    |  19 +++++-
 req_compile/cmdline.py            |  18 +++---
 req_compile/compile.py            |  17 +++--
 req_compile/containers.py         |  44 +++++++------
 req_compile/dists.py              |  54 +++++++---------
 req_compile/errors.py             |   2 +-
 req_compile/metadata/dist_info.py |   8 +--
 req_compile/metadata/extractor.py |   6 +-
 req_compile/metadata/metadata.py  |   2 +-
 req_compile/metadata/pyproject.py |  11 ++--
 req_compile/metadata/source.py    |  24 ++++---
 req_compile/py.typed              |   0
 req_compile/repos/findlinks.py    |   2 +-
 req_compile/repos/multi.py        |   2 +-
 req_compile/repos/repository.py   | 102 +++++++++++++-----------------
 req_compile/repos/solution.py     |  18 ++++--
 req_compile/repos/source.py       |  32 ++++++----
 req_compile/utils.py              |   9 +--
 setup.py                          |   3 +-
 21 files changed, 232 insertions(+), 193 deletions(-)
 delete mode 100644 pylintrc
 create mode 100644 req_compile/py.typed

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 5bc5b9f..373616d 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -11,27 +11,54 @@ on:
 
 jobs:
   build:
+    strategy:
+      fail-fast: false
+      matrix:
+        platform: ["ubuntu-latest"]
+        python_version: ["3.7"]
 
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.platform }}
+
+    name: test ${{ matrix.platform }} (py${{ matrix.python_version }})
 
     steps:
     - uses: actions/checkout@v2
-    - name: Set up Python 3.7
+    - name: Set up Python ${{ matrix.python_version }}
       uses: actions/setup-python@v2
       with:
-        python-version: 3.7
+        python-version: "${{ matrix.python_version }}"
     - name: Install dependencies
       run: |
         python -m pip install --upgrade pip
         pip install -r requirements.txt --user
     - name: Run mypy
       run: |
-        export MYPYPATH=req-compile/stubs
-        mypy req_compile
+        python -m mypy req_compile
+      env:
+        MYPYPATH: req-compile/stubs
     - name: Run pylint
       run: |
-        pylint req_compile
+        python -m pylint req_compile
     - name: Test with pytest
       run: |
-        pytest
+        python -m pytest -v
+
+  formatting:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+    - name: Set up Python 3.11
+      uses: actions/setup-python@v2
+      with:
+        python-version: "3.11"
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install -r requirements.txt --user
+    - name: Run black
+      run: |
+        black --check --diff req_compile
+    - name: Run isort
+      run: |
+        isort --check-only req_compile
 
diff --git a/pylintrc b/pylintrc
deleted file mode 100644
index b4548d5..0000000
--- a/pylintrc
+++ /dev/null
@@ -1,11 +0,0 @@
-[FORMAT]
-max-line-length=120
-
-[MESSAGES CONTROL]
-disable=
-    R,
-    missing-docstring,
-    too-few-public-methods,
-    too-many-arguments,
-    raise-missing-from,
-    consider-using-f-string,
diff --git a/pyproject.toml b/pyproject.toml
index 7d2c471..34b44cd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,22 @@
 [build-system]
-requires = [ "setuptools", "wheel" ]
+requires = ["setuptools", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [tool.mypy]
-implicit_optional = true
 namespace_packages = false
+
+[tool.pylint.main]
+max-line-length = 120
+
+[tool.pylint."messages control"]
+disable = [
+    "R",
+    "missing-docstring",
+    "too-few-public-methods",
+    "too-many-arguments",
+    "raise-missing-from",
+    "consider-using-f-string",
+]
+
+[tool.isort]
+profile = "black"
diff --git a/req_compile/cmdline.py b/req_compile/cmdline.py
index 2400777..df9c3f6 100644
--- a/req_compile/cmdline.py
+++ b/req_compile/cmdline.py
@@ -13,7 +13,7 @@
 from collections import OrderedDict
 from io import StringIO
 from itertools import repeat
-from typing import IO, Any, Iterable, List, Mapping, Sequence, Set, Union
+from typing import IO, Any, Iterable, List, Mapping, Optional, Sequence, Set, Union
 
 import pkg_resources
 
@@ -53,7 +53,7 @@ from req_compile.versions import is_possible
 
 # Blacklist of requirements that will be filtered out of the output
-BLACKLIST = []  # type: Iterable[str]
+BLACKLIST: Iterable[str] = []
 
 
 def _cantusereason_to_text(
@@ -79,7 +79,7 @@ def _cantusereason_to_text(
 
 
 def _find_paths_to_root(
-    failing_node: DependencyNode, visited: Set[DependencyNode] = None
+    failing_node: DependencyNode, visited: Optional[Set[DependencyNode]] = None
 ) -> Sequence[Sequence[DependencyNode]]:
     if visited is None:
         visited = set()
@@ -105,7 +105,7 @@ def _generate_no_candidate_display(
     repo: Repository,
     dists: DistributionCollection,
     failure: Exception,
-    only_binary: Set[NormName] = None,
+    only_binary: Optional[Set[NormName]] = None,
 ) -> None:
     """Print a human friendly display to stderr when compilation fails"""
     failing_node = dists[req.name]
@@ -211,7 +211,7 @@ def _print_paths_to_root(
 def _dump_repo_candidates(
     req: pkg_resources.Requirement,
     repos: Iterable[Repository],
-    only_binary: Set[NormName] = None,
+    only_binary: Optional[Set[NormName]] = None,
 ) -> None:
     """
     Args:
@@ -368,7 +368,7 @@ def write_requirements_file(
     repo: Repository,
     annotate_source: bool = False,
     urls: bool = False,
-    input_reqs: Iterable[RequirementContainer] = None,
+    input_reqs: Optional[Iterable[RequirementContainer]] = None,
     remove_non_source: bool = False,
     remove_source: bool = False,
     no_pins: bool = False,
@@ -572,7 +572,7 @@ def build_repo(
     find_links: Iterable[str],
     index_urls: Iterable[str],
     wheeldir: str,
-    extra_index_urls: Iterable[str] = None,
+    extra_index_urls: Optional[Iterable[str]] = None,
     no_index: bool = False,
     allow_prerelease: bool = False,
 ) -> Repository:
@@ -659,7 +659,7 @@ def __call__(
         parser: argparse.ArgumentParser,
         namespace: argparse.Namespace,
         values: Union[str, Sequence[Any], None],
-        option_string: str = None,
+        option_string: Optional[str] = None,
     ) -> None:
         """Parse the string into a set, checking for special cases."""
         # Set the AllOnlyBinarySet to ensure all projects match the set.
@@ -674,7 +674,7 @@ def __call__( ) -def compile_main(raw_args: Sequence[str] = None) -> None: +def compile_main(raw_args: Optional[Sequence[str]] = None) -> None: parser = argparse.ArgumentParser( description="Req-Compile: Python requirements compiler" ) diff --git a/req_compile/compile.py b/req_compile/compile.py index f39074c..1bd8099 100644 --- a/req_compile/compile.py +++ b/req_compile/compile.py @@ -65,7 +65,7 @@ def compile_roots( options: CompileOptions, depth: int = 1, max_downgrade: int = MAX_DOWNGRADE, - _path: Set[DependencyNode] = None, + _path: Optional[Set[DependencyNode]] = None, ) -> None: # pylint: disable=too-many-statements,too-many-locals,too-many-branches """ Args: @@ -190,7 +190,7 @@ def compile_roots( nodes = sorted(node.reverse_deps) - violate_score = defaultdict(int) # type: Dict[DependencyNode, int] + violate_score: Dict[DependencyNode, int] = defaultdict(int) for idx, revnode in enumerate(nodes): for next_node in nodes[idx + 1 :]: if not is_possible( @@ -264,10 +264,10 @@ def compile_roots( def perform_compile( input_reqs: Iterable[RequirementContainer], repo: Repository, - constraint_reqs: Iterable[RequirementContainer] = None, - extras: Iterable[str] = None, + constraint_reqs: Optional[Iterable[RequirementContainer]] = None, + extras: Optional[Iterable[str]] = None, allow_circular_dependencies: bool = True, - only_binary: Set[NormName] = None, + only_binary: Optional[Set[NormName]] = None, ) -> Tuple[DistributionCollection, Set[DependencyNode]]: """Perform a compilation using the given inputs and constraints. @@ -335,8 +335,11 @@ def perform_compile( return results, roots -def _add_constraints(all_pinned, constraint_reqs, results): - # type: (bool, Optional[Iterable[RequirementContainer]], DistributionCollection) -> None +def _add_constraints( + all_pinned: bool, + constraint_reqs: Optional[Iterable[RequirementContainer]], + results: DistributionCollection, +) -> None: if all_pinned and constraint_reqs is not None: for constraint_source in constraint_reqs: results.add_dist(constraint_source, None, None) diff --git a/req_compile/containers.py b/req_compile/containers.py index 1bffee1..74eafa9 100644 --- a/req_compile/containers.py +++ b/req_compile/containers.py @@ -48,8 +48,9 @@ def __init__( def __iter__(self) -> Iterator[pkg_resources.Requirement]: return iter(self.reqs) - def requires(self, extra=None): - # type: (str) -> Iterable[pkg_resources.Requirement] + def requires( + self, extra: Optional[str] = None + ) -> Iterable[pkg_resources.Requirement]: return reduce_requirements( req for req in self.reqs if req_uses_extra(req, extra) ) @@ -61,7 +62,7 @@ def to_definition( def reqs_from_files( - requirements_files: Iterable[str], parameters: List[str] = None + requirements_files: Iterable[str], parameters: Optional[List[str]] = None ) -> Iterable[pkg_resources.Requirement]: """Produce a list of requirements from multiple requirements files. 
@@ -90,19 +91,17 @@ def __init__( self, filename: str, reqs: Iterable[pkg_resources.Requirement], - parameters: List[str] = None, + parameters: Optional[List[str]] = None, **_kwargs: Any ) -> None: super(RequirementsFile, self).__init__(filename, reqs, meta=True) self.parameters = parameters - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "RequirementsFile({})".format(self.name) @classmethod - def from_file(cls, full_path, **kwargs): - # type: (str, **Any) -> RequirementsFile + def from_file(cls, full_path: str, **kwargs: Any) -> "RequirementsFile": """Load requirements from a file and build a RequirementsFile Args: @@ -118,16 +117,22 @@ def from_file(cls, full_path, **kwargs): def __str__(self) -> str: return self.name - def to_definition(self, extras): - # type: (Optional[Iterable[str]]) -> Tuple[str, Optional[packaging.version.Version]] + def to_definition( + self, extras: Optional[Iterable[str]] + ) -> Tuple[str, Optional[packaging.version.Version]]: return self.name, None class DistInfo(RequirementContainer): """Metadata describing a distribution of a project""" - def __init__(self, name, version, reqs, meta=False): - # type: (str, Optional[packaging.version.Version], Iterable[pkg_resources.Requirement], bool) -> None + def __init__( + self, + name: str, + version: Optional[packaging.version.Version], + reqs: Iterable[pkg_resources.Requirement], + meta: bool = False, + ) -> None: """ Args: name: The project name @@ -139,19 +144,18 @@ def __init__(self, name, version, reqs, meta=False): self.version = version self.source = None - def __str__(self): - # type: () -> str + def __str__(self) -> str: return "{}=={}".format(*self.to_definition(None)) - def to_definition(self, extras): - # type: (Optional[Iterable[str]]) -> Tuple[str, Optional[packaging.version.Version]] + def to_definition( + self, extras: Optional[Iterable[str]] + ) -> Tuple[str, Optional[packaging.version.Version]]: req_expr = "{}{}".format( self.name, ("[" + ",".join(sorted(extras)) + "]") if extras else "" ) return req_expr, self.version - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return ( self.name + " " @@ -174,7 +178,9 @@ def __init__(self, dist: pkg_resources.Distribution) -> None: def __str__(self) -> str: return "{}=={}".format(*self.to_definition(None)) - def requires(self, extra: str = None) -> Iterable[pkg_resources.Requirement]: + def requires( + self, extra: Optional[str] = None + ) -> Iterable[pkg_resources.Requirement]: return self.dist.requires(extras=(extra,) if extra else ()) def to_definition( diff --git a/req_compile/dists.py b/req_compile/dists.py index 60d1b43..af84b54 100644 --- a/req_compile/dists.py +++ b/req_compile/dists.py @@ -1,4 +1,4 @@ -from __future__ import print_function +from __future__ import annotations import collections.abc import itertools @@ -41,38 +41,33 @@ class DependencyNode: def __init__(self, key: NormName, metadata: Optional[RequirementContainer]) -> None: self.key = key self.metadata = metadata - self.dependencies = ( - {} - ) # type: Dict[DependencyNode, Optional[pkg_resources.Requirement]] - self.reverse_deps = set() # type: Set[DependencyNode] - self.repo = None # type: Optional[Repository] + self.dependencies: Dict[ + DependencyNode, Optional[pkg_resources.Requirement] + ] = {} + self.reverse_deps: Set[DependencyNode] = set() + self.repo: Optional[Repository] = None self.complete = ( False # Whether this node and all of its dependency are completely solved ) - def __repr__(self): - # type: () -> str + def 
__repr__(self) -> str: return self.key - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash(self.key) - def __str__(self): - # type: () -> str + def __str__(self) -> str: if self.metadata is None: return self.key + " [UNSOLVED]" if self.metadata.meta: return self.metadata.name return "==".join(str(x) for x in self.metadata.to_definition(self.extras)) - def __lt__(self, other): - # type: (Any) -> bool + def __lt__(self, other: Any) -> bool: return self.key < other.key @property - def extras(self): - # type: () -> Set[str] + def extras(self) -> Set[str]: extras = set() for rdep in self.reverse_deps: assert ( @@ -83,12 +78,12 @@ def extras(self): extras |= set(reason.extras) return extras - def add_reason(self, node, reason): - # type: (DependencyNode, Optional[pkg_resources.Requirement]) -> None + def add_reason( + self, node: DependencyNode, reason: Optional[pkg_resources.Requirement] + ) -> None: self.dependencies[node] = reason - def build_constraints(self): - # type: () -> pkg_resources.Requirement + def build_constraints(self) -> pkg_resources.Requirement: result = None for rdep_node in self.reverse_deps: @@ -117,9 +112,8 @@ def build_constraints(self): return result -def build_constraints(root_node): - # type: (DependencyNode) -> Iterable[str] - constraints = [] # type: List[str] +def build_constraints(root_node: DependencyNode) -> Iterable[str]: + constraints: List[str] = [] for node in root_node.reverse_deps: assert ( node.metadata is not None @@ -133,8 +127,9 @@ def build_constraints(root_node): return constraints -def _process_constraint_req(req, node, constraints): - # type: (pkg_resources.Requirement, DependencyNode, List[str]) -> None +def _process_constraint_req( + req: pkg_resources.Requirement, node: DependencyNode, constraints: List[str] +) -> None: assert node.metadata is not None, "Node {} must be solved".format(node) extra = None if req.marker: @@ -156,8 +151,7 @@ class DistributionCollection: added to the collection and provide a concrete RequirementContainer (like a DistInfo from a wheel), the corresponding node in this collection will be marked solved.""" - def __init__(self): - # type: () -> None + def __init__(self) -> None: self.nodes: Dict[NormName, DependencyNode] = {} self.logger = logging.getLogger("req_compile.dists") @@ -184,7 +178,7 @@ def add_dist( if isinstance(name_or_metadata, str): req_name = name_or_metadata - metadata_to_apply = None # type: Optional[RequirementContainer] + metadata_to_apply: Optional[RequirementContainer] = None else: assert isinstance(name_or_metadata, RequirementContainer) metadata_to_apply = name_or_metadata @@ -293,7 +287,7 @@ def visit_nodes( roots: Iterable[DependencyNode], max_depth: int = sys.maxsize, reverse: bool = False, - _visited: Set[DependencyNode] = None, + _visited: Optional[Set[DependencyNode]] = None, _cur_depth: int = 0, ) -> Iterable[DependencyNode]: if _visited is None: @@ -329,7 +323,7 @@ def visit_nodes( def generate_lines( self, roots: Iterable[DependencyNode], - req_filter: Callable[[DependencyNode], bool] = None, + req_filter: Optional[Callable[[DependencyNode], bool]] = None, strip_extras: bool = False, ) -> Iterable[ Tuple[Tuple[str, Optional[packaging.version.Version], Optional[str]], str] diff --git a/req_compile/errors.py b/req_compile/errors.py index 08267fb..cf1eea0 100644 --- a/req_compile/errors.py +++ b/req_compile/errors.py @@ -8,7 +8,7 @@ class ExceptionWithDetails(Exception): def __init__(self) -> None: super(ExceptionWithDetails, self).__init__() - self.results = 
None # type: Optional[Any] + self.results: Optional[Any] = None class MetadataError(ExceptionWithDetails): diff --git a/req_compile/metadata/dist_info.py b/req_compile/metadata/dist_info.py index fb0c090..0fcd0d0 100644 --- a/req_compile/metadata/dist_info.py +++ b/req_compile/metadata/dist_info.py @@ -12,8 +12,9 @@ LOG = logging.getLogger("req_compile.metadata.dist_info") -def _find_dist_info_metadata(project_name, namelist): - # type: (str, Iterable[str]) -> Optional[str] +def _find_dist_info_metadata( + project_name: str, namelist: Iterable[str] +) -> Optional[str]: """ In a list of zip path entries, find the one that matches the dist-info for this project @@ -38,8 +39,7 @@ def _find_dist_info_metadata(project_name, namelist): return None -def _fetch_from_wheel(wheel): - # type: (str) -> Optional[DistInfo] +def _fetch_from_wheel(wheel: str) -> Optional[DistInfo]: """ Fetch metadata from a wheel file Args: diff --git a/req_compile/metadata/extractor.py b/req_compile/metadata/extractor.py index 57bee90..69f3f59 100644 --- a/req_compile/metadata/extractor.py +++ b/req_compile/metadata/extractor.py @@ -41,7 +41,7 @@ def add_rename(self, name: str, new_name: str) -> None: self.renames[self.to_relative(new_name)] = self.to_relative(name) def open( - self, file: str, mode: str = "r", encoding: str = None, **_kwargs: Any + self, file: str, mode: str = "r", encoding: Optional[str] = None, **_kwargs: Any ) -> IO[str]: """Open a real file or a file within the archive""" relative_filename = self.to_relative(file) @@ -240,10 +240,10 @@ def __init__(self, wrap: IO[bytes], encoding: str) -> None: def read(self, __n: int = 1024 * 1024) -> str: return self.reader.read(__n) - def readline(self, __limit: int = None) -> str: + def readline(self, __limit: Optional[int] = None) -> str: return self.reader.readline(__limit) - def readlines(self, __hint: int = None) -> List[str]: + def readlines(self, __hint: Optional[int] = None) -> List[str]: return self.reader.readlines(__hint) def write(self, data: Any) -> int: diff --git a/req_compile/metadata/metadata.py b/req_compile/metadata/metadata.py index b6e3ea0..a27ad23 100644 --- a/req_compile/metadata/metadata.py +++ b/req_compile/metadata/metadata.py @@ -20,7 +20,7 @@ def extract_metadata( - filename: str, allow_run_setup_py: bool = True, origin: Repository = None + filename: str, allow_run_setup_py: bool = True, origin: Optional[Repository] = None ) -> RequirementContainer: """Extract a DistInfo from a file or directory diff --git a/req_compile/metadata/pyproject.py b/req_compile/metadata/pyproject.py index 836cfb2..5202aa5 100644 --- a/req_compile/metadata/pyproject.py +++ b/req_compile/metadata/pyproject.py @@ -21,8 +21,7 @@ LOCK = threading.Lock() -def _create_build_backend(build_system): - # type: (Mapping) -> Any +def _create_build_backend(build_system: Mapping) -> Any: backend_name = build_system["build-backend"] module, _, obj = backend_name.partition(":") backend = importlib.import_module(module) @@ -35,8 +34,9 @@ def _fake_set_level(*_args: Any, **_kwargs: Any) -> None: """A setLevel method that does nothing.""" -def _parse_from_prepared_metadata(source_file, backend, pyproject): - # type: (str, Any, Mapping) -> Optional[DistInfo] +def _parse_from_prepared_metadata( + source_file: str, backend: Any, pyproject: Mapping +) -> Optional[DistInfo]: prepare = getattr(backend, "prepare_metadata_for_build_wheel", None) if prepare is None: return None @@ -82,8 +82,7 @@ def _parse_from_prepared_metadata(source_file, backend, pyproject): return None -def 
_parse_from_wheel(backend): - # type: (Mapping[str, Any]) -> Optional[DistInfo] +def _parse_from_wheel(backend: Mapping[str, Any]) -> Optional[DistInfo]: build_wheel = getattr(backend, "build_wheel", None) if build_wheel is None: return None diff --git a/req_compile/metadata/source.py b/req_compile/metadata/source.py index 67b0b39..d500e4d 100644 --- a/req_compile/metadata/source.py +++ b/req_compile/metadata/source.py @@ -61,8 +61,9 @@ THREADLOCAL = threading.local() -def find_in_archive(extractor, filename, max_depth=None): - # type: (Extractor, str, int) -> Optional[str] +def find_in_archive( + extractor: Extractor, filename: str, max_depth: Optional[int] = None +) -> Optional[str]: if extractor.exists(filename): return filename @@ -228,7 +229,9 @@ def _fake_abspath(path: str) -> str: return results -def _run_with_output(cmd: Sequence[str], cwd: str = None, timeout: float = 30.0) -> str: +def _run_with_output( + cmd: Sequence[str], cwd: Optional[str] = None, timeout: float = 30.0 +) -> str: """Run a subprocess with a timeout and return the output. Similar check_output with a timeout Args: @@ -331,8 +334,9 @@ def _build_wheel(name: str, source_file: str) -> Optional[DistInfo]: ) -def _build_egg_info(name, extractor, setup_file): - # type: (str, Extractor, Optional[str]) -> Optional[RequirementContainer] +def _build_egg_info( + name: str, extractor: Extractor, setup_file: Optional[str] +) -> Optional[RequirementContainer]: if setup_file is None: return None @@ -586,7 +590,7 @@ def _parse_setup_py( # pylint: disable=unused-import,unused-variable import codecs - import distutils.core + import distutils.core # pylint: disable=deprecated-module import fileinput import multiprocessing @@ -659,15 +663,18 @@ def __init__(self, modname: str, path: str) -> None: self.path = path self.contents = extractor.contents(path) - # pylint: disable=unused-argument def fake_load_source( modname: str, filename: str, filehandle: Any = None ) -> ModuleType: + del filehandle return import_contents(modname, filename, extractor.contents(filename)) def fake_spec_from_file_location( - modname: str, path: str, submodule_search_locations: Iterable[str] = None + modname: str, + path: str, + submodule_search_locations: Optional[Iterable[str]] = None, ) -> ModuleSpec: + del submodule_search_locations return FakeSpec(modname, path) def fake_module_from_spec(spec: ModuleType) -> ModuleType: @@ -734,6 +741,7 @@ def load_module(self, fullname: str) -> types.ModuleType: fake_stdin = StringIO() + # pylint: disable-next=unused-argument def _fake_find_packages(*args: Any, **kwargs: Any) -> Iterable[Any]: return [] diff --git a/req_compile/py.typed b/req_compile/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/req_compile/repos/findlinks.py b/req_compile/repos/findlinks.py index 891d211..0d3c597 100644 --- a/req_compile/repos/findlinks.py +++ b/req_compile/repos/findlinks.py @@ -19,7 +19,7 @@ class FindLinksRepository(Repository): A directory on the filesystem as a source of distributions. 
""" - def __init__(self, path: str, allow_prerelease: bool = None) -> None: + def __init__(self, path: str, allow_prerelease: Optional[bool] = None) -> None: super(FindLinksRepository, self).__init__( "findlinks", allow_prerelease=allow_prerelease ) diff --git a/req_compile/repos/multi.py b/req_compile/repos/multi.py index 2bf5551..2b027a2 100644 --- a/req_compile/repos/multi.py +++ b/req_compile/repos/multi.py @@ -32,7 +32,7 @@ def get_dist( self, req: pkg_resources.Requirement, allow_source_dist: bool = True, - max_downgrade: int = None, + max_downgrade: Optional[int] = None, ) -> Tuple[RequirementContainer, bool]: last_ex = NoCandidateException(req) for repo in self.repositories: diff --git a/req_compile/repos/repository.py b/req_compile/repos/repository.py index bfcf79b..3e776c2 100644 --- a/req_compile/repos/repository.py +++ b/req_compile/repos/repository.py @@ -1,7 +1,7 @@ from __future__ import annotations import abc -import distutils.util # pylint: disable=import-error,no-name-in-module,no-member +import distutils.util # pylint: disable=import-error,no-name-in-module,no-member,deprecated-module import enum import logging import os @@ -122,8 +122,7 @@ def manylinux_tag_is_compatible_with_this_system(tag: str) -> bool: return True -def _get_abi_tag(): - # type: () -> str +def _get_abi_tag() -> str: """Build a best effort ABI tag""" py_version = (sys.version_info.major, sys.version_info.minor) tag = INTERPRETER_TAG + PY_VERSION_NUM @@ -163,8 +162,7 @@ def check_compatibility(self) -> bool: raise NotImplementedError -def _impl_major_minor(py_version): - # type: (str) -> Tuple[str, int, int] +def _impl_major_minor(py_version: str) -> Tuple[str, int, int]: """Split a python version tag into the implementation and a major and minor version. If the minor version is not reported, return zero. 
If any parts are invalid, choose results that should sort them last""" @@ -181,8 +179,7 @@ def _impl_major_minor(py_version): return impl, major, minor -def _is_py_version_compatible(py_version): - # type: (str) -> bool +def _is_py_version_compatible(py_version: str) -> bool: impl, major, minor = _impl_major_minor(py_version) if impl == "py" or impl == INTERPRETER_TAG: if major == sys.version_info.major and minor <= sys.version_info.minor: @@ -215,16 +212,13 @@ def _py_version_score(py_version: str) -> int: class WheelVersionTags(PythonVersionRequirement): - def __init__(self, py_versions): - # type: (Iterable[str]) -> None + def __init__(self, py_versions: Iterable[str]) -> None: assert not isinstance(py_versions, str) - if py_versions is None: - self.py_versions = None # type: Optional[Set[str]] - else: + self.py_versions: Optional[Set[str]] = None + if py_versions is not None: self.py_versions = set(py_versions) - def check_compatibility(self): - # type: () -> bool + def check_compatibility(self) -> bool: if not self.py_versions: return True @@ -232,8 +226,7 @@ def check_compatibility(self): _is_py_version_compatible(py_version) for py_version in self.py_versions ) - def __str__(self): - # type: () -> str + def __str__(self) -> str: if not self.py_versions: return "any" @@ -245,8 +238,7 @@ def __eq__(self, other: object) -> bool: return self.py_versions == other.py_versions @property - def tag_score(self): - # type: () -> int + def tag_score(self) -> int: """Calculate a score based on how specific the versions given are""" if not self.py_versions: return 0 @@ -258,17 +250,16 @@ class Candidate: # pylint: disable=too-many-instance-attributes def __init__( self, - name, # type: str - filename, # type: Optional[str] - version, # type: packaging.version.Version - py_version, # type: Optional[WheelVersionTags] - abi, # type: Optional[str] - plats, # type: Union[str, Iterable[str]] - link, # type: Any - candidate_type=DistributionType.SDIST, # type: DistributionType - extra_sort_info="", # type: Any - ): - # type: (...) -> None + name: str, + filename: Optional[str], + version: packaging.version.Version, + py_version: Optional[WheelVersionTags], + abi: Optional[str], + plats: Union[str, Iterable[str]], + link: Any, + candidate_type: DistributionType = DistributionType.SDIST, + extra_sort_info: Any = "", + ) -> None: """ Args: name: Name of the candidate @@ -282,9 +273,7 @@ def __init__( """ self.name = name self.filename = filename - self.version = version or parse_version( - "0.0.0" - ) # type: packaging.version.Version + self.version: packaging.version.Version = version or parse_version("0.0.0") self.py_version = py_version self.abi = abi if isinstance(plats, str): @@ -296,12 +285,12 @@ def __init__( # Sort based on tags to make sure the most specific distributions # are matched first - self._sortkey = ( - None - ) # type: Optional[Tuple[packaging.version.Version, str, int, Tuple[int, int, int, int]]] + self._sortkey: Optional[ + Tuple[packaging.version.Version, str, int, Tuple[int, int, int, int]] + ] = None self.extra_sort_info = extra_sort_info - self.preparsed = None # type: Optional[RequirementContainer] + self.preparsed: Optional[RequirementContainer] = None # Repository this candidate came from. 
self.source: Optional[Repository] = None @@ -320,8 +309,7 @@ def sortkey( return self._sortkey @property - def tag_score(self): - # type: () -> Tuple[int, int, int, int] + def tag_score(self) -> Tuple[int, int, int, int]: py_version_score = ( self.py_version.tag_score if self.py_version is not None else 0 ) @@ -358,10 +346,10 @@ def tag_score(self): ) return py_version_score, plat_score, abi_score, extra_score - def __eq__(self, other): - # type: (Any) -> bool + def __eq__(self, other: object) -> bool: return ( - self.name == other.name + isinstance(other, Candidate) + and self.name == other.name and self.filename == other.filename and self.version == other.version and self.py_version == other.py_version @@ -371,8 +359,7 @@ def __eq__(self, other): and self.type == other.type ) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return ( "Candidate(name={}, filename={}, version={}, py_versions={}, " "abi={}, platform={}, link={})".format( @@ -386,8 +373,7 @@ def __repr__(self): ) ) - def __str__(self): - # type: () -> str + def __str__(self) -> str: return "{} {}-{}-{}-{}-{}".format( self.type.name, self.name, @@ -494,8 +480,7 @@ def _tar_gz_filename_to_candidate(source: Tuple[str, str], filename: str) -> Can ) -def _check_platform_compatibility(py_platforms): - # type: (Iterable[str]) -> bool +def _check_platform_compatibility(py_platforms: Iterable[str]) -> bool: return ( "any" in py_platforms or any(py_platform.lower() in PLATFORM_TAGS for py_platform in py_platforms) @@ -506,8 +491,7 @@ def _check_platform_compatibility(py_platforms): ) -def _check_abi_compatibility(abi): - # type: (str) -> bool +def _check_abi_compatibility(abi: str) -> bool: return abi in ABI_TAGS @@ -586,8 +570,7 @@ def filter_candidates( ] -def _is_all_prereleases(candidates): - # type: (Iterable[Candidate]) -> bool +def _is_all_prereleases(candidates: Iterable[Candidate]) -> bool: all_prereleases = True for candidate in candidates: all_prereleases = all_prereleases and candidate.version.is_prerelease @@ -595,7 +578,9 @@ def _is_all_prereleases(candidates): class Repository(metaclass=abc.ABCMeta): - def __init__(self, logger_name: str, allow_prerelease: bool = None) -> None: + def __init__( + self, logger_name: str, allow_prerelease: Optional[bool] = None + ) -> None: super(Repository, self).__init__() if allow_prerelease is None: allow_prerelease = False @@ -640,7 +625,7 @@ def get_dist( self, req: pkg_resources.Requirement, allow_source_dist: bool = True, - max_downgrade: int = None, + max_downgrade: Optional[int] = None, ) -> Tuple[RequirementContainer, bool]: """Fetch the best matching distribution for the given requirement. 
@@ -668,7 +653,7 @@ def do_get_candidate( candidates: Iterable[Candidate], allow_source_dist: bool = True, force_allow_prerelease: bool = False, - max_downgrade: int = None, + max_downgrade: Optional[int] = None, ) -> Tuple[RequirementContainer, bool]: """ Args: @@ -746,10 +731,13 @@ def do_get_candidate( raise NoCandidateException(req) + # pylint: disable-next=invalid-name def why_cant_I_use( - self, req, candidate, only_binary=None - ): # pylint: disable=invalid-name - # type: (pkg_resources.Requirement, Candidate, Set[NormName]) -> CantUseReason + self, + req: pkg_resources.Requirement, + candidate: Candidate, + only_binary: Optional[Set[NormName]] = None, + ) -> CantUseReason: reason = check_usability( req, candidate, diff --git a/req_compile/repos/solution.py b/req_compile/repos/solution.py index 8c9cfc4..df10b65 100644 --- a/req_compile/repos/solution.py +++ b/req_compile/repos/solution.py @@ -29,7 +29,9 @@ def _candidate_from_node(node: DependencyNode) -> Candidate: class SolutionRepository(Repository): """A repository that provides distributions from a previous solution.""" - def __init__(self, filename: str, excluded_packages: Iterable[str] = None) -> None: + def __init__( + self, filename: str, excluded_packages: Optional[Iterable[str]] = None + ) -> None: """Constructor.""" super(SolutionRepository, self).__init__("solution", allow_prerelease=True) self.filename = os.path.abspath(filename) if filename != "-" else "-" @@ -112,7 +114,9 @@ def load_from_file(self, filename: str) -> None: self._remove_nodes() - def _load_from_lines(self, lines: Iterable[str], meta_file: str = None) -> None: + def _load_from_lines( + self, lines: Iterable[str], meta_file: Optional[str] = None + ) -> None: for line in lines: # Skip directives we don't process in solutions (like --index-url) if line.strip().startswith("--") and not self._partial_line: @@ -133,7 +137,7 @@ def _remove_nodes(self) -> None: except KeyError: pass - def _parse_line(self, line: str, meta_file: str = None) -> None: + def _parse_line(self, line: str, meta_file: Optional[str] = None) -> None: if self._partial_line: self._parse_multi_line(line, meta_file) return @@ -151,7 +155,7 @@ def _parse_line(self, line: str, meta_file: str = None) -> None: self._parse_single_line(line) - def _parse_single_line(self, line: str, meta_file: str = None) -> None: + def _parse_single_line(self, line: str, meta_file: Optional[str] = None) -> None: req_hash_part, _, source_part = line.partition("#") req_hash_part = req_hash_part.strip() if not req_hash_part: @@ -220,7 +224,7 @@ def _parse_single_line(self, line: str, meta_file: str = None) -> None: except Exception: raise ValueError(f"Failed to parse line: {line}") - def _parse_multi_line(self, line: str, meta_file: str = None) -> None: + def _parse_multi_line(self, line: str, meta_file: Optional[str] = None) -> None: stripped_line = line.strip() stripped_line = stripped_line.rstrip("\\") @@ -237,8 +241,8 @@ def _add_sources( self, req: pkg_resources.Requirement, sources: Iterable[str], - url: str = None, - dist_hash: str = None, + url: Optional[str] = None, + dist_hash: Optional[str] = None, ) -> None: pkg_names = map(lambda x: x.split(" ")[0], sources) constraints = map( diff --git a/req_compile/repos/source.py b/req_compile/repos/source.py index 592a847..bef85ae 100644 --- a/req_compile/repos/source.py +++ b/req_compile/repos/source.py @@ -46,8 +46,13 @@ class SourceRepository(Repository): of potential distributions. 
""" - def __init__(self, path, excluded_paths=None, marker_files=None, parallelism=1): - # type: (str, Iterable[str], Iterable[str], int) -> None + def __init__( + self, + path: str, + excluded_paths: Optional[Iterable[str]] = None, + marker_files: Optional[Iterable[str]] = None, + parallelism: int = 1, + ) -> None: """Constructor. Args: @@ -66,22 +71,23 @@ def __init__(self, path, excluded_paths=None, marker_files=None, parallelism=1): ) self.path = os.path.abspath(path) - self.distributions = collections.defaultdict( - list - ) # type: Dict[str, List[req_compile.repos.repository.Candidate]] + self.distributions: Dict[ + str, List[req_compile.repos.repository.Candidate] + ] = collections.defaultdict(list) self.marker_files = set(MARKER_FILES) self.parallelism = parallelism if marker_files: self.marker_files |= set(marker_files) - self._find_later = collections.deque() # type: Deque[str] + self._find_later: Deque[str] = collections.deque() self._find_all_distributions( [os.path.abspath(path) for path in (excluded_paths or [])] ) - def _extract_metadata(self, allow_setup_py, source_dir): - # type: (bool, str) -> Tuple[str, Optional[RequirementContainer]] + def _extract_metadata( + self, allow_setup_py: bool, source_dir: str + ) -> Tuple[str, Optional[RequirementContainer]]: if not allow_setup_py: if os.path.exists(os.path.join(source_dir, "setup.py")): self._find_later.append(source_dir) @@ -99,16 +105,15 @@ def _extract_metadata(self, allow_setup_py, source_dir): ) return source_dir, None - def _find_all_distributions(self, excluded_paths): - # type: (Iterable[str]) -> None + def _find_all_distributions(self, excluded_paths: Iterable[str]) -> None: """Find all source distribution possible locations""" source_dirs = set(self._find_all_source_dirs(excluded_paths)) # Loading source distributions via threads can be significantly faster because # it is a lot of I/O if self.parallelism == 1: - pool = None # type: Optional[ThreadPool] - map_func = map # type: Callable + pool: Optional[ThreadPool] = None + map_func: Callable = map else: pool = ThreadPool(self.parallelism) map_func = pool.imap_unordered @@ -129,8 +134,7 @@ def _find_all_distributions(self, excluded_paths): if result is not None: self._add_distribution(source_dir, result) - def _add_distribution(self, source_dir, result): - # type: (str, RequirementContainer) -> None + def _add_distribution(self, source_dir: str, result: RequirementContainer) -> None: if result.version is None: self.logger.debug("Source dir %s did not provide a version") result.version = parse_version("0") diff --git a/req_compile/utils.py b/req_compile/utils.py index 54a1f8b..a0a2797 100644 --- a/req_compile/utils.py +++ b/req_compile/utils.py @@ -92,7 +92,9 @@ def req_iter_from_file( def req_iter_from_lines( - lines: Iterable[str], parameters: typing.List[str], relative_dir: str = None + lines: Iterable[str], + parameters: typing.List[str], + relative_dir: Optional[str] = None, ) -> Iterable[pkg_resources.Requirement]: full_line = "" continuation = False @@ -205,7 +207,7 @@ def merge_requirements( NormName = typing.NewType("NormName", str) -NAME_CACHE = {} # type: Dict[str, NormName] +NAME_CACHE: Dict[str, NormName] = {} def normalize_project_name(project_name: str) -> NormName: @@ -247,8 +249,7 @@ def has_prerelease(req: pkg_resources.Requirement) -> bool: @lru_cache(maxsize=None) -def get_glibc_version(): - # type: () -> Optional[Tuple[int, int]] +def get_glibc_version() -> Optional[Tuple[int, int]]: """Based on PEP 513/600.""" import ctypes # pylint: 
disable=bad-option-value,import-outside-toplevel
diff --git a/setup.py b/setup.py
index ebe06f7..c67fd12 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name="req-compile",
-    version="1.0.0pre8",
+    version="1.0.0rc9",
     author="Spencer Putt",
     author_email="sputt@alumni.iu.edu",
     description="Python requirements compiler",
@@ -10,6 +10,7 @@
     url="https://github.com/sputt/req-compile",
     install_requires=open("requirements.in").readlines(),
     packages=find_packages(include=["req_compile*"]),
+    package_data={"": ["py.typed"]},
     license="MIT License",
     entry_points={
         "console_scripts": [