WIP: Pausing to move pantsbuild#13398 handling into the caller.
# Rust tests and lints will be skipped. Delete if not intended.
[ci skip-rust]

# Building wheels and fs_util will be skipped. Delete if not intended.
[ci skip-build-wheels]
stuhood committed Apr 6, 2022
1 parent 97f4aac commit 6e8589d
Showing 3 changed files with 57 additions and 64 deletions.
2 changes: 1 addition & 1 deletion src/python/pants/backend/python/util_rules/pex.py
@@ -498,7 +498,7 @@ def _build_pex_args_for_requirements(
else:
argv.extend(["--requirement", lockfile.lockfile_path, "--no-transitive"])
is_network_resolve = True
if lockfile.metadata:
if lockfile.metadata and requirements.complete_req_strings:
validate_metadata(
lockfile.metadata,
interpreter_constraints,
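For context, a minimal self-contained sketch (not part of the commit) of the guard this hunk adds: metadata validation now runs only when the caller supplied the complete set of requirement strings. `StubLockfileMetadata` and `should_validate` are hypothetical stand-ins, not the Pants API.

    from dataclasses import dataclass
    from typing import Optional, Tuple


    @dataclass(frozen=True)
    class StubLockfileMetadata:
        # Stand-in for the metadata a lockfile header records about its inputs.
        requirement_strings: frozenset


    def should_validate(
        metadata: Optional[StubLockfileMetadata],
        complete_req_strings: Optional[Tuple[str, ...]],
    ) -> bool:
        # Mirrors `if lockfile.metadata and requirements.complete_req_strings:`.
        # Without the complete set of input requirement strings there is nothing
        # trustworthy to compare the lockfile's recorded inputs against, so skip.
        return bool(metadata and complete_req_strings)


    meta = StubLockfileMetadata(frozenset({"ansicolors==1.1.8"}))
    assert should_validate(meta, ("ansicolors==1.1.8",)) is True
    assert should_validate(meta, None) is False    # no req strings: skip validation
    assert should_validate(None, ("ansicolors==1.1.8",)) is False    # no metadata: skip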
115 changes: 53 additions & 62 deletions src/python/pants/backend/python/util_rules/pex_from_targets.py
@@ -31,7 +31,7 @@
PexRequest,
)
from pants.backend.python.util_rules.pex import rules as pex_rules
from pants.backend.python.util_rules.pex_requirements import Lockfile, PexRequirements
from pants.backend.python.util_rules.pex_requirements import Lockfile, LockfileContent, PexRequirements, RequirementsFromLockfile, LoadedLockfile, LoadLockfileRequest
from pants.backend.python.util_rules.python_sources import (
PythonSourceFiles,
PythonSourceFilesRequest,
@@ -40,7 +40,7 @@
from pants.backend.python.util_rules.python_sources import rules as python_sources_rules
from pants.engine.addresses import Address, Addresses
from pants.engine.collection import DeduplicatedCollection
from pants.engine.fs import Digest, DigestContents, GlobMatchErrorBehavior, MergeDigests, PathGlobs
from pants.engine.fs import Digest, DigestContents, GlobMatchErrorBehavior, MergeDigests, PathGlobs, FileContent
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import Target, TransitiveTargets, TransitiveTargetsRequest
from pants.util.docutil import doc_url
@@ -192,7 +192,7 @@ async def interpreter_constraints_for_targets(
@dataclass(frozen=True)
class ChosenPythonResolve:
name: str
lockfile_path: str
lockfile: LoadedLockfile


@dataclass(frozen=True)
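Illustrative only: a stand-in mirroring the field change from `lockfile_path: str` to a loaded lockfile object, so callers receive the lockfile itself rather than re-deriving it from a path. `StubChosenResolve` is hypothetical and `python-default` is just an example resolve name.

    from dataclasses import dataclass


    @dataclass(frozen=True)
    class StubChosenResolve:
        name: str
        lockfile: object  # placeholder for a LoadedLockfile-like value


    chosen = StubChosenResolve(name="python-default", lockfile=object())
    # frozen=True keeps the value hashable, as rule inputs/outputs generally need to be.
    assert isinstance(hash(chosen), int)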
@@ -242,42 +242,46 @@ async def choose_python_resolve(
for root in transitive_targets.roots
if root.has_field(PythonResolveField)
}
if not root_resolves:
# If there are no relevant targets, we fall back to the default resolve. This is relevant,
# for example, when running `./pants repl` with no specs or only on non-Python targets.
return ChosenPythonResolve(
name=python_setup.default_resolve,
lockfile_path=python_setup.resolves[python_setup.default_resolve],
)

if len(root_resolves) > 1:
raise NoCompatibleResolveException(
python_setup,
"The input targets did not have a resolve in common",
transitive_targets.roots,
)

chosen_resolve = next(iter(root_resolves))

# Then, validate that all transitive deps are compatible.
for tgt in transitive_targets.dependencies:
if (
tgt.has_field(PythonResolveField)
and tgt[PythonResolveField].normalized_value(python_setup) != chosen_resolve
):
plural = ("s", "their") if len(transitive_targets.roots) > 1 else ("", "its")
if root_resolves:
if len(root_resolves) > 1:
raise NoCompatibleResolveException(
python_setup,
(
f"The resolve chosen for the root target{plural[0]} was {chosen_resolve}, but "
f"some of {plural[1]} dependencies are not compatible with that resolve"
),
transitive_targets.closure,
"The input targets did not have a resolve in common",
transitive_targets.roots,
)

return ChosenPythonResolve(
name=chosen_resolve, lockfile_path=python_setup.resolves[chosen_resolve]
chosen_resolve = next(iter(root_resolves))

# Then, validate that all transitive deps are compatible.
for tgt in transitive_targets.dependencies:
if (
tgt.has_field(PythonResolveField)
and tgt[PythonResolveField].normalized_value(python_setup) != chosen_resolve
):
plural = ("s", "their") if len(transitive_targets.roots) > 1 else ("", "its")
raise NoCompatibleResolveException(
python_setup,
(
f"The resolve chosen for the root target{plural[0]} was {chosen_resolve}, but "
f"some of {plural[1]} dependencies are not compatible with that resolve"
),
transitive_targets.closure,
)

else:
# If there are no relevant targets, we fall back to the default resolve. This is relevant,
# for example, when running `./pants repl` with no specs or only on non-Python targets.
chosen_resolve = python_setup.default_resolve

lockfile = await Get(LoadedLockfile, LoadLockfileRequest(Lockfile(
file_path=python_setup.resolves[chosen_resolve],
file_path_description_of_origin=(
f"the resolve `{chosen_resolve}` (from `[python].resolves`)"
),
resolve_name=chosen_resolve,
))
)
return ChosenPythonResolve(name=chosen_resolve, lockfile=lockfile)


class GlobalRequirementConstraints(DeduplicatedCollection[PipRequirement]):
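A minimal sketch (not the real rule) of the branch structure the hunk moves to: the no-resolves fallback becomes the `else:` of a single `if root_resolves:` block, and both branches now feed one shared lockfile-loading step. The resolve names below are made up, and exception types plus the transitive-dependency check are omitted.

    def choose_resolve(root_resolves: set, default_resolve: str) -> str:
        # If the root targets declare resolves they must agree on exactly one;
        # otherwise fall back to the configured default resolve.
        if root_resolves:
            if len(root_resolves) > 1:
                raise ValueError("The input targets did not have a resolve in common")
            return next(iter(root_resolves))
        return default_resolve


    assert choose_resolve({"data-science"}, "python-default") == "data-science"
    assert choose_resolve(set(), "python-default") == "python-default"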
@@ -378,7 +382,7 @@ async def create_pex_from_targets(
request: PexFromTargetsRequest, python_setup: PythonSetup
) -> PexRequest:
logger.warning(f"creating pex from targets with:\n {request}")
requirements: PexRequirements | Lockfile = PexRequirements()
requirements: PexRequirements | RequirementsFromLockfile = PexRequirements()
if request.include_requirements:
requirements = await Get(PexRequirements, _PexRequirementsRequest(request.addresses))

@@ -430,16 +434,7 @@ async def create_pex_from_targets(
chosen_resolve = await Get(
ChosenPythonResolve, ChosenPythonResolveRequest(request.addresses)
)
lockfile = Lockfile(
file_path=chosen_resolve.lockfile_path,
file_path_description_of_origin=(
f"the resolve `{chosen_resolve.name}` (from `[python].resolves`)"
),
resolve_name=chosen_resolve.name,
)
requirements = dataclasses.replace(
requirements, from_superset=lockfile
)
requirements = dataclasses.replace(requirements, from_superset=chosen_resolve.lockfile)

interpreter_constraints = await Get(
InterpreterConstraints,
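A self-contained sketch of the `dataclasses.replace(...)` pattern used above to attach the already-loaded lockfile as the requirements' superset. `StubRequirements` is a hypothetical stand-in for `PexRequirements`, not the real class.

    import dataclasses
    from dataclasses import dataclass
    from typing import Optional, Tuple


    @dataclass(frozen=True)
    class StubRequirements:
        # `from_superset` names the lockfile whose pinned versions these
        # requirements must resolve as a subset of.
        req_strings: Tuple[str, ...] = ()
        from_superset: Optional[object] = None


    loaded_lockfile = object()  # placeholder for the chosen resolve's loaded lockfile
    reqs = StubRequirements(req_strings=("requests",))
    # Frozen dataclasses are "modified" by building an updated copy.
    reqs = dataclasses.replace(reqs, from_superset=loaded_lockfile)
    assert reqs.from_superset is loaded_lockfile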
@@ -538,20 +533,12 @@ async def get_repository_pex(
)
repository_pex_request = PexRequest(
description=(
f"Installing {chosen_resolve.lockfile_path} for the resolve `{chosen_resolve.name}`"
f"Installing {chosen_resolve.lockfile.lockfile_path} "
f"for the resolve `{chosen_resolve.name}`"
),
output_filename=f"{path_safe(chosen_resolve.name)}_lockfile.pex",
internal_only=request.internal_only,
requirements=Lockfile(
file_path=chosen_resolve.lockfile_path,
file_path_description_of_origin=(
f"the resolve `{chosen_resolve.name}` (from `[python].resolves`)"
),
resolve_name=chosen_resolve.name,
# NB: PEX interprets `--lock` with no `req_strings` as "install the entire lockfile"
# And we don't use `req_strings` if the resolve isn't a PEX lockfile.
req_strings=FrozenOrderedSet(),
),
requirements=RequirementsFromLockfile(chosen_resolve.lockfile),
interpreter_constraints=interpreter_constraints,
platforms=request.platforms,
complete_platforms=request.complete_platforms,
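A sketch of how the repository-PEX request now says "install the entire lockfile": the loaded lockfile is wrapped with no requirement strings, which also leaves metadata validation skipped (matching the pex.py guard above). `StubRequirementsFromLockfile` mirrors, but is not, the real class.

    from dataclasses import dataclass
    from typing import Optional, Tuple


    @dataclass(frozen=True)
    class StubRequirementsFromLockfile:
        lockfile: object  # placeholder for a LoadedLockfile-like value
        # Only used for metadata validation; None means "not available, don't validate".
        complete_req_strings: Optional[Tuple[str, ...]] = None


    # The repository PEX installs everything in the lockfile, so no req strings are given.
    repo_reqs = StubRequirementsFromLockfile(lockfile=object())
    assert repo_reqs.complete_req_strings is None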
@@ -632,16 +619,20 @@ async def _setup_constraints_repository_pex(
# all these repository pexes will have identical pinned versions of everything,
# this is not a correctness issue, only a performance one.
all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}

# Because the modified constraints do not exist on disk, but we'd like to treat them as a
# lockfile, we declare a new file (with approximately the same name) containing the normalized
# requirements to act as the lockfile for this resolve.
constraints_lockfile = LockfileContent(
FileContent(f"{constraints_path}.normalized", "\n".join(all_constraints).encode()),
constraints_path,
)

repository_pex = PexRequest(
description=f"Resolving {constraints_path}",
output_filename="repository.pex",
internal_only=request.internal_only,
requirements=PexRequirements(
all_constraints,
constraints_strings=(str(constraint) for constraint in global_requirement_constraints),
# TODO: See PexRequirements docs.
is_all_constraints_resolve=True,
),
requirements=RequirementsFromLockfile(constraints_lockfile),
interpreter_constraints=interpreter_constraints,
platforms=request.platforms,
complete_platforms=request.complete_platforms,
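A small sketch of the in-memory "constraints file as lockfile" trick the new comment describes: since the normalized constraints never exist on disk, a new file content object (with approximately the same name) carries them. `StubFileContent` stands in for `FileContent`, and the constraint strings and path are example values.

    from dataclasses import dataclass


    @dataclass(frozen=True)
    class StubFileContent:
        path: str
        content: bytes


    constraints_path = "constraints.txt"
    all_constraints = {"ansicolors==1.1.8", "requests==2.27.1"}

    # Normalized constraints live only in memory, so declare a sibling "file"
    # whose bytes are the normalized requirement strings, one per line.
    normalized = StubFileContent(
        f"{constraints_path}.normalized",
        "\n".join(sorted(all_constraints)).encode(),
    )
    assert normalized.path == "constraints.txt.normalized"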
4 changes: 3 additions & 1 deletion src/python/pants/backend/python/util_rules/pex_requirements.py
@@ -205,7 +205,9 @@ class RequirementsFromLockfile:
content anyway.
"""
lockfile: LoadedLockfile
complete_req_strings: tuple[str]
# If available, the current complete set of requirement strings that influence this lockfile.
# Used for metadata validation.
complete_req_strings: tuple[str] | None = None


@frozen_after_init
