Initial workspace support #3705

Merged 13 commits on May 28, 2024

3 changes: 2 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

5 changes: 3 additions & 2 deletions crates/pep508-rs/src/verbatim_url.rs
@@ -70,15 +70,16 @@ impl VerbatimUrl {
#[cfg(feature = "non-pep508-extensions")] // PEP 508 arguably only allows absolute file URLs.
pub fn parse_path(
path: impl AsRef<Path>,
working_dir: impl AsRef<Path>,
base_dir: impl AsRef<Path>,
) -> Result<Self, VerbatimUrlError> {
debug_assert!(base_dir.as_ref().is_absolute(), "base dir must be absolute");
let path = path.as_ref();

// Convert the path to an absolute path, if necessary.
let path = if path.is_absolute() {
path.to_path_buf()
} else {
working_dir.as_ref().join(path)
base_dir.as_ref().join(path)
};

// Normalize the path.
3 changes: 2 additions & 1 deletion crates/uv-requirements/Cargo.toml
@@ -35,9 +35,9 @@ fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
glob = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
path-absolutize = { workspace = true }
rustc-hash = { workspace = true }
same-file = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
thiserror = { workspace = true }
@@ -52,6 +52,7 @@ schemars = ["dep:schemars"]
indoc = "2.0.5"
insta = { version = "1.38.0", features = ["filters", "redactions", "json"] }
regex = { workspace = true }
tokio = { workspace = true }

[lints]
workspace = true
2 changes: 2 additions & 0 deletions crates/uv-requirements/src/lookahead.rs
@@ -4,6 +4,7 @@ use futures::stream::FuturesUnordered;
use futures::StreamExt;
use rustc_hash::FxHashSet;
use thiserror::Error;
use tracing::trace;

use distribution_types::{
BuiltDist, Dist, DistributionMetadata, GitSourceDist, Requirement, RequirementSource,
@@ -152,6 +153,7 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> {
&self,
requirement: Requirement,
) -> Result<Option<RequestedRequirements>, LookaheadError> {
trace!("Performing lookahead for {requirement}");
// Determine whether the requirement represents a local distribution and convert to a
// buildable distribution.
let dist = match requirement.source {
122 changes: 72 additions & 50 deletions crates/uv-requirements/src/pyproject.rs
@@ -30,7 +30,7 @@ use uv_git::GitReference;
use uv_normalize::{ExtraName, PackageName};
use uv_warnings::warn_user_once;

use crate::ExtrasSpecification;
use crate::{ExtrasSpecification, Workspace};

#[derive(Debug, Error)]
pub enum Pep621Error {
@@ -39,7 +39,7 @@ pub enum Pep621Error {
#[error("Must specify a `[project]` section alongside `[tool.uv.sources]`")]
MissingProjectSection,
#[error("pyproject.toml section is declared as dynamic, but must be static: `{0}`")]
CantBeDynamic(&'static str),
DynamicNotAllowed(&'static str),
#[error("Failed to parse entry for: `{0}`")]
LoweringError(PackageName, #[source] LoweringError),
}
@@ -68,14 +68,16 @@ pub enum LoweringError {
InvalidVerbatimUrl(#[from] pep508_rs::VerbatimUrlError),
#[error("Can't combine URLs from both `project.dependencies` and `tool.uv.sources`")]
ConflictingUrls,
#[error("Could not normalize path: `{0}`")]
AbsolutizeError(String, #[source] io::Error),
#[error("Could not normalize path: `{}`", _0.user_display())]
AbsolutizeError(PathBuf, #[source] io::Error),
#[error("Fragments are not allowed in URLs: `{0}`")]
ForbiddenFragment(Url),
#[error("`workspace = false` is not yet supported")]
WorkspaceFalse,
#[error("`tool.uv.sources` is a preview feature; use `--preview` or set `UV_PREVIEW=1` to enable it")]
MissingPreview,
#[error("`editable = false` is not yet supported")]
NonEditableWorkspaceDependency,
}

/// A `pyproject.toml` as specified in PEP 517.
@@ -241,12 +243,11 @@ impl Pep621Metadata {
///
/// Returns an error if the requirements are not valid PEP 508 requirements.
pub(crate) fn try_from(
pyproject: PyProjectToml,
pyproject: &PyProjectToml,
extras: &ExtrasSpecification,
pyproject_path: &Path,
project_dir: &Path,
workspace_sources: &BTreeMap<PackageName, Source>,
workspace_packages: &BTreeMap<PackageName, String>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Option<Self>, Pep621Error> {
let project_sources = pyproject
@@ -255,9 +256,9 @@ impl Pep621Metadata {
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.clone());

let has_sources = project_sources.is_some() || !workspace_sources.is_empty();
let has_sources = project_sources.is_some() || !workspace.sources().is_empty();

let Some(project) = pyproject.project else {
let Some(project) = &pyproject.project else {
return if has_sources {
Err(Pep621Error::MissingProjectSection)
} else {
@@ -268,7 +269,7 @@
// If the project specifies dynamic dependencies, we can't extract the requirements.
if dynamic.iter().any(|field| field == "dependencies") {
return if has_sources {
Err(Pep621Error::CantBeDynamic("project.dependencies"))
Err(Pep621Error::DynamicNotAllowed("project.dependencies"))
} else {
Ok(None)
};
@@ -277,22 +278,23 @@
// extract the requirements.
if !extras.is_empty() && dynamic.iter().any(|field| field == "optional-dependencies") {
return if has_sources {
Err(Pep621Error::CantBeDynamic("project.optional-dependencies"))
Err(Pep621Error::DynamicNotAllowed(
"project.optional-dependencies",
))
} else {
Ok(None)
};
}
}

let requirements = lower_requirements(
&project.dependencies.unwrap_or_default(),
&project.optional_dependencies.unwrap_or_default(),
project.dependencies.as_deref(),
project.optional_dependencies.as_ref(),
pyproject_path,
&project.name,
project_dir,
&project_sources.unwrap_or_default(),
workspace_sources,
workspace_packages,
workspace,
preview,
)?;

@@ -316,7 +318,7 @@ impl Pep621Metadata {
}

Ok(Some(Self {
name: project.name,
name: project.name.clone(),
requirements: requirements_with_extras,
used_extras,
}))
@@ -325,18 +327,18 @@

#[allow(clippy::too_many_arguments)]
pub(crate) fn lower_requirements(
dependencies: &[String],
optional_dependencies: &IndexMap<ExtraName, Vec<String>>,
dependencies: Option<&[String]>,
optional_dependencies: Option<&IndexMap<ExtraName, Vec<String>>>,
pyproject_path: &Path,
project_name: &PackageName,
project_dir: &Path,
project_sources: &BTreeMap<PackageName, Source>,
workspace_sources: &BTreeMap<PackageName, Source>,
workspace_packages: &BTreeMap<PackageName, String>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Requirements, Pep621Error> {
let dependencies = dependencies
.iter()
.into_iter()
.flatten()
.map(|dependency| {
let requirement = pep508_rs::Requirement::from_str(dependency)?.with_origin(
RequirementOrigin::Project(pyproject_path.to_path_buf(), project_name.clone()),
@@ -347,15 +349,15 @@ pub(crate) fn lower_requirements(
project_name,
project_dir,
project_sources,
workspace_sources,
workspace_packages,
workspace,
preview,
)
.map_err(|err| Pep621Error::LoweringError(name, err))
})
.collect::<Result<_, Pep621Error>>()?;
let optional_dependencies = optional_dependencies
.iter()
.into_iter()
.flatten()
.map(|(extra_name, dependencies)| {
let dependencies: Vec<_> = dependencies
.iter()
@@ -372,8 +374,7 @@
project_name,
project_dir,
project_sources,
workspace_sources,
workspace_packages,
workspace,
preview,
)
.map_err(|err| Pep621Error::LoweringError(name, err))
Expand All @@ -394,29 +395,35 @@ pub(crate) fn lower_requirement(
project_name: &PackageName,
project_dir: &Path,
project_sources: &BTreeMap<PackageName, Source>,
workspace_sources: &BTreeMap<PackageName, Source>,
workspace_packages: &BTreeMap<PackageName, String>,
workspace: &Workspace,
preview: PreviewMode,
) -> Result<Requirement, LoweringError> {
let source = project_sources
.get(&requirement.name)
.or(workspace_sources.get(&requirement.name))
.or(workspace.sources().get(&requirement.name))
.cloned();

if !matches!(
source,
Some(Source::Workspace {
// By using toml, we technically support `workspace = false`.
workspace: true,
..
})
) && workspace_packages.contains_key(&requirement.name)
{
let workspace_package_declared =
// We require that when you use a package that's part of the workspace, ...
!workspace.packages().contains_key(&requirement.name)
// ... it must be declared as a workspace dependency (`workspace = true`), ...
|| matches!(
source,
Some(Source::Workspace {
// By using toml, we technically support `workspace = false`.
workspace: true,
..
})
)
// ... except for recursive self-inclusion (extras that activate other extras), e.g.
// `framework[machine_learning]` depends on `framework[cuda]`.
|| &requirement.name == project_name;
if !workspace_package_declared {
return Err(LoweringError::UndeclaredWorkspacePackage);
}

let Some(source) = source else {
let has_sources = !project_sources.is_empty() || !workspace_sources.is_empty();
let has_sources = !project_sources.is_empty() || !workspace.sources().is_empty();
// Support recursive editable inclusions.
if has_sources && requirement.version_or_url.is_none() && &requirement.name != project_name
{
@@ -523,20 +530,21 @@ pub(crate) fn lower_requirement(
Some(VersionOrUrl::Url(_)) => return Err(LoweringError::ConflictingUrls),
},
Source::Workspace {
workspace,
workspace: is_workspace,
editable,
} => {
if !workspace {
if !is_workspace {
return Err(LoweringError::WorkspaceFalse);
}
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let path = workspace_packages
let path = workspace
.packages()
.get(&requirement.name)
.ok_or(LoweringError::UndeclaredWorkspacePackage)?
.clone();
path_source(path, project_dir, editable.unwrap_or(true))?
path_source(path.root(), workspace.root(), editable.unwrap_or(true))?
}
Source::CatchAll { .. } => {
// Emit a dedicated error message, which is an improvement over Serde's default error.
Expand All @@ -554,16 +562,22 @@ pub(crate) fn lower_requirement(

/// Convert a path string to a path section.
fn path_source(
path: String,
path: impl AsRef<Path>,
project_dir: &Path,
editable: bool,
) -> Result<RequirementSource, LoweringError> {
let url = VerbatimUrl::parse_path(&path, project_dir)?.with_given(path.clone());
let path_buf = PathBuf::from(&path);
let url = VerbatimUrl::parse_path(path.as_ref(), project_dir)?
.with_given(path.as_ref().to_string_lossy().to_string());
let path_buf = path.as_ref().to_path_buf();
let path_buf = path_buf
.absolutize_from(project_dir)
.map_err(|err| LoweringError::AbsolutizeError(path, err))?
.map_err(|err| LoweringError::AbsolutizeError(path.as_ref().to_path_buf(), err))?
.to_path_buf();
if !editable {
// TODO(konsti): Support this. Currently we support `{ workspace = true }`, but we don't
// support `{ workspace = true, editable = false }` since we only collect editables.
return Err(LoweringError::NonEditableWorkspaceDependency);
}
Member:
When does this occur? I'm looking at this from the perspective of: what regressions could we ship that would accidentally impact users?

@konstin (Member, Author) on May 27, 2024:
This branch is only reachable when using `tool.uv.sources` (otherwise `source` is `None`), so it's preview-gated.
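
To make the scenario concrete, here is a hypothetical `pyproject.toml` sketch (the `shared-lib` name, the `packages/` layout, and the workspace `members` list are invented for illustration) that would reach this branch once sources are enabled via `--preview`: the dependency is routed through `tool.uv.sources`, and `editable = false` on a workspace source is the combination rejected by `path_source` with `NonEditableWorkspaceDependency`.

    [project]
    name = "app"
    version = "0.1.0"
    dependencies = ["shared-lib"]

    [tool.uv.workspace]
    # Assumed workspace declaration: shared-lib must be a workspace member for
    # the `workspace = true` source below to resolve to a member directory.
    members = ["packages/*"]

    [tool.uv.sources]
    # `workspace = true` points the requirement at the workspace member's path;
    # `editable = false` is the not-yet-supported combination that errors.
    shared-lib = { workspace = true, editable = false }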

Member:
Can you please expand the TODO?

Ok(RequirementSource::Path {
path: path_buf,
url,
@@ -663,14 +677,17 @@ mod serde_from_and_to_string {
#[cfg(test)]
mod test {
use std::path::Path;
use std::str::FromStr;

use anyhow::Context;
use indoc::indoc;
use insta::assert_snapshot;

use uv_configuration::PreviewMode;
use uv_fs::Simplified;
use uv_normalize::PackageName;

use crate::ProjectWorkspace;
use crate::{ExtrasSpecification, RequirementsSpecification};

fn from_source(
@@ -679,13 +696,19 @@ mod test {
extras: &ExtrasSpecification,
) -> anyhow::Result<RequirementsSpecification> {
let path = uv_fs::absolutize_path(path.as_ref())?;
let project_workspace =
ProjectWorkspace::dummy(path.as_ref(), &PackageName::from_str("foo").unwrap());
let pyproject_toml =
toml::from_str(contents).context("Failed to parse: `pyproject.toml`")?;
RequirementsSpecification::parse_direct_pyproject_toml(
contents,
&pyproject_toml,
project_workspace.workspace(),
extras,
path.as_ref(),
PreviewMode::Enabled,
)
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))
.with_context(|| format!("Failed to parse: `{}`", path.user_display()))?
.context("Missing workspace")
}

fn format_err(input: &str) -> String {
@@ -803,7 +826,6 @@ mod test {
"tqdm",
]
"#};

assert!(from_source(input, "pyproject.toml", &ExtrasSpecification::None).is_ok());
}
