Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 6 additions & 4 deletions crates/uv-resolver/src/lock/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1234,6 +1234,7 @@ impl Lock {
dependency_groups: &BTreeMap<GroupName, Vec<Requirement>>,
dependency_metadata: &DependencyMetadata,
indexes: Option<&IndexLocations>,
path_dependency_indexes: &BTreeSet<UrlString>,
tags: &Tags,
hasher: &HashStrategy,
index: &InMemoryIndex,
Expand Down Expand Up @@ -1445,15 +1446,16 @@ impl Lock {
queue.push_back(root);
}

// Unlike path dependencies, Git dependencies are immutable. Their sources cannot change
// without the hashes changing, so we know their indexes are still present.
while let Some(package) = queue.pop_front() {
// If the lockfile references an index that was not provided, we can't validate it.
if let Source::Registry(index) = &package.id.source {
match index {
RegistrySource::Url(url) => {
if remotes
.as_ref()
.is_some_and(|remotes| !remotes.contains(url))
{
if remotes.as_ref().is_some_and(|remotes| {
!remotes.contains(url) && !path_dependency_indexes.contains(url)
}) {
let name = &package.id.name;
let version = &package
.id
Expand Down
99 changes: 94 additions & 5 deletions crates/uv-workspace/src/workspace.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
//! Resolve the current [`ProjectWorkspace`] or [`Workspace`].

use std::collections::{BTreeMap, BTreeSet};
use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet, VecDeque};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};

Expand All @@ -19,7 +20,7 @@ use uv_warnings::warn_user_once;

use crate::dependency_groups::{DependencyGroupError, FlatDependencyGroups};
use crate::pyproject::{
Project, PyProjectToml, PyprojectTomlError, Sources, ToolUvSources, ToolUvWorkspace,
Project, PyProjectToml, PyprojectTomlError, Source, Sources, ToolUvSources, ToolUvWorkspace,
};

type WorkspaceMembers = Arc<BTreeMap<PackageName, WorkspaceMember>>;
Expand Down Expand Up @@ -765,9 +766,7 @@ impl Workspace {
// project. If it is the current project, it is added as such in the next step.
if let Some(project) = &workspace_pyproject_toml.project {
let pyproject_path = workspace_root.join("pyproject.toml");
let contents = fs_err::read_to_string(&pyproject_path)?;
let pyproject_toml = PyProjectToml::from_string(contents)
.map_err(|err| WorkspaceError::Toml(pyproject_path.clone(), Box::new(err)))?;
let pyproject_toml = pyproject_toml_from_path(pyproject_path.clone())?;

debug!(
"Adding root workspace member: `{}`",
Expand Down Expand Up @@ -931,6 +930,89 @@ impl Workspace {
}
Ok(workspace_members)
}

/// Collects indexes provided as sources in (transitive) path dependencies that
/// have not already been defined in the workspace.
pub fn collect_path_dependency_source_indexes(&self) -> Vec<Index> {
    // Indexes discovered in path dependencies, deduplicated by value.
    let mut collected = FxHashSet::default();
    // Canonicalized dependency paths we have already visited, to break cycles.
    let mut visited = FxHashSet::default();

    // We will only add indexes if we have not already seen the URLs.
    let workspace_urls: FxHashSet<_> = self.indexes.iter().map(Index::url).collect();

    // Breadth-first traversal over `pyproject.toml` files, seeded with the
    // workspace members; path dependencies discovered along the way are
    // appended to the queue.
    let mut queue: VecDeque<_> = self
        .packages
        .values()
        .map(|member| (member.root.clone(), Cow::Borrowed(&member.pyproject_toml)))
        .collect();

    while let Some((base_path, pyproject)) = queue.pop_front() {
        // Only `[tool.uv.sources]` tables can declare path dependencies.
        let Some(tool_uv_sources) = pyproject
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.sources.as_ref())
        else {
            continue;
        };

        for source in tool_uv_sources.inner().values().flat_map(|sources| sources.iter()) {
            let Source::Path { path, .. } = source else {
                continue;
            };

            // Resolve the dependency path relative to the declaring project.
            let dep_path = if path.as_ref().is_absolute() {
                path.as_ref().to_path_buf()
            } else {
                base_path.join(path)
            };

            // Canonicalize path to compare symlinks and relative paths correctly
            let Ok(canonical_path) = dep_path.canonicalize() else {
                debug!(
                    "Failed to canonicalize path dependency path: {}",
                    dep_path.display()
                );
                continue;
            };

            // Prevent infinite loops from circular dependencies
            if !visited.insert(canonical_path.clone()) {
                continue;
            }

            let dep_pyproject_path = canonical_path.join("pyproject.toml");

            match pyproject_toml_from_path(dep_pyproject_path.clone()) {
                Ok(pyproject_toml) => {
                    // Collect any `[[tool.uv.index]]` entries whose URLs the
                    // workspace does not already define.
                    if let Some(dep_indexes) = pyproject_toml
                        .tool
                        .as_ref()
                        .and_then(|tool| tool.uv.as_ref())
                        .and_then(|uv| uv.index.as_ref())
                    {
                        collected.extend(
                            dep_indexes
                                .iter()
                                .filter(|index| !workspace_urls.contains(index.url()))
                                .cloned(),
                        );
                    }

                    // Recurse into the dependency's own path dependencies.
                    queue.push_back((canonical_path, Cow::Owned(pyproject_toml)));
                }
                Err(e) => {
                    debug!(
                        "Failed to read `pyproject.toml` in path dependency `{}`: {}",
                        dep_pyproject_path.display(),
                        e
                    );
                }
            }
        }
    }

    collected.into_iter().collect()
}
}

/// A project in a workspace.
Expand Down Expand Up @@ -1557,6 +1639,13 @@ impl VirtualProject {
}
}

/// Parses a `pyproject.toml` file from a path.
fn pyproject_toml_from_path(pyproject_path: PathBuf) -> Result<PyProjectToml, WorkspaceError> {
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should avoid reparsing pyproject.toml, this is an expensive operation when it runs as part of a noop uv run. See #12096 for context and benchmarking instructions.

Can we reuse the parsed file through using the workspace cache around collect_members_only, or alternatively collect this information after workspace discovery so we can read from a warm cache of workspace members?

let contents = fs_err::read_to_string(&pyproject_path)?;
PyProjectToml::from_string(contents)
.map_err(|err| WorkspaceError::Toml(pyproject_path, Box::new(err)))
}

#[cfg(test)]
#[cfg(unix)] // Avoid path escaping for the unit tests
mod tests {
Expand Down
27 changes: 22 additions & 5 deletions crates/uv/src/commands/project/lock.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@ use uv_configuration::{
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
use uv_distribution_types::{
DependencyMetadata, HashGeneration, Index, IndexLocations, NameRequirementSpecification,
Requirement, UnresolvedRequirementSpecification,
DependencyMetadata, HashGeneration, Index, IndexLocations, IndexUrl,
NameRequirementSpecification, Requirement, UnresolvedRequirementSpecification, UrlString,
};
use uv_git::ResolvedRepositoryReference;
use uv_normalize::{GroupName, PackageName};
Expand Down Expand Up @@ -679,7 +679,7 @@ async fn do_lock(
let existing_lock = if let Some(existing_lock) = existing_lock {
match ValidatedLock::validate(
existing_lock,
target.install_path(),
target,
packages,
&members,
&requirements,
Expand Down Expand Up @@ -892,7 +892,7 @@ impl ValidatedLock {
/// Validate a [`Lock`] against the workspace requirements.
async fn validate<Context: BuildContext>(
lock: Lock,
install_path: &Path,
target: LockTarget<'_>,
packages: &BTreeMap<PackageName, WorkspaceMember>,
members: &[PackageName],
requirements: &[Requirement],
Expand Down Expand Up @@ -1072,10 +1072,26 @@ impl ValidatedLock {
Some(index_locations)
};

// Collect indexes specified in path dependencies
let path_dependency_indexes = if let LockTarget::Workspace(workspace) = target {
workspace
.collect_path_dependency_source_indexes()
.into_iter()
.filter_map(|index| match index.url() {
IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
Some(UrlString::from(index.url().without_credentials().as_ref()))
}
IndexUrl::Path(_) => None,
})
.collect::<BTreeSet<_>>()
} else {
BTreeSet::default()
};

// Determine whether the lockfile satisfies the workspace requirements.
match lock
.satisfies(
install_path,
target.install_path(),
packages,
members,
requirements,
Expand All @@ -1085,6 +1101,7 @@ impl ValidatedLock {
dependency_groups,
dependency_metadata,
indexes,
&path_dependency_indexes,
interpreter.tags()?,
hasher,
index,
Expand Down
Loading