Lock all packages in workspace #4016

Merged · 9 commits · Jun 6, 2024
1 change: 1 addition & 0 deletions Cargo.lock

(Generated file; diff not rendered.)

7 changes: 1 addition & 6 deletions crates/uv-distribution/src/metadata/lowering.rs
@@ -222,17 +222,12 @@ fn path_source(
editable: bool,
) -> Result<RequirementSource, LoweringError> {
let url = VerbatimUrl::parse_path(path.as_ref(), project_dir)?
.with_given(path.as_ref().to_string_lossy().to_string());
.with_given(path.as_ref().to_string_lossy());
let path_buf = path.as_ref().to_path_buf();
let path_buf = path_buf
.absolutize_from(project_dir)
.map_err(|err| LoweringError::Absolutize(path.as_ref().to_path_buf(), err))?
.to_path_buf();
//if !editable {
// // TODO(konsti): Support this. Currently we support `{ workspace = true }`, but we don't
// // support `{ workspace = true, editable = false }` since we only collect editables.
// return Err(LoweringError::NonEditableWorkspaceDependency);
//}
Ok(RequirementSource::Path {
path: path_buf,
url,
2 changes: 1 addition & 1 deletion crates/uv-distribution/src/pyproject.rs
@@ -63,7 +63,7 @@ pub struct ToolUv {
pub dev_dependencies: Option<Vec<pep508_rs::Requirement<VerbatimParsedUrl>>>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ToolUvWorkspace {
pub members: Option<Vec<SerdePattern>>,
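The new `Default` derive above is what the implicit-workspace fallback in workspace.rs (next file) relies on: discovery can now construct a `ToolUvWorkspace::default()`, i.e. a `[tool.uv.workspace]` table with every field unset. A minimal sketch with a simplified struct (the real fields are `Option`-wrapped `SerdePattern` globs, not `String`s):

#[derive(Default, Debug, PartialEq)]
struct ToolUvWorkspace {
    // Simplified stand-ins for uv's glob-pattern fields.
    members: Option<Vec<String>>,
    exclude: Option<Vec<String>>,
}

fn main() {
    // An all-`None` workspace table, as used for implicit single-project workspaces.
    assert_eq!(
        ToolUvWorkspace::default(),
        ToolUvWorkspace { members: None, exclude: None }
    );
}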
75 changes: 50 additions & 25 deletions crates/uv-distribution/src/workspace.rs
@@ -98,14 +98,22 @@ impl Workspace {

let (workspace_root, workspace_definition, workspace_pyproject_toml) =
if let Some(workspace) = explicit_root {
// We have found the explicit root immediately.
workspace
} else if pyproject_toml.project.is_none() {
// Without a project, it can't be an implicit root
return Err(WorkspaceError::MissingProject(project_path));
} else if let Some(workspace) = find_workspace(&project_path, stop_discovery_at).await?
{
// We have found an explicit root above.
workspace
} else {
return Err(WorkspaceError::MissingWorkspace(project_path));
// Support implicit single project workspaces.
(
project_path.clone(),
ToolUvWorkspace::default(),
pyproject_toml.clone(),
)
};

debug!(
@@ -145,6 +153,47 @@
})
}

/// Returns the set of requirements that include all packages in the workspace.
pub fn members_as_requirements(&self) -> Vec<Requirement> {
self.packages
.values()
.filter_map(|member| {
let project = member.pyproject_toml.project.as_ref()?;
// Extract the extras available in the project.
let extras = project
.optional_dependencies
.as_ref()
.map(|optional_dependencies| {
// It's a `BTreeMap` so the keys are sorted.
optional_dependencies.keys().cloned().collect::<Vec<_>>()
})
.unwrap_or_default();

let url = VerbatimUrl::from_path(&member.root)
.expect("path is valid URL")
.with_given(member.root.to_string_lossy());
Some(Requirement {
name: project.name.clone(),
extras,
marker: None,
source: RequirementSource::Path {
path: member.root.clone(),
editable: true,
url,
},
origin: None,
})
})
.collect()
}

/// If there is a package at the workspace root, return it.
pub fn root_member(&self) -> Option<&WorkspaceMember> {
self.packages
.values()
.find(|package| package.root == self.root)
}

/// The path to the workspace root, the directory containing the top-level `pyproject.toml` with
/// the `[tool.uv.workspace]` table, or the `pyproject.toml` in an implicit single-project workspace.
pub fn root(&self) -> &PathBuf {
@@ -490,30 +539,6 @@ impl ProjectWorkspace {
&self.workspace().packages[&self.project_name]
}

/// Return the [`Requirement`] entries for the project, which is the current project as
/// editable.
pub fn requirements(&self) -> Vec<Requirement> {
vec![Requirement {
name: self.project_name.clone(),
extras: self.workspace().packages[&self.project_name]
.pyproject_toml
.project
.as_ref()
.and_then(|project| project.optional_dependencies.as_ref())
.map(|optional_dependencies| {
optional_dependencies.keys().cloned().collect::<Vec<_>>()
})
.unwrap_or_default(),
marker: None,
source: RequirementSource::Path {
path: self.project_root.clone(),
editable: true,
url: VerbatimUrl::from_path(&self.project_root).expect("path is valid URL"),
},
origin: None,
}]
}

/// Find the workspace for a project.
pub async fn from_project(
project_path: &Path,
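Read together, the discovery branches above give this precedence: an explicit `[tool.uv.workspace]` root wins; a `pyproject.toml` without a `[project]` table is rejected; otherwise a workspace discovered in a parent directory is used; and failing that, the project becomes an implicit single-project workspace. A hedged sketch of that control flow, using hypothetical simplified types rather than uv's real signatures:

struct PyProjectToml {
    has_project_table: bool,
}
struct FoundWorkspace;

enum Discovered {
    Explicit(FoundWorkspace),   // `[tool.uv.workspace]` at the project itself
    Parent(FoundWorkspace),     // workspace root found in a parent directory
    ImplicitSingleProject,      // fall back to a single-project workspace
}

fn discover(
    explicit_root: Option<FoundWorkspace>,
    pyproject: &PyProjectToml,
    parent_workspace: Option<FoundWorkspace>,
) -> Result<Discovered, &'static str> {
    if let Some(workspace) = explicit_root {
        Ok(Discovered::Explicit(workspace))
    } else if !pyproject.has_project_table {
        // Without a `[project]` table it can't be an implicit root.
        Err("missing [project] table")
    } else if let Some(workspace) = parent_workspace {
        Ok(Discovered::Parent(workspace))
    } else {
        Ok(Discovered::ImplicitSingleProject)
    }
}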
9 changes: 3 additions & 6 deletions crates/uv-requirements/src/upgrade.rs
@@ -6,7 +6,7 @@ use anyhow::Result;
use requirements_txt::RequirementsTxt;
use uv_client::{BaseClientBuilder, Connectivity};
use uv_configuration::Upgrade;
use uv_distribution::ProjectWorkspace;
use uv_distribution::Workspace;
use uv_git::ResolvedRepositoryReference;
use uv_resolver::{Lock, Preference, PreferenceError};

Expand Down Expand Up @@ -64,17 +64,14 @@ pub async fn read_requirements_txt(
}

/// Load the preferred requirements from an existing lockfile, applying the upgrade strategy.
pub async fn read_lockfile(
project: &ProjectWorkspace,
upgrade: &Upgrade,
) -> Result<LockedRequirements> {
pub async fn read_lockfile(workspace: &Workspace, upgrade: &Upgrade) -> Result<LockedRequirements> {
// As an optimization, skip reading the lockfile if we're upgrading all packages anyway.
if upgrade.is_all() {
return Ok(LockedRequirements::default());
}

// If an existing lockfile exists, build up a set of preferences.
let lockfile = project.workspace().root().join("uv.lock");
let lockfile = workspace.root().join("uv.lock");
let lock = match fs_err::tokio::read_to_string(&lockfile).await {
Ok(encoded) => match toml::from_str::<Lock>(&encoded) {
Ok(lock) => lock,
1 change: 1 addition & 0 deletions crates/uv-resolver/Cargo.toml
@@ -46,6 +46,7 @@ petgraph = { workspace = true }
pubgrub = { workspace = true }
rkyv = { workspace = true }
rustc-hash = { workspace = true }
same-file = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
textwrap = { workspace = true }
22 changes: 19 additions & 3 deletions crates/uv-resolver/src/pubgrub/dependencies.rs
@@ -4,12 +4,13 @@ use either::Either;
use itertools::Itertools;
use pubgrub::range::Range;
use rustc_hash::FxHashSet;
use same_file::is_same_file;
use tracing::warn;

use distribution_types::Verbatim;
use pep440_rs::Version;
use pep508_rs::MarkerEnvironment;
use pypi_types::{Requirement, RequirementSource};
use pypi_types::{ParsedUrl, Requirement, RequirementSource, VerbatimParsedUrl};
use uv_configuration::{Constraints, Overrides};
use uv_git::GitResolver;
use uv_normalize::{ExtraName, GroupName, PackageName};
@@ -308,15 +309,30 @@ impl PubGrubRequirement {
version: Range::full(),
})
}
RequirementSource::Path { url, .. } => {
RequirementSource::Path { url, path, .. } => {
let Some(expected) = urls.get(&requirement.name) else {
return Err(ResolveError::DisallowedUrl(
requirement.name.clone(),
url.to_string(),
));
};

if !Urls::is_allowed(&expected.verbatim, url, git) {
let mut is_allowed = Urls::is_allowed(&expected.verbatim, url, git);
if !is_allowed {
if let VerbatimParsedUrl {
parsed_url: ParsedUrl::Path(previous_path),
..
} = &expected
{
// On Windows, we can have two versions of the same path, e.g.
// `C:\Users\KONSTA~1` and `C:\Users\Konstantin`.
if is_same_file(path, &previous_path.path).unwrap_or(false) {
is_allowed = true;
}
}
}

if !is_allowed {
return Err(ResolveError::ConflictingUrlsTransitive(
requirement.name.clone(),
expected.verbatim.verbatim().to_string(),
13 changes: 13 additions & 0 deletions crates/uv-resolver/src/resolver/urls.rs
@@ -1,4 +1,5 @@
use rustc_hash::FxHashMap;
use same_file::is_same_file;
use tracing::debug;
use url::Url;

@@ -66,6 +67,18 @@ impl Urls {
verbatim: url.clone(),
};
if let Some(previous) = urls.insert(requirement.name.clone(), url.clone()) {
if let VerbatimParsedUrl {
parsed_url: ParsedUrl::Path(previous_path),
..
} = &previous
{
// On Windows, we can have two versions of the same path, e.g.
// `C:\Users\KONSTA~1` and `C:\Users\Konstantin`.
if is_same_file(path, &previous_path.path).unwrap_or(false) {
continue;
}
}

if !is_equal(&previous.verbatim, &url.verbatim) {
return Err(ResolveError::ConflictingUrlsDirect(
requirement.name.clone(),
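Both hunks above lean on the `same_file` crate rather than comparing path strings, because on Windows an 8.3 short name such as `C:\Users\KONSTA~1` and the long form `C:\Users\Konstantin` are different strings for the same directory. A small standalone sketch of the helper's behavior (the example paths are illustrative, not taken from uv):

use same_file::is_same_file;
use std::path::Path;

fn refers_to_same_file(a: &Path, b: &Path) -> bool {
    // `is_same_file` compares file identity (device and inode on Unix, file
    // index on Windows), so two spellings of one path compare equal even when
    // the strings differ. An IO error counts as "not the same file", mirroring
    // the `unwrap_or(false)` in the hunks above.
    is_same_file(a, b).unwrap_or(false)
}

fn main() {
    // Two spellings of the same path; a redundant `./` keeps the example
    // portable where a Windows short name would not be.
    let a = Path::new("./Cargo.toml");
    let b = Path::new("Cargo.toml");
    println!("{}", refers_to_same_file(a, b));
}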
55 changes: 34 additions & 21 deletions crates/uv/src/commands/project/lock.rs
@@ -12,9 +12,10 @@ use uv_configuration::{
SetupPyStrategy, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::{ProjectWorkspace, DEV_DEPENDENCIES};
use uv_distribution::{Workspace, DEV_DEPENDENCIES};
use uv_git::GitResolver;
use uv_interpreter::PythonEnvironment;
use uv_normalize::PackageName;
use uv_requirements::upgrade::{read_lockfile, LockedRequirements};
use uv_resolver::{ExcludeNewer, FlatIndex, InMemoryIndex, Lock, OptionsBuilder};
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
@@ -39,14 +40,22 @@ pub(crate) async fn lock(
}

// Find the project requirements.
let project = ProjectWorkspace::discover(&std::env::current_dir()?, None).await?;
let workspace = Workspace::discover(&std::env::current_dir()?, None).await?;

// Discover or create the virtual environment.
let venv = project::init_environment(&project, preview, cache, printer)?;
let venv = project::init_environment(&workspace, preview, cache, printer)?;

// Perform the lock operation.
let root_project_name = workspace.root_member().and_then(|member| {
member
.pyproject_toml()
.project
.as_ref()
.map(|project| project.name.clone())
});
match do_lock(
&project,
root_project_name,
&workspace,
&venv,
&index_locations,
upgrade,
@@ -73,7 +82,8 @@
/// Lock the project requirements into a lockfile.
#[allow(clippy::too_many_arguments)]
pub(super) async fn do_lock(
project: &ProjectWorkspace,
root_project_name: Option<PackageName>,
workspace: &Workspace,
venv: &PythonEnvironment,
index_locations: &IndexLocations,
upgrade: Upgrade,
@@ -83,32 +93,39 @@
printer: Printer,
) -> Result<Lock, ProjectError> {
// When locking, include the project itself (as editable).
let requirements = project
.requirements()
let requirements = workspace
.members_as_requirements()
.into_iter()
.map(UnresolvedRequirementSpecification::from)
.collect::<Vec<_>>();
.collect();
let constraints = vec![];
let overrides = vec![];
let dev = vec![DEV_DEPENDENCIES.clone()];

let source_trees = vec![];
let project_name = project.project_name().clone();

// Determine the supported Python range. If no range is defined, warn and default to the
// current minor version.
let project = root_project_name
.as_ref()
.and_then(|name| workspace.packages().get(name));
let requires_python = if let Some(requires_python) =
project.current_project().project().requires_python.as_ref()
project.and_then(|root_project| root_project.project().requires_python.as_ref())
{
Cow::Borrowed(requires_python)
} else {
let requires_python = VersionSpecifiers::from(
VersionSpecifier::greater_than_equal_version(venv.interpreter().python_minor_version()),
);
warn_user!(
"No `requires-python` field found in `{}`. Defaulting to `{requires_python}`.",
project.current_project().project().name,
);
if let Some(root_project_name) = root_project_name.as_ref() {
warn_user!(
"No `requires-python` field found in `{root_project_name}`. Defaulting to `{requires_python}`.",
);
} else {
warn_user!(
"No `requires-python` field found in workspace. Defaulting to `{requires_python}`.",
);
}
Cow::Owned(requires_python)
};

@@ -143,7 +160,7 @@
let options = OptionsBuilder::new().exclude_newer(exclude_newer).build();

// If an existing lockfile exists, build up a set of preferences.
let LockedRequirements { preferences, git } = read_lockfile(project, &upgrade).await?;
let LockedRequirements { preferences, git } = read_lockfile(workspace, &upgrade).await?;

// Create the Git resolver.
let git = GitResolver::from_refs(git);
@@ -175,7 +192,7 @@
overrides,
dev,
source_trees,
Some(project_name),
root_project_name,
&extras,
preferences,
EmptyInstalledPackages,
@@ -203,11 +220,7 @@
// Write the lockfile to disk.
let lock = Lock::from_resolution_graph(&resolution)?;
let encoded = lock.to_toml()?;
fs_err::tokio::write(
project.workspace().root().join("uv.lock"),
encoded.as_bytes(),
)
.await?;
fs_err::tokio::write(workspace.root().join("uv.lock"), encoded.as_bytes()).await?;

Ok(lock)
}
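One behavioral detail of the reworked `do_lock` above: when the workspace has no root project, or the root project declares no `requires-python`, the lock defaults to `>=` the interpreter's current minor version and warns accordingly. A hedged sketch of that fallback with a hypothetical string-based helper (uv builds real pep440 specifiers, not strings):

fn effective_requires_python(declared: Option<&str>, interpreter_minor: (u64, u64)) -> String {
    match declared {
        // A declared range is used as-is.
        Some(spec) => spec.to_string(),
        // Otherwise default to ">= the current interpreter minor version".
        None => format!(">={}.{}", interpreter_minor.0, interpreter_minor.1),
    }
}

fn main() {
    assert_eq!(effective_requires_python(Some(">=3.8"), (3, 12)), ">=3.8");
    assert_eq!(effective_requires_python(None, (3, 12)), ">=3.12");
}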