From 203ee153ba4ec2c913825cb3c2894656999e2ad7 Mon Sep 17 00:00:00 2001 From: remimimimimi Date: Mon, 8 Sep 2025 17:37:40 +0300 Subject: [PATCH] fix(upgrade): fails for platform specific targets --- .../tests/integration_rust/upgrade_tests.rs | 114 ++++++- crates/pixi_cli/src/upgrade.rs | 284 ++++++++++++++---- 2 files changed, 333 insertions(+), 65 deletions(-) diff --git a/crates/pixi/tests/integration_rust/upgrade_tests.rs b/crates/pixi/tests/integration_rust/upgrade_tests.rs index 69147d1947..a8d45bd5a0 100644 --- a/crates/pixi/tests/integration_rust/upgrade_tests.rs +++ b/crates/pixi/tests/integration_rust/upgrade_tests.rs @@ -1,14 +1,15 @@ use indexmap::IndexMap; use insta::assert_snapshot; -use pixi_cli::upgrade::{Args, parse_specs}; +use pixi_cli::upgrade::{Args, parse_specs_for_platform}; use pixi_core::Workspace; use rattler_conda_types::Platform; +use tempfile::TempDir; +use url::Url; use crate::common::PixiControl; +use crate::common::package_database::{Package, PackageDatabase}; use crate::setup_tracing; -// This test requires network connection and takes around 40s to -// complete on my machine. 
#[cfg_attr(not(feature = "slow_integration_tests"), ignore)] #[tokio::test] async fn pypi_dependency_index_preserved_on_upgrade() { @@ -32,6 +33,7 @@ async fn pypi_dependency_index_preserved_on_upgrade() { let mut args = Args::default(); args.workspace_config.manifest_path = Some(pixi.manifest_path()); + args.no_install_config.no_install = true; let workspace = Workspace::from_path(&pixi.manifest_path()).unwrap(); @@ -40,7 +42,8 @@ async fn pypi_dependency_index_preserved_on_upgrade() { let mut workspace = workspace.modify().unwrap(); - let (match_specs, pypi_deps) = parse_specs(feature, &args, &workspace).unwrap(); + let (match_specs, pypi_deps) = + parse_specs_for_platform(feature, &args, &workspace, None).unwrap(); let _ = workspace .update_dependencies( @@ -64,3 +67,106 @@ async fn pypi_dependency_index_preserved_on_upgrade() { let redacted_content = content.replace(&Platform::current().to_string(), "[PLATFORM]"); assert_snapshot!(redacted_content); } + +#[cfg_attr(not(feature = "slow_integration_tests"), ignore)] +#[tokio::test] +async fn upgrade_command_updates_platform_specific_version() { + setup_tracing(); + + let platform = Platform::current(); + let mut package_database = PackageDatabase::default(); + package_database.add_package(Package::build("python", "3.12.0").finish()); + let channel_dir = TempDir::new().unwrap(); + package_database + .write_repodata(channel_dir.path()) + .await + .unwrap(); + let channel = Url::from_file_path(channel_dir.path()).unwrap(); + + let pixi = PixiControl::from_manifest(&format!( + r#" + [workspace] + channels = ["{channel}"] + platforms = ["{platform}"] + exclude-newer = "2025-05-19" + + [dependencies] + + [target.{platform}.dependencies] + python = "==3.12" + "#, + platform = platform, + channel = channel, + )) + .unwrap(); + + let mut args = Args::default(); + args.workspace_config.manifest_path = Some(pixi.manifest_path()); + args.no_install_config.no_install = true; + + 
pixi_cli::upgrade::execute(args).await.unwrap(); + + let content = pixi.manifest_contents().unwrap_or_default(); + + assert!( + !content.contains("python = \"==3.12\""), + "python pin should be removed from manifest" + ); + assert!( + content.contains("python = \">=3."), + "python version should be upgraded to a >=3.x,<3.(x+1) range" + ); +} + +#[cfg_attr(not(feature = "slow_integration_tests"), ignore)] +#[tokio::test] +async fn upgrade_command_updates_all_platform_specific_targets() { + setup_tracing(); + + let mut package_database = PackageDatabase::default(); + package_database.add_package(Package::build("python", "3.12.0").finish()); + let channel_dir = TempDir::new().unwrap(); + package_database + .write_repodata(channel_dir.path()) + .await + .unwrap(); + let channel = Url::from_file_path(channel_dir.path()).unwrap(); + + let pixi = PixiControl::from_manifest(&format!( + r#" + [workspace] + channels = ["{channel}"] + platforms = ["linux-64", "win-64"] + exclude-newer = "2025-05-19" + + [target.linux-64.dependencies] + python = "==3.12" + + [target.win-64.dependencies] + python = "==3.12" + "#, + channel = channel, + )) + .unwrap(); + + let mut args = Args::default(); + args.workspace_config.manifest_path = Some(pixi.manifest_path()); + args.no_install_config.no_install = true; + + pixi_cli::upgrade::execute(args).await.unwrap(); + + let content = pixi.manifest_contents().unwrap_or_default(); + + assert!( + !content.contains("==3.12"), + "python pins should be removed from all platform-specific targets" + ); + + let upgraded_occurrences = content.matches("python = \">=3.").count(); + assert!( + upgraded_occurrences == 2, + "expected exactly two upgraded python entries, found {upgraded_occurrences}:\n{content}" + ); + assert!(content.contains("[target.linux-64.dependencies]")); + assert!(content.contains("[target.win-64.dependencies]")); +} diff --git a/crates/pixi_cli/src/upgrade.rs b/crates/pixi_cli/src/upgrade.rs index 3947d574e2..6d2fb9c36f 100644 --- 
a/crates/pixi_cli/src/upgrade.rs +++ b/crates/pixi_cli/src/upgrade.rs @@ -2,20 +2,21 @@ use std::cmp::Ordering; use clap::Parser; use fancy_display::FancyDisplay; -use indexmap::IndexMap; +use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; use miette::{IntoDiagnostic, MietteDiagnostic, WrapErr}; use pep508_rs::{MarkerTree, Requirement}; use pixi_config::ConfigCli; use pixi_core::{ WorkspaceLocator, - diff::LockFileJsonDiff, + diff::{LockFileDiff, LockFileJsonDiff}, + lock_file::UpdateContext, workspace::{MatchSpecs, PypiDeps, WorkspaceMut}, }; use pixi_manifest::{FeatureName, SpecType}; use pixi_pypi_spec::PixiPypiSpec; use pixi_spec::PixiSpec; -use rattler_conda_types::{MatchSpec, StringMatcher}; +use rattler_conda_types::{MatchSpec, Platform, StringMatcher}; use crate::cli_config::{LockFileUpdateConfig, NoInstallConfig, WorkspaceConfig}; @@ -82,6 +83,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { args.specs.feature.fancy_display() ) }; + let feature = feature.clone(); if !args.no_install_config.allow_installs() && (args.lock_file_update_config.lock_file_usage.frozen @@ -92,48 +94,132 @@ pub async fn execute(args: Args) -> miette::Result<()> { ) } - let (match_specs, pypi_deps) = parse_specs(feature, &args, &workspace)?; - let (update_deps, workspace) = match workspace - .update_dependencies( - match_specs, - pypi_deps, - IndexMap::default(), - args.no_install_config.no_install, - &args.lock_file_update_config.lock_file_usage()?, - &args.specs.feature, - &[], - false, - args.dry_run, - ) - .await - { - Ok(update_deps) => ( - update_deps, - if args.dry_run { - workspace.revert().await.into_diagnostic()? - } else { - workspace.save().await.into_diagnostic()? 
- }, - ), - Err(e) => { - return Err(e); + let all_platforms: Vec<Platform> = workspace + .workspace() + .workspace + .value + .workspace + .platforms + .iter() + .copied() + .collect(); + + if let Some(package_names) = &args.specs.packages { + let available_set = collect_available_packages(&feature, &all_platforms); + let available_packages: Vec<String> = available_set.into_iter().collect(); + for package in package_names { + ensure_package_exists(package, &available_packages)?; } - }; + } - // Is there something to report? - if let Some(update_deps) = update_deps { - let diff = update_deps.lock_file_diff; - // Format as json? - if args.json { - let json_diff = LockFileJsonDiff::new(Some(&workspace), diff); + let SpecsByTarget { + default_match_specs, + default_pypi_deps, + per_platform, + } = collect_specs_by_target(&feature, &args, &workspace, &all_platforms)?; + + let lock_file_usage = args.lock_file_update_config.lock_file_usage()?; + + // Capture original lock-file for combined JSON output (non-dry-run). + let original_lock_file = workspace.workspace().load_lock_file().await?; + + let mut printed_any = false; + if !default_match_specs.is_empty() || !default_pypi_deps.is_empty() { + if let Some(update) = workspace + .update_dependencies( + default_match_specs, + default_pypi_deps, + IndexMap::default(), + args.no_install_config.no_install, + &lock_file_usage, + &args.specs.feature, + &[], + false, + args.dry_run, + ) + .await? 
+ { + let diff = update.lock_file_diff; + if !args.json { + diff.print() + .into_diagnostic() + .context("failed to print lock-file diff")?; + } + printed_any = true; + } + } + + for (platform, (platform_match_specs, platform_pypi_deps)) in per_platform { + if platform_match_specs.is_empty() && platform_pypi_deps.is_empty() { + continue; + } + + if let Some(update) = workspace + .update_dependencies( + platform_match_specs, + platform_pypi_deps, + IndexMap::default(), + args.no_install_config.no_install, + &lock_file_usage, + &args.specs.feature, + &[platform], + false, + args.dry_run, + ) + .await? + { + let diff = update.lock_file_diff; + if !args.json { + if printed_any { + println!(); + } + diff.print() + .into_diagnostic() + .context("failed to print lock-file diff")?; + } + printed_any = true; + } + } + + // If JSON is requested, emit a single combined diff once. + if args.json { + if args.dry_run { + // Compute a combined diff by solving once against the final in-memory manifest + // without writing to disk, then revert. Reuse the already-loaded original lockfile. + let derived = UpdateContext::builder(workspace.workspace()) + .with_lock_file(original_lock_file.clone()) + .with_no_install(args.no_install_config.no_install || args.dry_run) + .finish() + .await? + .update() + .await?; + let diff = LockFileDiff::from_lock_files(&original_lock_file, &derived.lock_file); + let json_diff = LockFileJsonDiff::new(Some(workspace.workspace()), diff); let json = serde_json::to_string_pretty(&json_diff).expect("failed to convert to json"); println!("{}", json); + // Revert changes after computing the diff in dry-run mode. + let _ = workspace.revert().await.into_diagnostic()?; } else { - diff.print() - .into_diagnostic() - .context("failed to print lock-file diff")?; + // Reload the resulting lock-file and compute a combined diff against the original. 
+ let saved_workspace = workspace.save().await.into_diagnostic()?; + let updated_lock_file = saved_workspace.load_lock_file().await?; + let diff = LockFileDiff::from_lock_files(&original_lock_file, &updated_lock_file); + let json_diff = LockFileJsonDiff::new(Some(&saved_workspace), diff); + let json = serde_json::to_string_pretty(&json_diff).expect("failed to convert to json"); + println!("{}", json); } + return Ok(()); + } + + // Persist or revert changes at the end (non-JSON path) + let _workspace = if args.dry_run { + workspace.revert().await.into_diagnostic()? } else { + workspace.save().await.into_diagnostic()? + }; + + // Is there something to report? + if !printed_any { eprintln!( "{}All packages are already up-to-date", console::style(console::Emoji("✔ ", "")).green() @@ -143,41 +229,118 @@ pub async fn execute(args: Args) -> miette::Result<()> { Ok(()) } +/// A grouping of dependency specs by target table. +struct SpecsByTarget { + default_match_specs: MatchSpecs, + default_pypi_deps: PypiDeps, + per_platform: IndexMap<Platform, (MatchSpecs, PypiDeps)>, +} + +/// Collects specs for the default target and for each platform, partitioning +/// out default-owned names so platform targets only get platform-owned entries. 
+fn collect_specs_by_target( + feature: &pixi_manifest::Feature, + args: &Args, + workspace: &WorkspaceMut, + platforms: &[Platform], +) -> miette::Result<SpecsByTarget> { + // Determine default-owned names for partitioning + let default_deps_names: IndexSet<_> = feature + .dependencies(SpecType::Run, None) + .map(|deps| deps.keys().cloned().collect()) + .unwrap_or_default(); + let default_pypi_names: IndexSet<_> = feature + .pypi_dependencies(None) + .map(|deps| deps.keys().cloned().collect()) + .unwrap_or_default(); + + // Parse default-target specs (written to default location) + let (default_match_specs, default_pypi_deps) = + parse_specs_for_platform(feature, args, workspace, None)?; + + // Parse per-platform specs and filter out default-owned names + let mut per_platform: IndexMap<Platform, (MatchSpecs, PypiDeps)> = IndexMap::new(); + for &platform in platforms { + let (all_ms, all_py) = parse_specs_for_platform(feature, args, workspace, Some(platform))?; + + let platform_match_specs: MatchSpecs = all_ms + .into_iter() + .filter(|(name, _)| !default_deps_names.contains(name)) + .collect(); + let platform_pypi_deps: PypiDeps = all_py + .into_iter() + .filter(|(name, _)| !default_pypi_names.contains(name)) + .collect(); + + per_platform.insert(platform, (platform_match_specs, platform_pypi_deps)); + } + + Ok(SpecsByTarget { + default_match_specs, + default_pypi_deps, + per_platform, + }) +} + +/// Collects available package names (conda run + pypi) across the default +/// and all platform-specific targets, de-duplicated while preserving order. 
+fn collect_available_packages( + feature: &pixi_manifest::Feature, + platforms: &[Platform], +) -> IndexSet<String> { + let mut available: IndexSet<String> = IndexSet::new(); + + // Default target + if let Some(deps) = feature.dependencies(SpecType::Run, None) { + for (name, _) in deps.into_owned() { + available.insert(name.as_normalized().to_string()); + } + } + if let Some(deps) = feature.pypi_dependencies(None) { + for (name, _) in deps.into_owned() { + available.insert(name.as_normalized().to_string()); + } + } + + // Platform-specific targets + for &platform in platforms { + if let Some(deps) = feature.dependencies(SpecType::Run, Some(platform)) { + for (name, _) in deps.into_owned() { + available.insert(name.as_normalized().to_string()); + } + } + if let Some(deps) = feature.pypi_dependencies(Some(platform)) { + for (name, _) in deps.into_owned() { + available.insert(name.as_normalized().to_string()); + } + } + } + + available +} + /// Parses the specifications for dependencies from the given feature, /// arguments, and workspace. /// /// This function processes the dependencies and PyPi dependencies specified in /// the feature, filters them based on the provided arguments, and returns the /// resulting match specifications and PyPi dependencies. 
-pub fn parse_specs( +pub fn parse_specs_for_platform( feature: &pixi_manifest::Feature, args: &Args, workspace: &WorkspaceMut, + platform: Option<Platform>, ) -> miette::Result<(MatchSpecs, PypiDeps)> { let spec_type = SpecType::Run; let match_spec_iter = feature - .dependencies(spec_type, None) + .dependencies(spec_type, platform) .into_iter() .flat_map(|deps| deps.into_owned()); let pypi_deps_iter = feature - .pypi_dependencies(None) + .pypi_dependencies(platform) .into_iter() .flat_map(|deps| deps.into_owned()); - if let Some(package_names) = &args.specs.packages { - let available_packages = match_spec_iter - .clone() - .map(|(name, _)| name.as_normalized().to_string()) - .chain( - pypi_deps_iter - .clone() - .map(|(name, _)| name.as_normalized().to_string()), - ) - .collect_vec(); - - for package in package_names { - ensure_package_exists(package, &available_packages)? - } - } + // Note: package existence is validated across all platforms in `execute`. let match_specs = match_spec_iter // Don't upgrade excluded packages .filter(|(name, _)| match &args.specs.exclude { @@ -295,11 +458,10 @@ pub fn parse_specs( _ => None, }) .map(|(name, req, pixi_req)| { - let location = workspace.document().pypi_dependency_location( - &name, - None, // TODO: add support for platforms - &args.specs.feature, - ); + let location = + workspace + .document() + .pypi_dependency_location(&name, platform, &args.specs.feature); (name, (req, Some(pixi_req), location)) }) .collect();