From 2b513de58ea673aa80c033466dad1b301263e8fb Mon Sep 17 00:00:00 2001 From: magentaqin Date: Thu, 5 Jun 2025 18:47:04 +0200 Subject: [PATCH 01/11] feata: init crate rattler_upload --- crates/rattler_upload/Cargo.toml | 52 ++ crates/rattler_upload/src/lib.rs | 66 +++ crates/rattler_upload/src/upload/anaconda.rs | 469 +++++++++++++++ .../rattler_upload/src/upload/conda_forge.rs | 170 ++++++ crates/rattler_upload/src/upload/mod.rs | 475 +++++++++++++++ crates/rattler_upload/src/upload/opt.rs | 560 ++++++++++++++++++ crates/rattler_upload/src/upload/package.rs | 90 +++ crates/rattler_upload/src/upload/prefix.rs | 237 ++++++++ .../src/upload/trusted_publishing.rs | 189 ++++++ 9 files changed, 2308 insertions(+) create mode 100644 crates/rattler_upload/Cargo.toml create mode 100644 crates/rattler_upload/src/lib.rs create mode 100644 crates/rattler_upload/src/upload/anaconda.rs create mode 100644 crates/rattler_upload/src/upload/conda_forge.rs create mode 100644 crates/rattler_upload/src/upload/mod.rs create mode 100644 crates/rattler_upload/src/upload/opt.rs create mode 100644 crates/rattler_upload/src/upload/package.rs create mode 100644 crates/rattler_upload/src/upload/prefix.rs create mode 100644 crates/rattler_upload/src/upload/trusted_publishing.rs diff --git a/crates/rattler_upload/Cargo.toml b/crates/rattler_upload/Cargo.toml new file mode 100644 index 0000000000..ee2f49a6df --- /dev/null +++ b/crates/rattler_upload/Cargo.toml @@ -0,0 +1,52 @@ +[package] +name = "rattler_upload" +version = "0.1.0" +categories.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +edition.workspace = true +readme.workspace = true + +[dependencies] +rattler_conda_types = { workspace = true, default-features = false } +rattler_digest = { workspace = true, default-features = false } +rattler_networking = { workspace = true, default-features = false } +rattler_redaction = { workspace = true, default-features = false } 
+rattler_package_streaming = { workspace = true, default-features = false } +# rattler_solve = { workspace = true, default-features = false } +# pixi_config = { git = "https://github.com/prefix-dev/pixi", branch = "main" } +miette = { version = "7.6.0", features = ["fancy"] } +clap = { version = "4.5.37", features = ["derive", "env", "cargo"] } +fs-err = "3.1.0" +futures = "0.3.31" +indicatif = "0.17.11" +opendal = { version = "0.53.1", default-features = false, features = [ + "services-s3", +]} +reqwest-retry = "0.7.0" +tokio-util = { version = "0.7.15", features = ["codec", "compat"] } +reqwest = { version = "0.12.15", default-features = false, features = [ + "multipart", +]} +url = "2.5.4" +tracing = "0.1.41" +reqwest-middleware = { version = "0.4.2", features = ["json"] } +serde_yaml = "0.9.34" +serde = { version = "1.0.219", features = ["derive"] } +serde_json = "1.0.140" +base64 = "0.22.1" +tempfile = "3.19.1" +thiserror = "2.0.12" +tokio = { version = "1.44.2", features = [ + "rt", + "macros", + "rt-multi-thread", + "process", +] } + +[target.'cfg(not(target_os = "windows"))'.dependencies] +sha2 = { version = "0.10.8", features = ["asm"] } + +[target.'cfg(target_os = "windows")'.dependencies] +sha2 = { version = "0.10.8" } diff --git a/crates/rattler_upload/src/lib.rs b/crates/rattler_upload/src/lib.rs new file mode 100644 index 0000000000..125aa522a8 --- /dev/null +++ b/crates/rattler_upload/src/lib.rs @@ -0,0 +1,66 @@ +pub mod upload; + +use upload::opt::{UploadOpts, ServerType, QuetzData, ArtifactoryData, CondaForgeData, PrefixData, AnacondaData }; +use miette; +use rattler_conda_types::package::{ArchiveType}; + +/// Upload. 
+pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> { + if args.package_files.is_empty() { + return Err(miette::miette!("No package files were provided.")); + } + + for package_file in &args.package_files { + if ArchiveType::try_from(package_file).is_none() { + return Err(miette::miette!( + "The file {} does not appear to be a conda package.", + package_file.to_string_lossy() + )); + } + } + + let store = tool_configuration::get_auth_store(args.common.auth_file).into_diagnostic()?; + + match args.server_type { + ServerType::Quetz(quetz_opts) => { + let quetz_data = QuetzData::from(quetz_opts); + upload::upload_package_to_quetz(&store, &args.package_files, quetz_data).await + } + ServerType::Artifactory(artifactory_opts) => { + let artifactory_data = ArtifactoryData::try_from(artifactory_opts)?; + + upload::upload_package_to_artifactory(&store, &args.package_files, artifactory_data) + .await + } + ServerType::Prefix(prefix_opts) => { + let prefix_data = PrefixData::from(prefix_opts); + upload::upload_package_to_prefix(&store, &args.package_files, prefix_data).await + } + ServerType::Anaconda(anaconda_opts) => { + let anaconda_data = AnacondaData::from(anaconda_opts); + upload::upload_package_to_anaconda(&store, &args.package_files, anaconda_data).await + } + ServerType::S3(s3_opts) => { + upload::upload_package_to_s3( + &store, + s3_opts.channel, + s3_opts.endpoint_url, + s3_opts.region, + s3_opts.force_path_style, + s3_opts.access_key_id, + s3_opts.secret_access_key, + s3_opts.session_token, + &args.package_files, + ) + .await + } + ServerType::CondaForge(conda_forge_opts) => { + let conda_forge_data = CondaForgeData::from(conda_forge_opts); + upload::conda_forge::upload_packages_to_conda_forge( + &args.package_files, + conda_forge_data, + ) + .await + } + } +} \ No newline at end of file diff --git a/crates/rattler_upload/src/upload/anaconda.rs b/crates/rattler_upload/src/upload/anaconda.rs new file mode 100644 index 0000000000..21dddc1dde --- 
/dev/null +++ b/crates/rattler_upload/src/upload/anaconda.rs @@ -0,0 +1,469 @@ +use std::borrow::Cow; + +use fs_err::tokio as fs; +use miette::{IntoDiagnostic, miette}; +use rattler_conda_types::PackageName; +use rattler_conda_types::package::AboutJson; +use reqwest::Client; +use reqwest::multipart::Form; +use reqwest::multipart::Part; +use serde::{Deserialize, Serialize}; +use tracing::debug; +use tracing::info; +use url::Url; + +use crate::url_with_trailing_slash::UrlWithTrailingSlash; + +use super::VERSION; +use super::package::ExtractedPackage; + +pub struct Anaconda { + client: Client, + url: UrlWithTrailingSlash, +} + +#[derive(Serialize, Deserialize, Debug)] +struct PackageAttrs<'a> { + package_types: Vec, + name: Cow<'a, PackageName>, + #[serde(flatten)] + about: Cow<'a, AboutJson>, +} + +#[derive(Serialize, Deserialize, Debug)] +struct ReleaseCreationArgs<'a> { + requirements: Vec, + announce: bool, + description: Option, + #[serde(flatten)] + about: Cow<'a, AboutJson>, +} + +#[derive(Serialize, Deserialize, Debug)] +struct FileStageResponse { + post_url: Url, + form_data: serde_json::Map, + dist_id: String, +} + +impl Anaconda { + pub fn new(token: String, url: UrlWithTrailingSlash) -> Self { + let mut default_headers = reqwest::header::HeaderMap::new(); + + default_headers.append( + "Accept", + "application/json".parse().expect("failed to parse"), + ); + default_headers.append( + "Authorization", + format!("token {}", token).parse().expect("failed to parse"), + ); + + default_headers.append( + "x-binstar-api-version", + "1.12.2".parse().expect("failed to parse"), + ); + + let client = Client::builder() + .no_gzip() + .user_agent(format!("rattler-build/{}", VERSION)) + .default_headers(default_headers) + .build() + .expect("failed to create client"); + + Self { client, url } + } +} + +impl Anaconda { + pub async fn create_or_update_package( + &self, + owner: &str, + package: &ExtractedPackage<'_>, + ) -> miette::Result<()> { + let package_name = 
package.package_name(); + debug!("getting package {}/{}", owner, package_name.as_normalized(),); + + let url = self + .url + .join(&format!( + "package/{}/{}", + owner, + package_name.as_normalized(), + )) + .into_diagnostic()?; + + let response = self + .client + .get(url) + .send() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to send request: {}", e))?; + + let exists = match response.status() { + reqwest::StatusCode::OK => true, + reqwest::StatusCode::NOT_FOUND => false, + _ => { + return Err(miette!( + "failed to get existing package: {}", + response.status() + )); + } + }; + + let url = self + .url + .join(&format!( + "package/{}/{}", + owner, + package_name.as_normalized(), + )) + .into_diagnostic()?; + + // See inspect_conda_info_dir in anaconda-client + // https://github.com/Anaconda-Platform/anaconda-client/blob/master/binstar_client/inspect_package/conda.py#L81-L150 + // dumping the entire about.json as public_attrs seems to work fine + let payload = serde_json::json!({ + "public": true, + "publish": false, + "public_attrs": PackageAttrs { + package_types: vec!["conda".to_string()], + name: Cow::Borrowed(package_name), + about: Cow::Borrowed(package.about_json()), + }, + }); + + let req = if exists { + debug!( + "updating package {}/{}", + owner, + package_name.as_normalized(), + ); + self.client.patch(url) + } else { + debug!( + "creating package {}/{}", + owner, + package_name.as_normalized(), + ); + self.client.post(url) + }; + + req.json(&payload) + .send() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to send request: {}", e))? 
+ .error_for_status() + .into_diagnostic() + .map_err(|e| miette!("failed to create package: {}", e))?; + + Ok(()) + } + + pub async fn create_or_update_release( + &self, + owner: &str, + package: &ExtractedPackage<'_>, + ) -> miette::Result<()> { + let package_name = package.package_name(); + let package_version = package.package_version(); + debug!( + "getting release {}/{}/{}", + owner, + package_name.as_normalized(), + package_version + ); + + let url = self + .url + .join(&format!( + "release/{}/{}/{}", + owner, + package_name.as_normalized(), + package_version, + )) + .into_diagnostic()?; + + let response = self + .client + .get(url) + .send() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to send request: {}", e))?; + + let exists = match response.status() { + reqwest::StatusCode::OK => true, + reqwest::StatusCode::NOT_FOUND => false, + _ => { + return Err(miette!( + "failed to get existing release: {}", + response.status() + )); + } + }; + + let url = self + .url + .join(&format!( + "release/{}/{}/{}", + owner, + package_name.as_normalized(), + package_version, + )) + .into_diagnostic()?; + + let req = if exists { + debug!( + "updating release {}/{}/{}", + owner, + package_name.as_normalized(), + package_version + ); + self.client.patch(url).json(&serde_json::json!({ + "requirements": [], + "announce": false, + "description": null, + "public_attrs": Cow::Borrowed(package.about_json()) + })) + } else { + debug!( + "creating release {}/{}/{}", + owner, + package_name.as_normalized(), + package_version + ); + self.client.post(url).json(&ReleaseCreationArgs { + requirements: vec![], + announce: false, + description: None, + about: Cow::Borrowed(package.about_json()), + }) + }; + + req.send() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to send request: {}", e))? 
+ .error_for_status() + .into_diagnostic() + .map_err(|e| miette!("failed to create release: {}", e))?; + + Ok(()) + } + + pub async fn remove_file( + &self, + owner: &str, + package: &ExtractedPackage<'_>, + ) -> miette::Result<()> { + let package_name = package.package_name(); + let package_version = package.package_version(); + let subdir = package + .subdir() + .ok_or(miette!("missing subdir in index.json"))?; + let filename = package + .filename() + .ok_or(miette!("missing filename in index.json"))?; + + debug!( + "removing file {}/{}/{}/{}/{}", + owner, + package_name.as_normalized(), + package_version, + subdir, + filename, + ); + + let url = self + .url + .join(&format!( + "dist/{}/{}/{}/{}/{}", + owner, + package_name.as_normalized(), + package_version, + subdir, + filename, + )) + .into_diagnostic()?; + + self.client + .delete(url) + .send() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to send request: {}", e))? + .error_for_status() + .into_diagnostic() + .map_err(|e| miette!("failed to remove file: {}", e))?; + + Ok(()) + } + + pub async fn upload_file( + &self, + owner: &str, + channels: &[String], + force: bool, + package: &ExtractedPackage<'_>, + ) -> miette::Result { + if channels.is_empty() { + return Err(miette!( + "No channel selected - please specify at least one channel for upload to Anaconda.org" + )); + } + + let sha256 = package.sha256().into_diagnostic()?; + + let package_name = package.package_name(); + let version = package.package_version(); + + let index_json = &package.index_json(); + + let subdir = index_json + .subdir + .as_deref() + .ok_or(miette!("missing subdir in index.json"))?; + + let filename = package.filename().ok_or(miette!("missing filename"))?; + + debug!( + "uploading file {}/{}/{}/{}/{}", + owner, + package_name.as_normalized(), + version, + subdir, + filename, + ); + + let url = self + .url + .join(&format!( + "stage/{}/{}/{}/{}/{}", + owner, + package_name.as_normalized(), + version, + subdir, + 
filename, + )) + .into_diagnostic()?; + + let payload = serde_json::json!({ + "distribution_type": "conda", + "description": null, + "attrs": index_json, + "channels": channels, + "sha256": sha256, + }); + + let resp = self + .client + .post(url) + .json(&payload) + .send() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to send request: {}", e))?; + + match resp.status() { + reqwest::StatusCode::OK => (), + reqwest::StatusCode::CONFLICT => { + if force { + info!( + "file {} already exists, running with --force, removing file and retrying", + filename + ); + self.remove_file(owner, package).await?; + + // We cannot just retry the staging request here, because + // Anaconda might have garbage collected the release / + // package after the deletion of the file. + return Ok(false); + } else { + return Err(miette!( + "file {} already exists, use --force to overwrite", + filename + )); + } + } + _ => { + return Err(miette!( + "failed to stage file, server replied with: {}", + resp.status() + )); + } + } + + let parsed_response: FileStageResponse = resp + .json() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to parse response: {}", e))?; + + debug!("Uploading file to S3 Bucket {}", parsed_response.post_url); + + let base64_md5 = package.base64_md5().into_diagnostic()?; + let file_size = package.file_size().into_diagnostic()?; + + let mut form_data = Form::new(); + + for (key, value) in parsed_response.form_data { + let serde_json::Value::String(value) = value else { + Err(miette!("invalid value in form data: {}", value))? 
+ }; + + form_data = form_data.text(key, value); + } + + let content = fs::read(package.path()).await.into_diagnostic()?; + + form_data = form_data.text("Content-Length", file_size.to_string()); + form_data = form_data.text("Content-MD5", base64_md5.to_string()); + form_data = form_data.part("file", Part::bytes(content)); + + reqwest::Client::new() + .post(parsed_response.post_url) + .multipart(form_data) + .header("Accept", "application/json") + .send() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to send request: {}", e))? + .error_for_status() + .into_diagnostic() + .map_err(|e| miette!("failed to upload file, server replied with: {}", e))?; + + debug!("Committing file {}", filename); + + let url = self + .url + .join(&format!( + "commit/{}/{}/{}/{}/{}", + owner, + package_name.as_normalized(), + version, + subdir, + filename, + )) + .into_diagnostic()?; + + self.client + .post(url) + .json(&serde_json::json!({ + "dist_id": parsed_response.dist_id, + })) + .send() + .await + .into_diagnostic() + .map_err(|e| miette!("failed to send commit: {}", e))? + .error_for_status() + .into_diagnostic() + .map_err(|e| miette!("failed to commit file, server replied with: {}", e))?; + + debug!("File {} uploaded successfully", filename); + + Ok(true) + } +} diff --git a/crates/rattler_upload/src/upload/conda_forge.rs b/crates/rattler_upload/src/upload/conda_forge.rs new file mode 100644 index 0000000000..d34d7b0482 --- /dev/null +++ b/crates/rattler_upload/src/upload/conda_forge.rs @@ -0,0 +1,170 @@ +//! Conda-forge package uploader. 
+ +use std::{ + collections::HashMap, + path::{Path, PathBuf}, +}; + +use crate::{CondaForgeData, upload::get_default_client}; +use fs_err::tokio as fs; +use miette::{IntoDiagnostic, miette}; +use tracing::{debug, info}; + +use super::{ + anaconda, + package::{self}, +}; + +async fn get_channel_target_from_variant_config( + variant_config_path: &Path, +) -> miette::Result { + let variant_config = fs::read_to_string(variant_config_path) + .await + .into_diagnostic()?; + + let variant_config: serde_yaml::Value = + serde_yaml::from_str(&variant_config).into_diagnostic()?; + + let channel_target = variant_config + .get("channel_targets") + .and_then(|v| v.as_str()) + .ok_or_else(|| { + miette!("\"channel_targets\" not found or invalid format in variant_config") + })?; + + let (channel, label) = channel_target + .split_once(' ') + .ok_or_else(|| miette!("Invalid channel_target format"))?; + + if channel != "conda-forge" { + return Err(miette!("channel_target is not a conda-forge channel")); + } + + Ok(label.to_string()) +} + +/// Uploads the package conda forge. 
+pub async fn upload_packages_to_conda_forge( + package_files: &Vec, + conda_forge_data: CondaForgeData, +) -> miette::Result<()> { + let anaconda = anaconda::Anaconda::new( + conda_forge_data.staging_token, + conda_forge_data.anaconda_url, + ); + + let mut channels: HashMap> = HashMap::new(); + + for package_file in package_files { + let package = package::ExtractedPackage::from_package_file(package_file)?; + + let variant_config_path = package + .extraction_dir() + .join("info") + .join("recipe") + .join("variant_config.yaml"); + + let channel = get_channel_target_from_variant_config(&variant_config_path) + .await + .map_err(|e| { + miette!( + "Failed to get channel_targets from variant config for {}: {}", + package.path().display(), + e + ) + })?; + + if !conda_forge_data.dry_run { + anaconda + .create_or_update_package(&conda_forge_data.staging_channel, &package) + .await?; + + anaconda + .create_or_update_release(&conda_forge_data.staging_channel, &package) + .await?; + + anaconda + .upload_file( + &conda_forge_data.staging_channel, + &[channel.clone()], + false, + &package, + ) + .await?; + } else { + debug!( + "Would have uploaded {} to anaconda.org {}/{}", + package.path().display(), + conda_forge_data.staging_channel, + channel + ); + }; + + let dist_name = format!( + "{}/{}", + package.subdir().ok_or(miette::miette!("No subdir found"))?, + package + .filename() + .ok_or(miette::miette!("No filename found"))? 
+ ); + + channels + .entry(channel) + .or_default() + .insert(dist_name, package.sha256().into_diagnostic()?); + } + + for (channel, checksums) in channels { + info!("Uploading packages for conda-forge channel {}", channel); + + let comment_on_error = std::env::var("POST_COMMENT_ON_ERROR").is_ok(); + + let payload = serde_json::json!({ + "feedstock": conda_forge_data.feedstock, + "outputs": checksums, + "channel": channel, + "comment_on_error": comment_on_error, + "hash_type": "sha256", + "provider": conda_forge_data.provider + }); + + let client = get_default_client().into_diagnostic()?; + + debug!( + "Sending payload to validation endpoint: {}", + serde_json::to_string_pretty(&payload).into_diagnostic()? + ); + + if conda_forge_data.dry_run { + debug!( + "Would have sent payload to validation endpoint {}", + conda_forge_data.validation_endpoint + ); + + continue; + } + + let resp = client + .post(conda_forge_data.validation_endpoint.clone()) + .json(&payload) + .header("FEEDSTOCK_TOKEN", conda_forge_data.feedstock_token.clone()) + .send() + .await + .into_diagnostic()?; + + let status = resp.status(); + + let body: serde_json::Value = resp.json().await.into_diagnostic()?; + + debug!( + "Copying to conda-forge/{} returned status code {} with body: {}", + channel, + status, + serde_json::to_string_pretty(&body).into_diagnostic()? + ); + } + + info!("Done uploading packages to conda-forge"); + + Ok(()) +} diff --git a/crates/rattler_upload/src/upload/mod.rs b/crates/rattler_upload/src/upload/mod.rs new file mode 100644 index 0000000000..ee87b32cec --- /dev/null +++ b/crates/rattler_upload/src/upload/mod.rs @@ -0,0 +1,475 @@ +//! The upload module provides the package upload functionality. 
+ +use crate::{AnacondaData, ArtifactoryData, QuetzData, tool_configuration::APP_USER_AGENT}; +use fs_err::tokio as fs; +use futures::TryStreamExt; +use indicatif::{HumanBytes, ProgressState, style::TemplateError}; +use opendal::{Configurator, Operator, services::S3Config}; +use reqwest_retry::{RetryDecision, RetryPolicy, policies::ExponentialBackoff}; +use std::{ + fmt::Write, + net::Ipv4Addr, + path::{Path, PathBuf}, + time::{Duration, SystemTime}, +}; +use tokio_util::io::ReaderStream; + +use miette::{Context, IntoDiagnostic}; +use rattler_networking::{Authentication, AuthenticationStorage}; +use rattler_redaction::Redact; +use reqwest::{Method, StatusCode}; +use tracing::{info, warn}; +use url::Url; + +use crate::upload::package::{ExtractedPackage, sha256_sum}; + +mod anaconda; +pub mod conda_forge; +mod package; +mod prefix; +mod trusted_publishing; +pub mod opt; + +pub use prefix::upload_package_to_prefix; + +const VERSION: &str = env!("CARGO_PKG_VERSION"); + +/// Returns the style to use for a progressbar that is currently in progress. +fn default_bytes_style() -> Result { + Ok(indicatif::ProgressStyle::default_bar() + .template("{spinner:.green} {prefix:20!} [{elapsed_precise}] [{bar:40!.bright.yellow/dim.white}] {bytes:>8} @ {smoothed_bytes_per_sec:8}")? + .progress_chars("━━╾─") + .with_key( + "smoothed_bytes_per_sec", + |s: &ProgressState, w: &mut dyn Write| match (s.pos(), s.elapsed().as_millis()) { + (pos, elapsed_ms) if elapsed_ms > 0 => { + // TODO: log with tracing? + _ = write!(w, "{}/s", HumanBytes((pos as f64 * 1000_f64 / elapsed_ms as f64) as u64)); + } + _ => { + _ = write!(w, "-"); + }, + }, + )) +} + +fn get_default_client() -> Result { + reqwest::Client::builder() + .no_gzip() + .user_agent(APP_USER_AGENT) + .build() +} + +/// Returns a reqwest client with retry middleware. 
+fn get_client_with_retry() -> Result { + let client = reqwest::Client::builder() + .no_gzip() + .user_agent(APP_USER_AGENT) + .build()?; + + Ok(reqwest_middleware::ClientBuilder::new(client) + .with(reqwest_retry::RetryTransientMiddleware::new_with_policy( + reqwest_retry::policies::ExponentialBackoff::builder().build_with_max_retries(3), + )) + .build()) +} + +/// Uploads package files to a Quetz server. +pub async fn upload_package_to_quetz( + storage: &AuthenticationStorage, + package_files: &Vec, + quetz_data: QuetzData, +) -> miette::Result<()> { + let token = match quetz_data.api_key { + Some(api_key) => api_key, + None => match storage.get_by_url(Url::from(quetz_data.url.clone())) { + Ok((_, Some(Authentication::CondaToken(token)))) => token, + Ok((_, Some(_))) => { + return Err(miette::miette!("A Conda token is required for authentication with quetz. + Authentication information found in the keychain / auth file, but it was not a Conda token")); + } + Ok((_, None)) => { + return Err(miette::miette!( + "No quetz api key was given and none was found in the keychain / auth file" + )); + } + Err(e) => { + return Err(miette::miette!( + "Failed to get authentication information form keychain: {e}" + )); + } + }, + }; + + let client = get_default_client().into_diagnostic()?; + + for package_file in package_files { + let upload_url = quetz_data + .url + .join(&format!( + "api/channels/{}/upload/{}", + quetz_data.channels, + package_file.file_name().unwrap().to_string_lossy() + )) + .into_diagnostic()?; + + let hash = sha256_sum(package_file).into_diagnostic()?; + + let prepared_request = client + .request(Method::POST, upload_url) + .query(&[("force", "false"), ("sha256", &hash)]) + .header("X-API-Key", token.clone()); + + send_request_with_retry(prepared_request, package_file).await?; + } + + info!("Packages successfully uploaded to Quetz server"); + + Ok(()) +} + +/// Uploads package files to an Artifactory server. 
+pub async fn upload_package_to_artifactory( + storage: &AuthenticationStorage, + package_files: &Vec, + artifactory_data: ArtifactoryData, +) -> miette::Result<()> { + let token = match artifactory_data.token { + Some(t) => t, + _ => match storage.get_by_url(Url::from(artifactory_data.url.clone())) { + Ok((_, Some(Authentication::BearerToken(token)))) => token, + Ok(( + _, + Some(Authentication::BasicHTTP { + username: _, + password, + }), + )) => { + warn!( + "A bearer token is required for authentication with artifactory. Using the password from the keychain / auth file to authenticate. Consider switching to a bearer token instead for Artifactory." + ); + password + } + Ok((_, Some(_))) => { + return Err(miette::miette!("A bearer token is required for authentication with artifactory. + Authentication information found in the keychain / auth file, but it was not a bearer token")); + } + Ok((_, None)) => { + return Err(miette::miette!( + "No bearer token was given and none was found in the keychain / auth file" + )); + } + Err(e) => { + return Err(miette::miette!( + "Failed to get authentication information form keychain: {e}" + )); + } + }, + }; + + for package_file in package_files { + let package = ExtractedPackage::from_package_file(package_file)?; + + let subdir = package.subdir().ok_or_else(|| { + miette::miette!( + "index.json of package {} has no subdirectory. 
Cannot determine which directory to upload to", + package_file.display() + ) + })?; + + let package_name = package.filename().ok_or(miette::miette!( + "Package file {} has no filename", + package_file.display() + ))?; + + let client = get_default_client().into_diagnostic()?; + + let upload_url = artifactory_data + .url + .join(&format!( + "{}/{}/{}", + artifactory_data.channels, subdir, package_name + )) + .into_diagnostic()?; + + let prepared_request = client + .request(Method::PUT, upload_url) + .bearer_auth(token.clone()); + + send_request_with_retry(prepared_request, package_file).await?; + } + + info!("Packages successfully uploaded to Artifactory server"); + + Ok(()) +} + +/// Uploads package files to an Anaconda server. +pub async fn upload_package_to_anaconda( + storage: &AuthenticationStorage, + package_files: &Vec, + anaconda_data: AnacondaData, +) -> miette::Result<()> { + let token = match anaconda_data.api_key { + Some(token) => token, + None => match storage.get("anaconda.org") { + Ok(Some(Authentication::CondaToken(token))) => token, + Ok(Some(_)) => { + return Err(miette::miette!( + "A Conda token is required for authentication with anaconda.org. + Authentication information found in the keychain / auth file, but it was not a Conda token. + Please create a token on anaconda.org" + )); + } + Ok(None) => { + return Err(miette::miette!( + "No anaconda.org api key was given and no token were found in the keychain / auth file. 
Please create a token on anaconda.org" + )); + } + Err(e) => { + return Err(miette::miette!( + "Failed to get authentication information form keychain: {e}" + )); + } + }, + }; + + let anaconda = anaconda::Anaconda::new(token, anaconda_data.url); + + for package_file in package_files { + loop { + let package = package::ExtractedPackage::from_package_file(package_file)?; + + anaconda + .create_or_update_package(&anaconda_data.owner, &package) + .await?; + + anaconda + .create_or_update_release(&anaconda_data.owner, &package) + .await?; + + let successful = anaconda + .upload_file( + &anaconda_data.owner, + &anaconda_data.channels, + anaconda_data.force, + &package, + ) + .await?; + + // When running with --force and experiencing a conflict error, we delete the conflicting file. + // Anaconda automatically deletes releases / packages when the deletion of a file would leave them empty. + // Therefore, we need to ensure that the release / package still exists before trying to upload again. + if successful { + break; + } + } + } + Ok(()) +} + +/// Uploads a package to a channel in an S3 bucket. 
+#[allow(clippy::too_many_arguments)] +pub async fn upload_package_to_s3( + storage: &AuthenticationStorage, + channel: Url, + endpoint_url: Url, + region: String, + force_path_style: bool, + access_key_id: Option, + secret_access_key: Option, + session_token: Option, + package_files: &Vec, +) -> miette::Result<()> { + let bucket = channel + .host_str() + .ok_or_else(|| miette::miette!("Failed to get host from channel URL"))?; + + if let Some(host_endpoint) = endpoint_url.host_str() { + if host_endpoint.parse::().is_ok() && !force_path_style { + return Err(miette::miette!( + "Endpoint URL {} (IPv4 address) cannot be used without path style, please use --force-path-style", + endpoint_url + )); + } + } + + let mut s3_config = S3Config::default(); + s3_config.root = Some(channel.path().to_string()); + s3_config.bucket = bucket.to_string(); + s3_config.region = Some(region); + s3_config.endpoint = Some(endpoint_url.to_string()); + s3_config.enable_virtual_host_style = !force_path_style; + // Use credentials from the CLI if they are provided. + if let (Some(access_key_id), Some(secret_access_key)) = (access_key_id, secret_access_key) { + s3_config.secret_access_key = Some(secret_access_key); + s3_config.access_key_id = Some(access_key_id); + s3_config.session_token = session_token; + } else { + // If they're not provided, check rattler authentication storage for credentials. 
+ let auth = storage.get_by_url(channel.clone()).into_diagnostic()?; + if let ( + _, + Some(Authentication::S3Credentials { + access_key_id, + secret_access_key, + session_token, + }), + ) = auth + { + s3_config.access_key_id = Some(access_key_id); + s3_config.secret_access_key = Some(secret_access_key); + s3_config.session_token = session_token; + } + } + + let builder = s3_config.into_builder(); + let op = Operator::new(builder).into_diagnostic()?.finish(); + + for package_file in package_files { + let package = ExtractedPackage::from_package_file(package_file)?; + let subdir = package + .subdir() + .ok_or_else(|| miette::miette!("Failed to get subdir"))?; + let filename = package + .filename() + .ok_or_else(|| miette::miette!("Failed to get filename"))?; + let key = format!("{}/{}", subdir, filename); + let body = fs::read(package_file).await.into_diagnostic()?; + op.write_with(&key, body) + .if_not_exists(true) + .await + .into_diagnostic()?; + + tracing::info!( + "Uploaded package to s3://{bucket}{}/{key}", + channel.path().to_string() + ); + } + + Ok(()) +} + +async fn send_request_with_retry( + prepared_request: reqwest::RequestBuilder, + package_file: &Path, +) -> miette::Result { + let retry_policy = ExponentialBackoff::builder().build_with_max_retries(3); + let mut current_try = 0; + + let request_start = SystemTime::now(); + + loop { + let request = prepared_request + .try_clone() + .expect("Could not clone request. 
Does it have a streaming body?"); + let response = send_request(request, package_file).await?; + + if response.status().is_success() { + return Ok(response); + } + + let status = response.status(); + let body = response.text().await.into_diagnostic()?; + let err = miette::miette!( + "Failed to upload package file: {}\nStatus: {}\nBody: {}", + package_file.display(), + status, + body + ); + + // Non-retry status codes + match status { + // Authentication/Authorization errors + StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => { + return Err(miette::miette!("Authentication error: {}", err)); + } + // Resource conflicts + StatusCode::CONFLICT | StatusCode::UNPROCESSABLE_ENTITY => { + return Err(miette::miette!("Resource conflict: {}", err)); + } + // Client errors + StatusCode::BAD_REQUEST | StatusCode::NOT_FOUND | StatusCode::PAYLOAD_TOO_LARGE => { + return Err(miette::miette!("Client error: {}", err)); + } + _ => {} + } + + match retry_policy.should_retry(request_start, current_try) { + RetryDecision::DoNotRetry => { + return Err(err); + } + RetryDecision::Retry { execute_after } => { + let sleep_for = execute_after + .duration_since(SystemTime::now()) + .unwrap_or(Duration::ZERO); + warn!( + "Failed to upload package file: {}\nStatus: {}\nBody: {}\nRetrying in {} seconds", + package_file.display(), + status, + body, + sleep_for.as_secs() + ); + tokio::time::sleep(sleep_for).await; + } + } + + current_try += 1; + } +} + +/// Note that we need to use a regular request. reqwest_retry does not support streaming requests. 
+async fn send_request( + prepared_request: reqwest::RequestBuilder, + package_file: &Path, +) -> miette::Result { + let file = fs::File::open(package_file).await.into_diagnostic()?; + + let file_size = file.metadata().await.into_diagnostic()?.len(); + info!( + "Uploading package file: {} ({})\n", + package_file + .file_name() + .expect("no filename found") + .to_string_lossy(), + HumanBytes(file_size) + ); + let progress_bar = indicatif::ProgressBar::new(file_size) + .with_prefix("Uploading") + .with_style(default_bytes_style().into_diagnostic()?); + + let progress_bar_clone = progress_bar.clone(); + let reader_stream = ReaderStream::new(file) + .inspect_ok(move |bytes| { + progress_bar_clone.inc(bytes.len() as u64); + }) + .inspect_err(|e| { + println!("Error while uploading: {}", e); + }); + + let body = reqwest::Body::wrap_stream(reader_stream); + + let response = prepared_request + .body(body) + .send() + .await + .map_err(|e| e.redact()) + .into_diagnostic()?; + + response + .error_for_status_ref() + .map_err(|e| e.redact()) + .into_diagnostic() + .wrap_err("Server responded with error")?; + + progress_bar.finish(); + info!( + "\nUpload complete for package file: {}", + package_file + .file_name() + .expect("no filename found") + .to_string_lossy() + ); + + Ok(response) +} diff --git a/crates/rattler_upload/src/upload/opt.rs b/crates/rattler_upload/src/upload/opt.rs new file mode 100644 index 0000000000..2463f7adc8 --- /dev/null +++ b/crates/rattler_upload/src/upload/opt.rs @@ -0,0 +1,560 @@ +//! Command-line options. 
+ +use std::{ path::PathBuf }; + +use clap::{Parser, arg }; +use rattler_solve::ChannelPriority; +use url::Url; +use rattler_conda_types::{NamedChannelOrUrl, Platform }; + +/// Container for rattler_solver::ChannelPriority so that it can be parsed +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct ChannelPriorityWrapper { + /// The ChannelPriority value to be used when building the Configuration + pub value: ChannelPriority, +} + + + +/// Common opts that are shared between [`Rebuild`] and [`Build`]` subcommands +#[derive(Parser, Clone, Debug)] +pub struct CommonOpts { + /// Output directory for build artifacts. + #[clap( + long, + env = "CONDA_BLD_PATH", + verbatim_doc_comment, + help_heading = "Modifying result" + )] + pub output_dir: Option, + + /// Enable support for repodata.json.zst + #[clap(long, env = "RATTLER_ZSTD", default_value = "true", hide = true)] + pub use_zstd: bool, + + /// Enable support for repodata.json.bz2 + #[clap(long, env = "RATTLER_BZ2", default_value = "true", hide = true)] + pub use_bz2: bool, + + /// Enable experimental features + #[arg(long, env = "RATTLER_BUILD_EXPERIMENTAL")] + pub experimental: bool, + + /// List of hosts for which SSL certificate verification should be skipped + #[arg(long, value_delimiter = ',')] + pub allow_insecure_host: Option>, + + /// Path to an auth-file to read authentication information from + #[clap(long, env = "RATTLER_AUTH_FILE", hide = true)] + pub auth_file: Option, + + /// Channel priority to use when solving + #[arg(long)] + pub channel_priority: Option, +} + + + +/// Upload options. +#[derive(Parser, Debug)] +pub struct UploadOpts { + /// The package file to upload + #[arg(global = true, required = false)] + pub package_files: Vec, + + /// The server type + #[clap(subcommand)] + pub server_type: ServerType, + + /// Common options. + #[clap(flatten)] + pub common: CommonOpts, +} + +/// Server type. 
+#[derive(Clone, Debug, PartialEq, Parser)] +#[allow(missing_docs)] +pub enum ServerType { + Quetz(QuetzOpts), + Artifactory(ArtifactoryOpts), + Prefix(PrefixOpts), + Anaconda(AnacondaOpts), + S3(S3Opts), + #[clap(hide = true)] + CondaForge(CondaForgeOpts), +} + +/// Upload to a Quetz server. +/// Authentication is used from the keychain / auth-file. +#[derive(Clone, Debug, PartialEq, Parser)] +pub struct QuetzOpts { + /// The URL to your Quetz server + #[arg(short, long, env = "QUETZ_SERVER_URL")] + pub url: Url, + + /// The URL to your channel + #[arg(short, long = "channel", env = "QUETZ_CHANNEL")] + pub channels: String, + + /// The Quetz API key, if none is provided, the token is read from the + /// keychain / auth-file + #[arg(short, long, env = "QUETZ_API_KEY")] + pub api_key: Option, +} + +#[derive(Debug)] +#[allow(missing_docs)] +pub struct QuetzData { + pub url: UrlWithTrailingSlash, + pub channels: String, + pub api_key: Option, +} + +impl From for QuetzData { + fn from(value: QuetzOpts) -> Self { + Self::new(value.url, value.channels, value.api_key) + } +} + +impl QuetzData { + /// Create a new instance of `QuetzData` + pub fn new(url: Url, channels: String, api_key: Option) -> Self { + Self { + url: url.into(), + channels, + api_key, + } + } +} + +#[derive(Clone, Debug, PartialEq, Parser)] +/// Options for uploading to a Artifactory channel. +/// Authentication is used from the keychain / auth-file. 
+pub struct ArtifactoryOpts { + /// The URL to your Artifactory server + #[arg(short, long, env = "ARTIFACTORY_SERVER_URL")] + pub url: Url, + + /// The URL to your channel + #[arg(short, long = "channel", env = "ARTIFACTORY_CHANNEL")] + pub channels: String, + + /// Your Artifactory username + #[arg(long, env = "ARTIFACTORY_USERNAME", hide = true)] + pub username: Option, + + /// Your Artifactory password + #[arg(long, env = "ARTIFACTORY_PASSWORD", hide = true)] + pub password: Option, + + /// Your Artifactory token + #[arg(short, long, env = "ARTIFACTORY_TOKEN")] + pub token: Option, +} + +#[derive(Debug)] +#[allow(missing_docs)] +pub struct ArtifactoryData { + pub url: UrlWithTrailingSlash, + pub channels: String, + pub token: Option, +} + +impl TryFrom for ArtifactoryData { + type Error = miette::Error; + + fn try_from(value: ArtifactoryOpts) -> Result { + let token = match (value.username, value.password, value.token) { + (_, _, Some(token)) => Some(token), + (Some(_), Some(password), _) => { + warn!( + "Using username and password for Artifactory authentication is deprecated, using password as token. Please use an API token instead." + ); + Some(password) + } + (Some(_), None, _) => { + return Err(miette::miette!( + "Artifactory username provided without a password" + )); + } + (None, Some(_), _) => { + return Err(miette::miette!( + "Artifactory password provided without a username" + )); + } + _ => None, + }; + Ok(Self::new(value.url, value.channels, token)) + } +} + +impl ArtifactoryData { + /// Create a new instance of `ArtifactoryData` + pub fn new(url: Url, channels: String, token: Option) -> Self { + Self { + url: url.into(), + channels, + token, + } + } +} + +/// Options for uploading to a prefix.dev server. 
+/// Authentication is used from the keychain / auth-file +#[derive(Clone, Debug, PartialEq, Parser)] +pub struct PrefixOpts { + /// The URL to the prefix.dev server (only necessary for self-hosted + /// instances) + #[arg( + short, + long, + env = "PREFIX_SERVER_URL", + default_value = "https://prefix.dev" + )] + pub url: Url, + + /// The channel to upload the package to + #[arg(short, long, env = "PREFIX_CHANNEL")] + pub channel: String, + + /// The prefix.dev API key, if none is provided, the token is read from the + /// keychain / auth-file + #[arg(short, long, env = "PREFIX_API_KEY")] + pub api_key: Option, + + /// Upload one or more attestation files alongside the package + /// Note: if you add an attestation, you can _only_ upload a single package. + #[arg(long, required = false)] + pub attestation: Option, + + /// Skip upload if package is existed. + #[arg(short, long)] + pub skip_existing: bool, +} + +#[derive(Debug)] +#[allow(missing_docs)] +pub struct PrefixData { + pub url: UrlWithTrailingSlash, + pub channel: String, + pub api_key: Option, + pub attestation: Option, + pub skip_existing: bool, +} + +impl From for PrefixData { + fn from(value: PrefixOpts) -> Self { + Self::new( + value.url, + value.channel, + value.api_key, + value.attestation, + value.skip_existing, + ) + } +} + +impl PrefixData { + /// Create a new instance of `PrefixData` + pub fn new( + url: Url, + channel: String, + api_key: Option, + attestation: Option, + skip_existing: bool, + ) -> Self { + Self { + url: url.into(), + channel, + api_key, + attestation, + skip_existing, + } + } +} + +/// Options for uploading to a Anaconda.org server +#[derive(Clone, Debug, PartialEq, Parser)] +pub struct AnacondaOpts { + /// The owner of the distribution (e.g. conda-forge or your username) + #[arg(short, long, env = "ANACONDA_OWNER")] + pub owner: String, + + /// The channel / label to upload the package to (e.g. 
main / rc) + #[arg(short, long = "channel", env = "ANACONDA_CHANNEL")] + pub channels: Option>, + + /// The Anaconda API key, if none is provided, the token is read from the + /// keychain / auth-file + #[arg(short, long, env = "ANACONDA_API_KEY")] + pub api_key: Option, + + /// The URL to the Anaconda server + #[arg(short, long, env = "ANACONDA_SERVER_URL")] + pub url: Option, + + /// Replace files on conflict + #[arg(long, short, env = "ANACONDA_FORCE")] + pub force: bool, +} + +fn parse_s3_url(value: &str) -> Result { + let url: Url = Url::parse(value).map_err(|_| format!("`{}` isn't a valid URL", value))?; + if url.scheme() == "s3" && url.host_str().is_some() { + Ok(url) + } else { + Err(format!( + "Only S3 URLs of format s3://bucket/... can be used, not `{}`", + value + )) + } +} + +/// Options for uploading to S3 +#[derive(Clone, Debug, PartialEq, Parser)] +pub struct S3Opts { + /// The channel URL in the S3 bucket to upload the package to, e.g., s3://my-bucket/my-channel + #[arg(short, long, env = "S3_CHANNEL", value_parser = parse_s3_url)] + pub channel: Url, + + /// The endpoint URL of the S3 backend + #[arg( + long, + env = "S3_ENDPOINT_URL", + default_value = "https://s3.amazonaws.com" + )] + pub endpoint_url: Url, + + /// The region of the S3 backend + #[arg(long, env = "S3_REGION", default_value = "eu-central-1")] + pub region: String, + + /// Whether to use path-style S3 URLs + #[arg(long, env = "S3_FORCE_PATH_STYLE", default_value = "false")] + pub force_path_style: bool, + + /// The access key ID for the S3 bucket. + #[arg(long, env = "S3_ACCESS_KEY_ID", requires_all = ["secret_access_key"])] + pub access_key_id: Option, + + /// The secret access key for the S3 bucket. + #[arg(long, env = "S3_SECRET_ACCESS_KEY", requires_all = ["access_key_id"])] + pub secret_access_key: Option, + + /// The session token for the S3 bucket. 
+ #[arg(long, env = "S3_SESSION_TOKEN", requires_all = ["access_key_id", "secret_access_key"])] + pub session_token: Option, +} + +#[derive(Debug)] +#[allow(missing_docs)] +pub struct AnacondaData { + pub owner: String, + pub channels: Vec, + pub api_key: Option, + pub url: UrlWithTrailingSlash, + pub force: bool, +} + +impl From for AnacondaData { + fn from(value: AnacondaOpts) -> Self { + Self::new( + value.owner, + value.channels, + value.api_key, + value.url, + value.force, + ) + } +} + +impl AnacondaData { + /// Create a new instance of `PrefixData` + pub fn new( + owner: String, + channel: Option>, + api_key: Option, + url: Option, + force: bool, + ) -> Self { + Self { + owner, + channels: channel.unwrap_or_else(|| vec!["main".to_string()]), + api_key, + url: url + .unwrap_or_else(|| Url::parse("https://api.anaconda.org").unwrap()) + .into(), + force, + } + } +} + +/// Options for uploading to conda-forge +#[derive(Clone, Debug, PartialEq, Parser)] +pub struct CondaForgeOpts { + /// The Anaconda API key + #[arg(long, env = "STAGING_BINSTAR_TOKEN")] + pub staging_token: String, + + /// The feedstock name + #[arg(long, env = "FEEDSTOCK_NAME")] + pub feedstock: String, + + /// The feedstock token + #[arg(long, env = "FEEDSTOCK_TOKEN")] + pub feedstock_token: String, + + /// The staging channel name + #[arg(long, env = "STAGING_CHANNEL")] + pub staging_channel: Option, + + /// The Anaconda Server URL + #[arg(long, env = "ANACONDA_SERVER_URL")] + pub anaconda_url: Option, + + /// The validation endpoint url + #[arg(long, env = "VALIDATION_ENDPOINT")] + pub validation_endpoint: Option, + + /// The CI provider + #[arg(long, env = "CI")] + pub provider: Option, + + /// Dry run, don't actually upload anything + #[arg(long, env = "DRY_RUN")] + pub dry_run: bool, +} + +#[derive(Debug)] +#[allow(missing_docs)] +pub struct CondaForgeData { + pub staging_token: String, + pub feedstock: String, + pub feedstock_token: String, + pub staging_channel: String, + pub 
anaconda_url: UrlWithTrailingSlash, + pub validation_endpoint: Url, + pub provider: Option, + pub dry_run: bool, +} + +impl From for CondaForgeData { + fn from(value: CondaForgeOpts) -> Self { + Self::new( + value.staging_token, + value.feedstock, + value.feedstock_token, + value.staging_channel, + value.anaconda_url, + value.validation_endpoint, + value.provider, + value.dry_run, + ) + } +} + +impl CondaForgeData { + /// Create a new instance of `PrefixData` + #[allow(clippy::too_many_arguments)] + pub fn new( + staging_token: String, + feedstock: String, + feedstock_token: String, + staging_channel: Option, + anaconda_url: Option, + validation_endpoint: Option, + provider: Option, + dry_run: bool, + ) -> Self { + Self { + staging_token, + feedstock, + feedstock_token, + staging_channel: staging_channel.unwrap_or_else(|| "cf-staging".to_string()), + anaconda_url: anaconda_url + .unwrap_or_else(|| Url::parse("https://api.anaconda.org").unwrap()) + .into(), + validation_endpoint: validation_endpoint.unwrap_or_else(|| { + Url::parse("https://conda-forge.herokuapp.com/feedstock-outputs/copy").unwrap() + }), + provider, + dry_run, + } + } +} + +/// Debug options +#[derive(Parser)] +pub struct DebugOpts { + /// Recipe file to debug + #[arg(short, long)] + pub recipe: PathBuf, + + /// Output directory for build artifacts + #[arg(short, long)] + pub output: Option, + + /// The target platform to build for + #[arg(long)] + pub target_platform: Option, + + /// The host platform to build for (defaults to target_platform) + #[arg(long)] + pub host_platform: Option, + + /// The build platform to build for (defaults to current platform) + #[arg(long)] + pub build_platform: Option, + + /// Channels to use when building + #[arg(short = 'c', long = "channel")] + pub channels: Option>, + + /// Common options + #[clap(flatten)] + pub common: CommonOpts, + + /// Name of the specific output to debug (only required when a recipe has multiple outputs) + #[arg(long, help = "Name of the 
specific output to debug")] + pub output_name: Option, +} + +#[derive(Debug, Clone)] +/// Data structure containing the configuration for debugging a recipe +pub struct DebugData { + /// Path to the recipe file to debug + pub recipe_path: PathBuf, + /// Directory where build artifacts will be stored + pub output_dir: PathBuf, + /// Platform where the build is being executed + pub build_platform: Platform, + /// Target platform for the build + pub target_platform: Platform, + /// Host platform for runtime dependencies + pub host_platform: Platform, + /// List of channels to search for dependencies + pub channels: Option>, + /// Common configuration options + pub common: CommonData, + /// Name of the specific output to debug (if recipe has multiple outputs) + pub output_name: Option, +} + +impl DebugData { + /// Generate a new TestData struct from TestOpts and an optional pixi config. + /// TestOpts have higher priority than the pixi config. + pub fn from_opts_and_config(opts: DebugOpts, config: Option) -> Self { + Self { + recipe_path: opts.recipe, + output_dir: opts.output.unwrap_or_else(|| PathBuf::from("./output")), + build_platform: opts.build_platform.unwrap_or(Platform::current()), + target_platform: opts.target_platform.unwrap_or(Platform::current()), + host_platform: opts + .host_platform + .unwrap_or_else(|| opts.target_platform.unwrap_or(Platform::current())), + channels: opts.channels, + common: CommonData::from_opts_and_config(opts.common, config.unwrap_or_default()), + output_name: opts.output_name, + } + } +} diff --git a/crates/rattler_upload/src/upload/package.rs b/crates/rattler_upload/src/upload/package.rs new file mode 100644 index 0000000000..2b2beb0a59 --- /dev/null +++ b/crates/rattler_upload/src/upload/package.rs @@ -0,0 +1,90 @@ +use std::path::Path; + +use base64::{Engine, engine::general_purpose}; +use miette::IntoDiagnostic; +use rattler_conda_types::{ + PackageName, VersionWithSource as PackageVersion, + package::{AboutJson, IndexJson, 
PackageFile}, +}; +use rattler_digest::{Md5, compute_file_digest}; +use sha2::Sha256; + +pub fn sha256_sum(package_file: &Path) -> Result { + Ok(format!( + "{:x}", + compute_file_digest::(&package_file)? + )) +} + +pub struct ExtractedPackage<'a> { + file: &'a Path, + about_json: AboutJson, + index_json: IndexJson, + extraction_dir: tempfile::TempDir, +} + +impl<'a> ExtractedPackage<'a> { + pub fn from_package_file(file: &'a Path) -> miette::Result { + let extraction_dir = tempfile::tempdir().into_diagnostic()?; + + rattler_package_streaming::fs::extract(file, extraction_dir.path()).into_diagnostic()?; + + let index_json = + IndexJson::from_package_directory(extraction_dir.path()).into_diagnostic()?; + + let about_json = + AboutJson::from_package_directory(extraction_dir.path()).into_diagnostic()?; + + Ok(Self { + file, + about_json, + index_json, + extraction_dir, + }) + } + + pub fn path(&self) -> &Path { + self.file + } + + pub fn package_name(&self) -> &PackageName { + &self.index_json.name + } + + pub fn package_version(&self) -> &PackageVersion { + &self.index_json.version + } + + pub fn subdir(&self) -> Option<&String> { + self.index_json.subdir.as_ref() + } + + pub fn sha256(&self) -> Result { + sha256_sum(self.file) + } + + pub fn base64_md5(&self) -> Result { + compute_file_digest::(&self.file) + .map(|digest| general_purpose::STANDARD.encode(digest)) + } + + pub fn filename(&self) -> Option<&str> { + self.file.file_name().and_then(|s| s.to_str()) + } + + pub fn file_size(&self) -> Result { + self.file.metadata().map(|metadata| metadata.len()) + } + + pub fn about_json(&self) -> &AboutJson { + &self.about_json + } + + pub fn index_json(&self) -> &IndexJson { + &self.index_json + } + + pub fn extraction_dir(&self) -> &Path { + self.extraction_dir.path() + } +} diff --git a/crates/rattler_upload/src/upload/prefix.rs b/crates/rattler_upload/src/upload/prefix.rs new file mode 100644 index 0000000000..bc7ba03e3b --- /dev/null +++ 
b/crates/rattler_upload/src/upload/prefix.rs @@ -0,0 +1,237 @@ +use fs_err::tokio as fs; +use futures::TryStreamExt as _; +use miette::IntoDiagnostic as _; +use rattler_networking::{Authentication, AuthenticationStorage}; +use reqwest::{ + StatusCode, + header::{self, HeaderMap, HeaderValue}, +}; +use reqwest_retry::{RetryDecision, RetryPolicy, policies::ExponentialBackoff}; +use std::{ + path::{Path, PathBuf}, + time::{Duration, SystemTime}, +}; +use tokio_util::io::ReaderStream; +use tracing::{info, warn}; +use url::Url; + +use super::opt::{ // ← Import from sibling module + PrefixData +}; + +use crate::{ + upload::{ + default_bytes_style, get_client_with_retry, get_default_client, + trusted_publishing::{TrustedPublishResult, check_trusted_publishing}, + }, +}; + +use super::package::sha256_sum; + +async fn create_upload_form( + package_file: &Path, + filename: &str, + file_size: u64, + progress_bar: indicatif::ProgressBar, + attestation: &Option, +) -> miette::Result { + let mut form = reqwest::multipart::Form::new(); + + let progress_bar_clone = progress_bar.clone(); + let reader_stream = ReaderStream::new(fs::File::open(package_file).await.into_diagnostic()?) 
+ .inspect_ok(move |bytes| { + progress_bar_clone.inc(bytes.len() as u64); + }); + + let hash = sha256_sum(package_file).into_diagnostic()?; + + let mut file_headers = HeaderMap::new(); + file_headers.insert( + header::CONTENT_TYPE, + HeaderValue::from_static("application/octet-stream"), + ); + file_headers.insert( + header::CONTENT_LENGTH, + file_size.to_string().parse().into_diagnostic()?, + ); + file_headers.insert("X-File-Name", filename.parse().unwrap()); + file_headers.insert("X-File-SHA256", hash.parse().unwrap()); + + let file_part = reqwest::multipart::Part::stream_with_length( + reqwest::Body::wrap_stream(reader_stream), + file_size, + ) + .file_name(filename.to_owned()) + .headers(file_headers); + + form = form.part("file", file_part); + + if let Some(attestation) = attestation { + let text = fs::read_to_string(attestation).await.into_diagnostic()?; + form = form.part("attestation", reqwest::multipart::Part::text(text)); + } + + Ok(form) +} + +/// Uploads package files to a prefix.dev server. +pub async fn upload_package_to_prefix( + storage: &AuthenticationStorage, + package_files: &Vec, + prefix_data: PrefixData, +) -> miette::Result<()> { + let check_storage = || { + match storage.get_by_url(Url::from(prefix_data.url.clone())) { + Ok((_, Some(Authentication::BearerToken(token)))) => Ok(token), + Ok((_, Some(_))) => { + Err(miette::miette!("A Conda token is required for authentication with prefix.dev. 
+ Authentication information found in the keychain / auth file, but it was not a Bearer token")) + } + Ok((_, None)) => { + Err(miette::miette!( + "No prefix.dev api key was given and none was found in the keychain / auth file" + )) + } + Err(e) => { + Err(miette::miette!( + "Failed to get authentication information from keychain: {e}" + )) + } + } + }; + + let token = match prefix_data.api_key { + Some(api_key) => api_key, + None => match check_trusted_publishing( + &get_client_with_retry().into_diagnostic()?, + &prefix_data.url, + ) + .await + { + TrustedPublishResult::Configured(token) => token.secret().to_string(), + TrustedPublishResult::Skipped => { + if prefix_data.attestation.is_some() { + return Err(miette::miette!( + "An attestation was provided, but trusted publishing is not configured" + )); + } + check_storage()? + } + TrustedPublishResult::Ignored(err) => { + tracing::warn!("Checked for trusted publishing but failed with {err}"); + if prefix_data.attestation.is_some() { + return Err(miette::miette!( + "An attestation was provided, but trusted publishing is not configured" + )); + } + check_storage()? 
+ } + }, + }; + + for package_file in package_files { + let filename = package_file + .file_name() + .expect("no filename found") + .to_string_lossy() + .to_string(); + let file_size = package_file.metadata().into_diagnostic()?.len(); + let url = prefix_data + .url + .join(&format!("api/v1/upload/{}", prefix_data.channel)) + .into_diagnostic()?; + + let progress_bar = indicatif::ProgressBar::new(file_size) + .with_prefix("Uploading") + .with_style(default_bytes_style().into_diagnostic()?); + + let retry_policy = ExponentialBackoff::builder().build_with_max_retries(3); + let mut current_try = 0; + let request_start = SystemTime::now(); + + loop { + progress_bar.reset(); + + let form = create_upload_form( + package_file, + &filename, + file_size, + progress_bar.clone(), + &prefix_data.attestation, + ) + .await?; + + let response = get_default_client() + .into_diagnostic()? + .post(url.clone()) + .multipart(form) + .bearer_auth(&token) + .send() + .await + .into_diagnostic()?; + + if response.status().is_success() { + progress_bar.finish(); + info!("Upload complete for package file: {}", filename); + break; + } + + let status = response.status(); + let body = response.text().await.into_diagnostic()?; + let err = miette::miette!( + "Failed to upload package file: {}\nStatus: {}\nBody: {}", + package_file.display(), + status, + body + ); + + // Non-retry status codes (identical to send_request_with_retry) + match status { + StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => { + return Err(miette::miette!("Authentication error: {}", err)); + } + StatusCode::CONFLICT => { + // skip if package is existed + if prefix_data.skip_existing { + progress_bar.finish(); + info!("Skip existing package: {}", filename); + return Ok(()); + } else { + return Err(miette::miette!("Resource conflict: {}", err)); + } + } + StatusCode::UNPROCESSABLE_ENTITY => { + return Err(miette::miette!("Resource conflict: {}", err)); + } + StatusCode::BAD_REQUEST | StatusCode::NOT_FOUND | 
StatusCode::PAYLOAD_TOO_LARGE => { + return Err(miette::miette!("Client error: {}", err)); + } + _ => {} + } + + match retry_policy.should_retry(request_start, current_try) { + RetryDecision::DoNotRetry => { + return Err(err); + } + RetryDecision::Retry { execute_after } => { + let sleep_for = execute_after + .duration_since(SystemTime::now()) + .unwrap_or(Duration::ZERO); + warn!( + "Failed to upload package file: {}\nStatus: {}\nBody: {}\nRetrying in {} seconds", + package_file.display(), + status, + body, + sleep_for.as_secs() + ); + tokio::time::sleep(sleep_for).await; + } + } + + current_try += 1; + } + } + + info!("Packages successfully uploaded to prefix.dev server"); + Ok(()) +} diff --git a/crates/rattler_upload/src/upload/trusted_publishing.rs b/crates/rattler_upload/src/upload/trusted_publishing.rs new file mode 100644 index 0000000000..14eb9ecac9 --- /dev/null +++ b/crates/rattler_upload/src/upload/trusted_publishing.rs @@ -0,0 +1,189 @@ +// This code has been adapted from uv under https://github.com/astral-sh/uv/blob/c5caf92edf539a9ebf24d375871178f8f8a0ab93/crates/uv-publish/src/trusted_publishing.rs +// The original code is dual-licensed under Apache-2.0 and MIT + +//! Trusted publishing (via OIDC) with GitHub actions. + +use reqwest::{StatusCode, header}; +use reqwest_middleware::ClientWithMiddleware; +use serde::{Deserialize, Serialize}; +use std::env; +use std::env::VarError; +use std::ffi::OsString; +use thiserror::Error; +use url::Url; + +use crate::{console_utils::github_action_runner, consts}; + +/// If applicable, attempt obtaining a token for trusted publishing. +pub async fn check_trusted_publishing( + client: &ClientWithMiddleware, + prefix_url: &Url, +) -> TrustedPublishResult { + // If we aren't in GitHub Actions, we can't use trusted publishing. 
+ if !github_action_runner() { + return TrustedPublishResult::Skipped; + } + // We could check for credentials from the keyring or netrc the auth middleware first, but + // given that we are in GitHub Actions we check for trusted publishing first. + tracing::debug!( + "Running on GitHub Actions without explicit credentials, checking for trusted publishing" + ); + match get_token(client, prefix_url).await { + Ok(token) => TrustedPublishResult::Configured(token), + Err(err) => { + tracing::debug!("Could not obtain trusted publishing credentials, skipping: {err}"); + TrustedPublishResult::Ignored(err) + } + } +} + +pub enum TrustedPublishResult { + /// We didn't check for trusted publishing. + Skipped, + /// We checked for trusted publishing and found a token. + Configured(TrustedPublishingToken), + /// We checked for optional trusted publishing, but it didn't succeed. + Ignored(TrustedPublishingError), +} + +#[derive(Debug, Error)] +pub enum TrustedPublishingError { + #[error("Environment variable {0} not set, is the `id-token: write` permission missing?")] + MissingEnvVar(&'static str), + #[error("Environment variable {0} is not valid UTF-8: `{1:?}`")] + InvalidEnvVar(&'static str, OsString), + #[error(transparent)] + Url(#[from] url::ParseError), + #[error("Failed to fetch: `{0}`")] + Reqwest(Url, #[source] reqwest::Error), + #[error("Failed to fetch: `{0}`")] + ReqwestMiddleware(Url, #[source] reqwest_middleware::Error), + #[error( + "Prefix.dev returned error code {0}, is trusted publishing correctly configured?\nResponse: {1}" + )] + PrefixDev(StatusCode, String), +} + +impl TrustedPublishingError { + fn from_var_err(env_var: &'static str, err: VarError) -> Self { + match err { + VarError::NotPresent => Self::MissingEnvVar(env_var), + VarError::NotUnicode(os_string) => Self::InvalidEnvVar(env_var, os_string), + } + } +} + +#[derive(Deserialize)] +#[serde(transparent)] +pub struct TrustedPublishingToken(String); + +impl TrustedPublishingToken { + pub fn 
secret(&self) -> &str { + &self.0 + } +} + +/// The response from querying `$ACTIONS_ID_TOKEN_REQUEST_URL&audience=prefix.dev`. +#[derive(Deserialize)] +struct OidcToken { + value: String, +} + +/// The body for querying `$ACTIONS_ID_TOKEN_REQUEST_URL&audience=prefix.dev`. +#[derive(Serialize)] +struct MintTokenRequest { + token: String, +} + +/// Returns the short-lived token to use for uploading. +pub(crate) async fn get_token( + client: &ClientWithMiddleware, + prefix_url: &Url, +) -> Result { + // If this fails, we can skip the audience request. + let oidc_token_request_token = + env::var(consts::ACTIONS_ID_TOKEN_REQUEST_TOKEN).map_err(|err| { + TrustedPublishingError::from_var_err(consts::ACTIONS_ID_TOKEN_REQUEST_TOKEN, err) + })?; + + // Request 1: Get the OIDC token from GitHub. + let oidc_token = get_oidc_token(&oidc_token_request_token, client).await?; + + // Request 2: Get the publishing token from prefix.dev. + let publish_token = get_publish_token(&oidc_token, prefix_url, client).await?; + + tracing::info!("Received token, using trusted publishing"); + + // Tell GitHub Actions to mask the token in any console logs. 
+ if github_action_runner() { + println!("::add-mask::{}", &publish_token.secret()); + } + + Ok(publish_token) +} + +async fn get_oidc_token( + oidc_token_request_token: &str, + client: &ClientWithMiddleware, +) -> Result { + let oidc_token_url = env::var(consts::ACTIONS_ID_TOKEN_REQUEST_URL).map_err(|err| { + TrustedPublishingError::from_var_err(consts::ACTIONS_ID_TOKEN_REQUEST_URL, err) + })?; + let mut oidc_token_url = Url::parse(&oidc_token_url)?; + oidc_token_url + .query_pairs_mut() + .append_pair("audience", "prefix.dev"); + tracing::info!("Querying the trusted publishing OIDC token from {oidc_token_url}"); + let authorization = format!("bearer {oidc_token_request_token}"); + let response = client + .get(oidc_token_url.clone()) + .header(header::AUTHORIZATION, authorization) + .send() + .await + .map_err(|err| TrustedPublishingError::ReqwestMiddleware(oidc_token_url.clone(), err))?; + let oidc_token: OidcToken = response + .error_for_status() + .map_err(|err| TrustedPublishingError::Reqwest(oidc_token_url.clone(), err))? 
+ .json() + .await + .map_err(|err| TrustedPublishingError::Reqwest(oidc_token_url.clone(), err))?; + Ok(oidc_token.value) +} + +async fn get_publish_token( + oidc_token: &str, + prefix_url: &Url, + client: &ClientWithMiddleware, +) -> Result { + let mint_token_url = prefix_url.join("/api/oidc/mint_token")?; + tracing::info!("Querying the trusted publishing upload token from {mint_token_url}"); + let mint_token_payload = MintTokenRequest { + token: oidc_token.to_string(), + }; + + let response = client + .post(mint_token_url.clone()) + .json(&mint_token_payload) + .send() + .await + .map_err(|err| TrustedPublishingError::ReqwestMiddleware(mint_token_url.clone(), err))?; + + // reqwest's implementation of `.json()` also goes through `.bytes()` + let status = response.status(); + let body = response + .bytes() + .await + .map_err(|err| TrustedPublishingError::Reqwest(mint_token_url.clone(), err))?; + + if status.is_success() { + let token = TrustedPublishingToken(String::from_utf8_lossy(&body).to_string()); + Ok(token) + } else { + // An error here means that something is misconfigured, + // so we're showing the body for more context + Err(TrustedPublishingError::PrefixDev( + status, + String::from_utf8_lossy(&body).to_string(), + )) + } +} From 971707420c8892efbabf973c279b2cee8fc43b35 Mon Sep 17 00:00:00 2001 From: magentaqin Date: Fri, 6 Jun 2025 12:56:58 +0200 Subject: [PATCH 02/11] feat: make utils mod public in rattler_conda_type --- crates/rattler_conda_types/src/lib.rs | 2 +- crates/rattler_conda_types/src/utils/mod.rs | 4 +++- .../rattler_conda_types/src/utils/url_with_trailing_slash.rs | 4 ++++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/rattler_conda_types/src/lib.rs b/crates/rattler_conda_types/src/lib.rs index 5f1d5446a7..2a3a406000 100644 --- a/crates/rattler_conda_types/src/lib.rs +++ b/crates/rattler_conda_types/src/lib.rs @@ -15,7 +15,7 @@ mod platform; mod repo_data; mod repo_data_record; mod run_export; -mod utils; +pub mod 
utils; mod version; pub mod version_spec; diff --git a/crates/rattler_conda_types/src/utils/mod.rs b/crates/rattler_conda_types/src/utils/mod.rs index c2cc669aa5..4ed8f049e0 100644 --- a/crates/rattler_conda_types/src/utils/mod.rs +++ b/crates/rattler_conda_types/src/utils/mod.rs @@ -1,6 +1,8 @@ +//! This module contains utility functions for url and serde + pub(crate) mod path; pub(crate) mod serde; pub(crate) mod url; -pub(crate) mod url_with_trailing_slash; +pub mod url_with_trailing_slash; pub(crate) use url_with_trailing_slash::UrlWithTrailingSlash; diff --git a/crates/rattler_conda_types/src/utils/url_with_trailing_slash.rs b/crates/rattler_conda_types/src/utils/url_with_trailing_slash.rs index f061c36e2c..dfaf5869d4 100644 --- a/crates/rattler_conda_types/src/utils/url_with_trailing_slash.rs +++ b/crates/rattler_conda_types/src/utils/url_with_trailing_slash.rs @@ -1,3 +1,7 @@ +//! This module provides utilities for handling and normalizing URLs +//! that are required to end with a trailing slash. It ensures consistency +//! in URL formatting for downstream processing or matching logic. 
+ use std::{ fmt::{Display, Formatter}, ops::Deref, From 0ec0bda431a6044e42548fa3fdcfb0d37bb5aa11 Mon Sep 17 00:00:00 2001 From: magentaqin Date: Tue, 10 Jun 2025 12:34:26 +0200 Subject: [PATCH 03/11] feat: move tool_configuration, console_utils to utils directory --- crates/rattler_upload/Cargo.toml | 4 +- crates/rattler_upload/src/lib.rs | 4 +- crates/rattler_upload/src/upload/anaconda.rs | 3 +- crates/rattler_upload/src/upload/opt.rs | 97 ++++++++++++++++++- .../src/upload/trusted_publishing.rs | 3 +- .../rattler_upload/src/utils/console_utils.rs | 6 ++ crates/rattler_upload/src/utils/consts.rs | 18 ++++ crates/rattler_upload/src/utils/mod.rs | 3 + .../src/utils/tool_configuration.rs | 24 +++++ 9 files changed, 153 insertions(+), 9 deletions(-) create mode 100644 crates/rattler_upload/src/utils/console_utils.rs create mode 100644 crates/rattler_upload/src/utils/consts.rs create mode 100644 crates/rattler_upload/src/utils/mod.rs create mode 100644 crates/rattler_upload/src/utils/tool_configuration.rs diff --git a/crates/rattler_upload/Cargo.toml b/crates/rattler_upload/Cargo.toml index ee2f49a6df..ae2cadcf43 100644 --- a/crates/rattler_upload/Cargo.toml +++ b/crates/rattler_upload/Cargo.toml @@ -14,8 +14,8 @@ rattler_digest = { workspace = true, default-features = false } rattler_networking = { workspace = true, default-features = false } rattler_redaction = { workspace = true, default-features = false } rattler_package_streaming = { workspace = true, default-features = false } -# rattler_solve = { workspace = true, default-features = false } -# pixi_config = { git = "https://github.com/prefix-dev/pixi", branch = "main" } +rattler_solve = { git = "https://github.com/wolfv/rattler", branch = "pub-schema" } +#pixi_config = { git = "https://github.com/prefix-dev/pixi", branch = "main" } miette = { version = "7.6.0", features = ["fancy"] } clap = { version = "4.5.37", features = ["derive", "env", "cargo"] } fs-err = "3.1.0" diff --git 
a/crates/rattler_upload/src/lib.rs b/crates/rattler_upload/src/lib.rs index 125aa522a8..fbc4ec4205 100644 --- a/crates/rattler_upload/src/lib.rs +++ b/crates/rattler_upload/src/lib.rs @@ -1,8 +1,10 @@ pub mod upload; +pub(crate) mod utils; use upload::opt::{UploadOpts, ServerType, QuetzData, ArtifactoryData, CondaForgeData, PrefixData, AnacondaData }; -use miette; +use miette::{IntoDiagnostic}; use rattler_conda_types::package::{ArchiveType}; +use crate::utils::tool_configuration; /// Upload. pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> { diff --git a/crates/rattler_upload/src/upload/anaconda.rs b/crates/rattler_upload/src/upload/anaconda.rs index 21dddc1dde..96cf3aeb8e 100644 --- a/crates/rattler_upload/src/upload/anaconda.rs +++ b/crates/rattler_upload/src/upload/anaconda.rs @@ -11,8 +11,7 @@ use serde::{Deserialize, Serialize}; use tracing::debug; use tracing::info; use url::Url; - -use crate::url_with_trailing_slash::UrlWithTrailingSlash; +use rattler_conda_types::utils::url_with_trailing_slash::UrlWithTrailingSlash; use super::VERSION; use super::package::ExtractedPackage; diff --git a/crates/rattler_upload/src/upload/opt.rs b/crates/rattler_upload/src/upload/opt.rs index 2463f7adc8..585768f4b7 100644 --- a/crates/rattler_upload/src/upload/opt.rs +++ b/crates/rattler_upload/src/upload/opt.rs @@ -1,11 +1,12 @@ //! Command-line options. 
- -use std::{ path::PathBuf }; - +use std::{collections::HashMap, path::PathBuf, str::FromStr}; use clap::{Parser, arg }; use rattler_solve::ChannelPriority; use url::Url; use rattler_conda_types::{NamedChannelOrUrl, Platform }; +use rattler_conda_types::utils::url_with_trailing_slash::UrlWithTrailingSlash; +use rattler_networking::{mirror_middleware, s3_middleware}; +use tracing::warn; /// Container for rattler_solver::ChannelPriority so that it can be parsed #[derive(Clone, PartialEq, Eq, Debug)] @@ -13,7 +14,21 @@ pub struct ChannelPriorityWrapper { /// The ChannelPriority value to be used when building the Configuration pub value: ChannelPriority, } +impl FromStr for ChannelPriorityWrapper { + type Err = String; + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "strict" => Ok(ChannelPriorityWrapper { + value: ChannelPriority::Strict, + }), + "disabled" => Ok(ChannelPriorityWrapper { + value: ChannelPriority::Disabled, + }), + _ => Err("Channel priority must be either 'strict' or 'disabled'".to_string()), + } + } +} /// Common opts that are shared between [`Rebuild`] and [`Build`]` subcommands @@ -53,6 +68,82 @@ pub struct CommonOpts { pub channel_priority: Option, } +#[derive(Clone, Debug)] +#[allow(missing_docs)] +pub struct CommonData { + pub output_dir: PathBuf, + pub experimental: bool, + pub auth_file: Option, + pub channel_priority: ChannelPriority, + pub s3_config: HashMap, + pub mirror_config: HashMap>, + pub allow_insecure_host: Option>, +} + +impl CommonData { + /// Create a new instance of `CommonData` + pub fn new( + output_dir: Option, + experimental: bool, + auth_file: Option, + config: pixi_config::Config, + channel_priority: Option, + allow_insecure_host: Option>, + ) -> Self { + // mirror config + // todo: this is a duplicate in pixi and pixi-pack: do it like in `compute_s3_config` + let mut mirror_config = HashMap::new(); + tracing::debug!("Using mirrors: {:?}", config.mirror_map()); + + fn ensure_trailing_slash(url: 
&url::Url) -> url::Url { + if url.path().ends_with('/') { + url.clone() + } else { + // Do not use `join` because it removes the last element + format!("{}/", url) + .parse() + .expect("Failed to add trailing slash to URL") + } + } + + for (key, value) in config.mirror_map() { + let mut mirrors = Vec::new(); + for v in value { + mirrors.push(mirror_middleware::Mirror { + url: ensure_trailing_slash(v), + no_jlap: false, + no_bz2: false, + no_zstd: false, + max_failures: None, + }); + } + mirror_config.insert(ensure_trailing_slash(key), mirrors); + } + + let s3_config = config.compute_s3_config(); + Self { + output_dir: output_dir.unwrap_or_else(|| PathBuf::from("./output")), + experimental, + auth_file, + s3_config, + mirror_config, + channel_priority: channel_priority.unwrap_or(ChannelPriority::Strict), + allow_insecure_host, + } + } + + fn from_opts_and_config(value: CommonOpts, config: pixi_config::Config) -> Self { + Self::new( + value.output_dir, + value.experimental, + value.auth_file, + config, + value.channel_priority.map(|c| c.value), + value.allow_insecure_host, + ) + } +} + /// Upload options. diff --git a/crates/rattler_upload/src/upload/trusted_publishing.rs b/crates/rattler_upload/src/upload/trusted_publishing.rs index 14eb9ecac9..86c7b2b2c2 100644 --- a/crates/rattler_upload/src/upload/trusted_publishing.rs +++ b/crates/rattler_upload/src/upload/trusted_publishing.rs @@ -12,7 +12,8 @@ use std::ffi::OsString; use thiserror::Error; use url::Url; -use crate::{console_utils::github_action_runner, consts}; +use crate::utils::console_utils::github_action_runner; +use crate::utils::consts; /// If applicable, attempt obtaining a token for trusted publishing. 
pub async fn check_trusted_publishing( diff --git a/crates/rattler_upload/src/utils/console_utils.rs b/crates/rattler_upload/src/utils/console_utils.rs new file mode 100644 index 0000000000..49c352994e --- /dev/null +++ b/crates/rattler_upload/src/utils/console_utils.rs @@ -0,0 +1,6 @@ +use crate::utils::consts; + +/// Checks whether we are on GitHub Actions +pub fn github_action_runner() -> bool { + std::env::var(consts::GITHUB_ACTIONS) == Ok("true".to_string()) +} \ No newline at end of file diff --git a/crates/rattler_upload/src/utils/consts.rs b/crates/rattler_upload/src/utils/consts.rs new file mode 100644 index 0000000000..799a6ac24c --- /dev/null +++ b/crates/rattler_upload/src/utils/consts.rs @@ -0,0 +1,18 @@ +/// A `recipe.yaml` file might be accompanied by a `variants.yaml` file from +/// which we can read variant configuration for that specific recipe.. +pub const VARIANTS_CONFIG_FILE: &str = "variants.yaml"; + +/// The name of the old-style configuration file (`conda_build_config.yaml`). 
+pub const CONDA_BUILD_CONFIG_FILE: &str = "conda_build_config.yaml"; + +/// This env var is set to "true" when run inside a github actions runner +pub const GITHUB_ACTIONS: &str = "GITHUB_ACTIONS"; + +/// This env var contains the oidc token url +pub const ACTIONS_ID_TOKEN_REQUEST_URL: &str = "ACTIONS_ID_TOKEN_REQUEST_URL"; + +/// This env var contains the oidc request token +pub const ACTIONS_ID_TOKEN_REQUEST_TOKEN: &str = "ACTIONS_ID_TOKEN_REQUEST_TOKEN"; + +// This env var determines whether GitHub integration is enabled +pub const RATTLER_BUILD_ENABLE_GITHUB_INTEGRATION: &str = "RATTLER_BUILD_ENABLE_GITHUB_INTEGRATION"; diff --git a/crates/rattler_upload/src/utils/mod.rs b/crates/rattler_upload/src/utils/mod.rs new file mode 100644 index 0000000000..2ce35969ea --- /dev/null +++ b/crates/rattler_upload/src/utils/mod.rs @@ -0,0 +1,3 @@ +pub mod console_utils; +pub mod consts; +pub mod tool_configuration; \ No newline at end of file diff --git a/crates/rattler_upload/src/utils/tool_configuration.rs b/crates/rattler_upload/src/utils/tool_configuration.rs new file mode 100644 index 0000000000..1cc9056090 --- /dev/null +++ b/crates/rattler_upload/src/utils/tool_configuration.rs @@ -0,0 +1,24 @@ +use std::{path::PathBuf, sync::Arc}; +use rattler_networking::{ + AuthenticationStorage, + authentication_storage::{self, AuthenticationStorageError} +}; + +/// Get the authentication storage from the given file +pub fn get_auth_store( + auth_file: Option, +) -> Result { + match auth_file { + Some(auth_file) => { + let mut store = AuthenticationStorage::empty(); + store.add_backend(Arc::from( + authentication_storage::backends::file::FileStorage::from_path(auth_file)?, + )); + Ok(store) + } + None => rattler_networking::AuthenticationStorage::from_env_and_defaults(), + } +} + +/// The user agent to use for the reqwest client +pub const APP_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),); \ No newline at end of file From 
7ee5987134995cd93fda00069c5e575f0f54f38a Mon Sep 17 00:00:00 2001 From: qinmu Date: Wed, 2 Jul 2025 10:25:46 +0200 Subject: [PATCH 04/11] refactor: use rattler_config to replace pixi_config --- Cargo.lock | 408 +++++++++++++++++++--- crates/rattler_upload/Cargo.toml | 4 +- crates/rattler_upload/src/upload/opt.rs | 17 +- crates/rattler_upload/src/utils/consts.rs | 8 - 4 files changed, 371 insertions(+), 66 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 094152f328..bbf54b3884 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -841,6 +841,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "backtrace-ext" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" +dependencies = [ + "backtrace", +] + [[package]] name = "base16ct" version = "0.1.1" @@ -1263,7 +1272,7 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width", + "unicode-width 0.2.1", "windows-sys 0.59.0", ] @@ -1276,7 +1285,7 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width", + "unicode-width 0.2.1", "windows-sys 0.60.2", ] @@ -1394,11 +1403,11 @@ dependencies = [ "clap", "itertools 0.14.0", "rattler_cache", - "rattler_conda_types", + "rattler_conda_types 0.35.4", "rattler_repodata_gateway", - "rattler_solve", + "rattler_solve 2.1.4", "reqwest", - "resolvo", + "resolvo 0.9.1", "serde_json", "tokio", ] @@ -1873,6 +1882,18 @@ dependencies = [ "subtle", ] +[[package]] +name = "file_url" +version = "0.2.3" +source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" +dependencies = [ + "itertools 0.14.0", + "percent-encoding", + "thiserror 2.0.12", + "typed-path 0.10.0", + "url", +] + [[package]] name = "file_url" version = "0.2.5" @@ -1881,7 +1902,7 @@ dependencies = [ "percent-encoding", "rstest", "thiserror 2.0.12", - "typed-path", + "typed-path 0.11.0", "url", ] @@ -2324,6 +2345,16 @@ 
dependencies = [ "crunchy", ] +[[package]] +name = "halfbrown" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8588661a8607108a5ca69cab034063441a0413a0b041c13618a7dd348021ef6f" +dependencies = [ + "hashbrown 0.14.5", + "serde", +] + [[package]] name = "halfbrown" version = "0.3.0" @@ -2345,6 +2376,10 @@ name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] [[package]] name = "hashbrown" @@ -2817,7 +2852,7 @@ checksum = "4adb2ee6ad319a912210a36e56e3623555817bcc877a7e6e8802d1d69c4d8056" dependencies = [ "console 0.16.0", "portable-atomic", - "unicode-width", + "unicode-width 0.2.1", "unit-prefix", "web-time", ] @@ -2878,6 +2913,12 @@ dependencies = [ "serde", ] +[[package]] +name = "is_ci" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" + [[package]] name = "is_terminal_polyfill" version = "1.70.1" @@ -3050,7 +3091,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.53.2", ] [[package]] @@ -3200,6 +3241,36 @@ dependencies = [ "autocfg", ] +[[package]] +name = "miette" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7" +dependencies = [ + "backtrace", + "backtrace-ext", + "cfg-if", + "miette-derive", + "owo-colors", + "supports-color", + "supports-hyperlinks", + "supports-unicode", + "terminal_size", + "textwrap", + "unicode-width 0.1.14", +] + +[[package]] +name = "miette-derive" +version = "7.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "mime" version = "0.3.17" @@ -3592,6 +3663,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" +[[package]] +name = "owo-colors" +version = "4.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e" + [[package]] name = "p256" version = "0.11.1" @@ -3677,7 +3754,7 @@ checksum = "31095ca1f396e3de32745f42b20deef7bc09077f918b085307e8eab6ddd8fb9c" dependencies = [ "once_cell", "serde", - "unicode-width", + "unicode-width 0.2.1", "unscanny", "version-ranges", ] @@ -3698,7 +3775,7 @@ dependencies = [ "serde", "smallvec", "thiserror 1.0.69", - "unicode-width", + "unicode-width 0.2.1", "url", "urlencoding", "version-ranges", @@ -4202,8 +4279,8 @@ dependencies = [ "parking_lot 0.12.4", "rand 0.9.1", "rattler_cache", - "rattler_conda_types", - "rattler_digest", + "rattler_conda_types 0.35.4", + "rattler_digest 1.1.4", "rattler_lock", "rattler_menuinst", "rattler_networking", @@ -4242,11 +4319,11 @@ dependencies = [ "once_cell", "rattler", "rattler_cache", - "rattler_conda_types", + "rattler_conda_types 0.35.4", "rattler_menuinst", "rattler_networking", "rattler_repodata_gateway", - "rattler_solve", + "rattler_solve 2.1.4", "rattler_virtual_packages", "reqwest", "reqwest-middleware", @@ -4271,8 +4348,8 @@ dependencies = [ "fxhash", "itertools 0.14.0", "parking_lot 0.12.4", - "rattler_conda_types", - "rattler_digest", + "rattler_conda_types 0.35.4", + "rattler_digest 1.1.4", "rattler_networking", "rattler_package_streaming", "rayon", @@ -4292,6 +4369,43 @@ dependencies = [ "url", ] +[[package]] +name = "rattler_conda_types" +version = "0.31.0" +source = 
"git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" +dependencies = [ + "chrono", + "dirs", + "file_url 0.2.3", + "fs-err", + "fxhash", + "glob", + "hex", + "indexmap 2.10.0", + "itertools 0.14.0", + "lazy-regex", + "nom 7.1.3", + "purl", + "rattler_digest 1.0.6", + "rattler_macros 1.0.6", + "rattler_redaction 0.1.6", + "regex", + "serde", + "serde-untagged", + "serde_json", + "serde_repr", + "serde_with", + "serde_yaml", + "simd-json 0.14.3", + "smallvec", + "strum", + "tempfile", + "thiserror 2.0.12", + "tracing", + "typed-path 0.10.0", + "url", +] + [[package]] name = "rattler_conda_types" version = "0.35.4" @@ -4302,7 +4416,7 @@ dependencies = [ "criterion", "dirs", "dunce", - "file_url", + "file_url 0.2.5", "fs-err", "fxhash", "glob", @@ -4317,10 +4431,10 @@ dependencies = [ "pathdiff", "purl", "rand 0.9.1", - "rattler_digest", - "rattler_macros", + "rattler_digest 1.1.4", + "rattler_macros 1.0.11", "rattler_package_streaming", - "rattler_redaction", + "rattler_redaction 0.1.12", "rayon", "regex", "rstest", @@ -4330,14 +4444,14 @@ dependencies = [ "serde_repr", "serde_with", "serde_yaml", - "simd-json", + "simd-json 0.15.1", "smallvec", "strum", "tempfile", "thiserror 2.0.12", "tools", "tracing", - "typed-path", + "typed-path 0.11.0", "url", ] @@ -4349,7 +4463,7 @@ dependencies = [ "fs-err", "indexmap 2.10.0", "insta", - "rattler_conda_types", + "rattler_conda_types 0.35.4", "serde", "serde_json", "tempfile", @@ -4359,6 +4473,21 @@ dependencies = [ "url", ] +[[package]] +name = "rattler_digest" +version = "1.0.6" +source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" +dependencies = [ + "blake2", + "digest", + "generic-array", + "hex", + "md-5", + "serde", + "serde_with", + "sha2", +] + [[package]] name = "rattler_digest" version = "1.1.4" @@ -4393,9 +4522,9 @@ dependencies = [ "fxhash", "indicatif", "opendal", - "rattler_conda_types", + "rattler_conda_types 
0.35.4", "rattler_config", - "rattler_digest", + "rattler_digest 1.1.4", "rattler_networking", "rattler_package_streaming", "reqwest", @@ -4430,16 +4559,16 @@ name = "rattler_lock" version = "0.23.9" dependencies = [ "chrono", - "file_url", + "file_url 0.2.5", "fxhash", "indexmap 2.10.0", "insta", "itertools 0.14.0", "pep440_rs", "pep508_rs", - "rattler_conda_types", - "rattler_digest", - "rattler_solve", + "rattler_conda_types 0.35.4", + "rattler_digest 1.1.4", + "rattler_solve 2.1.4", "rstest", "serde", "serde-value", @@ -4449,10 +4578,19 @@ dependencies = [ "serde_yaml", "similar-asserts", "thiserror 2.0.12", - "typed-path", + "typed-path 0.11.0", "url", ] +[[package]] +name = "rattler_macros" +version = "1.0.6" +source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" +dependencies = [ + "quote", + "syn", +] + [[package]] name = "rattler_macros" version = "1.0.11" @@ -4475,7 +4613,7 @@ dependencies = [ "once_cell", "plist", "quick-xml 0.37.5", - "rattler_conda_types", + "rattler_conda_types 0.35.4", "rattler_shell", "regex", "serde", @@ -4538,10 +4676,10 @@ dependencies = [ "futures-util", "insta", "num_cpus", - "rattler_conda_types", - "rattler_digest", + "rattler_conda_types 0.35.4", + "rattler_digest 1.1.4", "rattler_networking", - "rattler_redaction", + "rattler_redaction 0.1.12", "reqwest", "reqwest-middleware", "rstest", @@ -4570,6 +4708,14 @@ dependencies = [ "signal-hook", ] +[[package]] +name = "rattler_redaction" +version = "0.1.6" +source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" +dependencies = [ + "url", +] + [[package]] name = "rattler_redaction" version = "0.1.12" @@ -4596,7 +4742,7 @@ dependencies = [ "chrono", "dashmap", "dirs", - "file_url", + "file_url 0.2.5", "fs-err", "fslock", "futures", @@ -4614,11 +4760,11 @@ dependencies = [ "parking_lot 0.12.4", "pin-project-lite", "rattler_cache", - "rattler_conda_types", + "rattler_conda_types 
0.35.4", "rattler_config", - "rattler_digest", + "rattler_digest 1.1.4", "rattler_networking", - "rattler_redaction", + "rattler_redaction 0.1.12", "reqwest", "reqwest-middleware", "retry-policies", @@ -4666,7 +4812,7 @@ dependencies = [ "indexmap 2.10.0", "insta", "itertools 0.14.0", - "rattler_conda_types", + "rattler_conda_types 0.35.4", "rattler_pty", "serde_json", "shlex", @@ -4678,6 +4824,24 @@ dependencies = [ "tracing", ] +[[package]] +name = "rattler_solve" +version = "1.3.9" +source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" +dependencies = [ + "chrono", + "futures", + "indexmap 2.10.0", + "itertools 0.14.0", + "rattler_conda_types 0.31.0", + "rattler_digest 1.0.6", + "resolvo 0.8.6", + "tempfile", + "thiserror 2.0.12", + "tracing", + "url", +] + [[package]] name = "rattler_solve" version = "2.1.4" @@ -4689,11 +4853,11 @@ dependencies = [ "itertools 0.14.0", "libc", "once_cell", - "rattler_conda_types", - "rattler_digest", + "rattler_conda_types 0.35.4", + "rattler_digest 1.1.4", "rattler_libsolv_c", "rattler_repodata_gateway", - "resolvo", + "resolvo 0.9.1", "rstest", "serde", "serde_json", @@ -4704,6 +4868,39 @@ dependencies = [ "url", ] +[[package]] +name = "rattler_upload" +version = "0.1.0" +dependencies = [ + "base64 0.22.1", + "clap", + "fs-err", + "futures", + "indicatif", + "miette", + "opendal", + "rattler_conda_types 0.35.4", + "rattler_config", + "rattler_digest 1.1.4", + "rattler_networking", + "rattler_package_streaming", + "rattler_redaction 0.1.12", + "rattler_solve 1.3.9", + "reqwest", + "reqwest-middleware", + "reqwest-retry", + "serde", + "serde_json", + "serde_yaml", + "sha2", + "tempfile", + "thiserror 2.0.12", + "tokio", + "tokio-util", + "tracing", + "url", +] + [[package]] name = "rattler_virtual_packages" version = "2.0.17" @@ -4713,7 +4910,7 @@ dependencies = [ "nom 8.0.0", "once_cell", "plist", - "rattler_conda_types", + "rattler_conda_types 0.35.4", "regex", "serde", 
"thiserror 2.0.12", @@ -4929,6 +5126,7 @@ dependencies = [ "js-sys", "log", "mime", + "mime_guess", "native-tls", "percent-encoding", "pin-project-lite", @@ -4992,6 +5190,23 @@ dependencies = [ "wasm-timer", ] +[[package]] +name = "resolvo" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5314eb4b865d39acd1b3cd05eb91b87031bb49fd1278a1bdf8d6680f1389ec29" +dependencies = [ + "ahash", + "bitvec", + "elsa", + "event-listener", + "futures", + "indexmap 2.10.0", + "itertools 0.14.0", + "petgraph", + "tracing", +] + [[package]] name = "resolvo" version = "0.9.1" @@ -5578,6 +5793,16 @@ dependencies = [ "cfg-if", "cpufeatures", "digest", + "sha2-asm", +] + +[[package]] +name = "sha2-asm" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b845214d6175804686b2bd482bcffe96651bb2d1200742b712003504a2dac1ab" +dependencies = [ + "cc", ] [[package]] @@ -5630,6 +5855,21 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" +[[package]] +name = "simd-json" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2bcf6c6e164e81bc7a5d49fc6988b3d515d9e8c07457d7b74ffb9324b9cd40" +dependencies = [ + "getrandom 0.2.16", + "halfbrown 0.2.5", + "ref-cast", + "serde", + "serde_json", + "simdutf8", + "value-trait 0.10.1", +] + [[package]] name = "simd-json" version = "0.15.1" @@ -5637,12 +5877,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c962f626b54771990066e5435ec8331d1462576cd2d1e62f24076ae014f92112" dependencies = [ "getrandom 0.3.3", - "halfbrown", + "halfbrown 0.3.0", "ref-cast", "serde", "serde_json", "simdutf8", - "value-trait", + "value-trait 0.11.0", ] [[package]] @@ -5782,6 +6022,27 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f" +[[package]] +name = "supports-color" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" +dependencies = [ + "is_ci", +] + +[[package]] +name = "supports-hyperlinks" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b" + +[[package]] +name = "supports-unicode" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" + [[package]] name = "syn" version = "2.0.104" @@ -5868,7 +6129,7 @@ version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fce91f2f0ec87dff7e6bcbbeb267439aa1188703003c6055193c821487400432" dependencies = [ - "unicode-width", + "unicode-width 0.2.1", ] [[package]] @@ -5936,6 +6197,26 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "terminal_size" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" +dependencies = [ + "rustix 1.0.7", + "windows-sys 0.59.0", +] + +[[package]] +name = "textwrap" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" +dependencies = [ + "unicode-linebreak", + "unicode-width 0.2.1", +] + [[package]] name = "thiserror" version = "1.0.69" @@ -6138,6 +6419,7 @@ checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df" dependencies = [ "bytes", "futures-core", + "futures-io", "futures-sink", "pin-project-lite", "tokio", @@ -6203,7 +6485,7 @@ dependencies = [ "difference", "dirs", "fslock", - "rattler_digest", + "rattler_digest 1.1.4", "reqwest", 
"tempdir", "tempfile", @@ -6378,6 +6660,12 @@ dependencies = [ "toml 0.8.23", ] +[[package]] +name = "typed-path" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41713888c5ccfd99979fcd1afd47b71652e331b3d4a0e19d30769e80fec76cce" + [[package]] name = "typed-path" version = "0.11.0" @@ -6425,6 +6713,12 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + [[package]] name = "unicode-normalization" version = "0.1.24" @@ -6440,6 +6734,12 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + [[package]] name = "unicode-width" version = "0.2.1" @@ -6519,6 +6819,18 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" +[[package]] +name = "value-trait" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9170e001f458781e92711d2ad666110f153e4e50bfd5cbd02db6547625714187" +dependencies = [ + "float-cmp", + "halfbrown 0.2.5", + "itoa", + "ryu", +] + [[package]] name = "value-trait" version = "0.11.0" @@ -6526,7 +6838,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0508fce11ad19e0aab49ce20b6bec7f8f82902ded31df1c9fc61b90f0eb396b8" dependencies = [ "float-cmp", - "halfbrown", + "halfbrown 0.3.0", "itoa", "ryu", ] diff 
--git a/crates/rattler_upload/Cargo.toml b/crates/rattler_upload/Cargo.toml index ae2cadcf43..8324a66460 100644 --- a/crates/rattler_upload/Cargo.toml +++ b/crates/rattler_upload/Cargo.toml @@ -11,11 +11,11 @@ readme.workspace = true [dependencies] rattler_conda_types = { workspace = true, default-features = false } rattler_digest = { workspace = true, default-features = false } -rattler_networking = { workspace = true, default-features = false } +rattler_networking = { workspace = true, features = ["rattler_config"] } rattler_redaction = { workspace = true, default-features = false } rattler_package_streaming = { workspace = true, default-features = false } +rattler_config = { workspace = true, default-features = false } rattler_solve = { git = "https://github.com/wolfv/rattler", branch = "pub-schema" } -#pixi_config = { git = "https://github.com/prefix-dev/pixi", branch = "main" } miette = { version = "7.6.0", features = ["fancy"] } clap = { version = "4.5.37", features = ["derive", "env", "cargo"] } fs-err = "3.1.0" diff --git a/crates/rattler_upload/src/upload/opt.rs b/crates/rattler_upload/src/upload/opt.rs index 585768f4b7..d10aac5260 100644 --- a/crates/rattler_upload/src/upload/opt.rs +++ b/crates/rattler_upload/src/upload/opt.rs @@ -8,6 +8,9 @@ use rattler_conda_types::utils::url_with_trailing_slash::UrlWithTrailingSlash; use rattler_networking::{mirror_middleware, s3_middleware}; use tracing::warn; +/// The configuration type for rattler-build - just extends rattler / pixi config and can load the same TOML files. 
+pub type Config = rattler_config::config::ConfigBase<()>; + /// Container for rattler_solver::ChannelPriority so that it can be parsed #[derive(Clone, PartialEq, Eq, Debug)] pub struct ChannelPriorityWrapper { @@ -86,14 +89,14 @@ impl CommonData { output_dir: Option, experimental: bool, auth_file: Option, - config: pixi_config::Config, + config: Config, channel_priority: Option, allow_insecure_host: Option>, ) -> Self { // mirror config // todo: this is a duplicate in pixi and pixi-pack: do it like in `compute_s3_config` let mut mirror_config = HashMap::new(); - tracing::debug!("Using mirrors: {:?}", config.mirror_map()); + tracing::debug!("Using mirrors: {:?}", config.mirrors); fn ensure_trailing_slash(url: &url::Url) -> url::Url { if url.path().ends_with('/') { @@ -106,7 +109,7 @@ impl CommonData { } } - for (key, value) in config.mirror_map() { + for (key, value) in &config.mirrors { let mut mirrors = Vec::new(); for v in value { mirrors.push(mirror_middleware::Mirror { @@ -119,8 +122,7 @@ impl CommonData { } mirror_config.insert(ensure_trailing_slash(key), mirrors); } - - let s3_config = config.compute_s3_config(); + let s3_config = rattler_networking::s3_middleware::compute_s3_config(&config.s3_options.0); Self { output_dir: output_dir.unwrap_or_else(|| PathBuf::from("./output")), experimental, @@ -132,7 +134,7 @@ impl CommonData { } } - fn from_opts_and_config(value: CommonOpts, config: pixi_config::Config) -> Self { + fn from_opts_and_config(value: CommonOpts, config: Config) -> Self { Self::new( value.output_dir, value.experimental, @@ -145,7 +147,6 @@ impl CommonData { } - /// Upload options. #[derive(Parser, Debug)] pub struct UploadOpts { @@ -634,7 +635,7 @@ pub struct DebugData { impl DebugData { /// Generate a new TestData struct from TestOpts and an optional pixi config. /// TestOpts have higher priority than the pixi config. 
- pub fn from_opts_and_config(opts: DebugOpts, config: Option) -> Self { + pub fn from_opts_and_config(opts: DebugOpts, config: Option) -> Self { Self { recipe_path: opts.recipe, output_dir: opts.output.unwrap_or_else(|| PathBuf::from("./output")), diff --git a/crates/rattler_upload/src/utils/consts.rs b/crates/rattler_upload/src/utils/consts.rs index 799a6ac24c..1ac2dca18f 100644 --- a/crates/rattler_upload/src/utils/consts.rs +++ b/crates/rattler_upload/src/utils/consts.rs @@ -1,9 +1,4 @@ /// A `recipe.yaml` file might be accompanied by a `variants.yaml` file from -/// which we can read variant configuration for that specific recipe.. -pub const VARIANTS_CONFIG_FILE: &str = "variants.yaml"; - -/// The name of the old-style configuration file (`conda_build_config.yaml`). -pub const CONDA_BUILD_CONFIG_FILE: &str = "conda_build_config.yaml"; /// This env var is set to "true" when run inside a github actions runner pub const GITHUB_ACTIONS: &str = "GITHUB_ACTIONS"; @@ -13,6 +8,3 @@ pub const ACTIONS_ID_TOKEN_REQUEST_URL: &str = "ACTIONS_ID_TOKEN_REQUEST_URL"; /// This env var contains the oidc request token pub const ACTIONS_ID_TOKEN_REQUEST_TOKEN: &str = "ACTIONS_ID_TOKEN_REQUEST_TOKEN"; - -// This env var determines whether GitHub integration is enabled -pub const RATTLER_BUILD_ENABLE_GITHUB_INTEGRATION: &str = "RATTLER_BUILD_ENABLE_GITHUB_INTEGRATION"; From 741197344d617445c57749f86f0a7c6a627aadf4 Mon Sep 17 00:00:00 2001 From: Julian Hofer Date: Thu, 3 Jul 2025 13:57:26 +0200 Subject: [PATCH 05/11] Stop using upstream fork --- Cargo.lock | 253 ++++++------------------------- crates/rattler_upload/Cargo.toml | 2 +- 2 files changed, 46 insertions(+), 209 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bbf54b3884..b691b5f387 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1403,11 +1403,11 @@ dependencies = [ "clap", "itertools 0.14.0", "rattler_cache", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "rattler_repodata_gateway", - "rattler_solve 2.1.4", 
+ "rattler_solve", "reqwest", - "resolvo 0.9.1", + "resolvo", "serde_json", "tokio", ] @@ -1882,18 +1882,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "file_url" -version = "0.2.3" -source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" -dependencies = [ - "itertools 0.14.0", - "percent-encoding", - "thiserror 2.0.12", - "typed-path 0.10.0", - "url", -] - [[package]] name = "file_url" version = "0.2.5" @@ -1902,7 +1890,7 @@ dependencies = [ "percent-encoding", "rstest", "thiserror 2.0.12", - "typed-path 0.11.0", + "typed-path", "url", ] @@ -2345,16 +2333,6 @@ dependencies = [ "crunchy", ] -[[package]] -name = "halfbrown" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8588661a8607108a5ca69cab034063441a0413a0b041c13618a7dd348021ef6f" -dependencies = [ - "hashbrown 0.14.5", - "serde", -] - [[package]] name = "halfbrown" version = "0.3.0" @@ -2376,10 +2354,6 @@ name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash", - "allocator-api2", -] [[package]] name = "hashbrown" @@ -4279,8 +4253,8 @@ dependencies = [ "parking_lot 0.12.4", "rand 0.9.1", "rattler_cache", - "rattler_conda_types 0.35.4", - "rattler_digest 1.1.4", + "rattler_conda_types", + "rattler_digest", "rattler_lock", "rattler_menuinst", "rattler_networking", @@ -4319,11 +4293,11 @@ dependencies = [ "once_cell", "rattler", "rattler_cache", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "rattler_menuinst", "rattler_networking", "rattler_repodata_gateway", - "rattler_solve 2.1.4", + "rattler_solve", "rattler_virtual_packages", "reqwest", "reqwest-middleware", @@ -4348,8 +4322,8 @@ dependencies = [ "fxhash", "itertools 0.14.0", "parking_lot 0.12.4", - "rattler_conda_types 0.35.4", - "rattler_digest 1.1.4", + "rattler_conda_types", + 
"rattler_digest", "rattler_networking", "rattler_package_streaming", "rayon", @@ -4369,43 +4343,6 @@ dependencies = [ "url", ] -[[package]] -name = "rattler_conda_types" -version = "0.31.0" -source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" -dependencies = [ - "chrono", - "dirs", - "file_url 0.2.3", - "fs-err", - "fxhash", - "glob", - "hex", - "indexmap 2.10.0", - "itertools 0.14.0", - "lazy-regex", - "nom 7.1.3", - "purl", - "rattler_digest 1.0.6", - "rattler_macros 1.0.6", - "rattler_redaction 0.1.6", - "regex", - "serde", - "serde-untagged", - "serde_json", - "serde_repr", - "serde_with", - "serde_yaml", - "simd-json 0.14.3", - "smallvec", - "strum", - "tempfile", - "thiserror 2.0.12", - "tracing", - "typed-path 0.10.0", - "url", -] - [[package]] name = "rattler_conda_types" version = "0.35.4" @@ -4416,7 +4353,7 @@ dependencies = [ "criterion", "dirs", "dunce", - "file_url 0.2.5", + "file_url", "fs-err", "fxhash", "glob", @@ -4431,10 +4368,10 @@ dependencies = [ "pathdiff", "purl", "rand 0.9.1", - "rattler_digest 1.1.4", - "rattler_macros 1.0.11", + "rattler_digest", + "rattler_macros", "rattler_package_streaming", - "rattler_redaction 0.1.12", + "rattler_redaction", "rayon", "regex", "rstest", @@ -4444,14 +4381,14 @@ dependencies = [ "serde_repr", "serde_with", "serde_yaml", - "simd-json 0.15.1", + "simd-json", "smallvec", "strum", "tempfile", "thiserror 2.0.12", "tools", "tracing", - "typed-path 0.11.0", + "typed-path", "url", ] @@ -4463,7 +4400,7 @@ dependencies = [ "fs-err", "indexmap 2.10.0", "insta", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "serde", "serde_json", "tempfile", @@ -4473,21 +4410,6 @@ dependencies = [ "url", ] -[[package]] -name = "rattler_digest" -version = "1.0.6" -source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" -dependencies = [ - "blake2", - "digest", - "generic-array", - "hex", - "md-5", - "serde", - 
"serde_with", - "sha2", -] - [[package]] name = "rattler_digest" version = "1.1.4" @@ -4522,9 +4444,9 @@ dependencies = [ "fxhash", "indicatif", "opendal", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "rattler_config", - "rattler_digest 1.1.4", + "rattler_digest", "rattler_networking", "rattler_package_streaming", "reqwest", @@ -4559,16 +4481,16 @@ name = "rattler_lock" version = "0.23.9" dependencies = [ "chrono", - "file_url 0.2.5", + "file_url", "fxhash", "indexmap 2.10.0", "insta", "itertools 0.14.0", "pep440_rs", "pep508_rs", - "rattler_conda_types 0.35.4", - "rattler_digest 1.1.4", - "rattler_solve 2.1.4", + "rattler_conda_types", + "rattler_digest", + "rattler_solve", "rstest", "serde", "serde-value", @@ -4578,19 +4500,10 @@ dependencies = [ "serde_yaml", "similar-asserts", "thiserror 2.0.12", - "typed-path 0.11.0", + "typed-path", "url", ] -[[package]] -name = "rattler_macros" -version = "1.0.6" -source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" -dependencies = [ - "quote", - "syn", -] - [[package]] name = "rattler_macros" version = "1.0.11" @@ -4613,7 +4526,7 @@ dependencies = [ "once_cell", "plist", "quick-xml 0.37.5", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "rattler_shell", "regex", "serde", @@ -4676,10 +4589,10 @@ dependencies = [ "futures-util", "insta", "num_cpus", - "rattler_conda_types 0.35.4", - "rattler_digest 1.1.4", + "rattler_conda_types", + "rattler_digest", "rattler_networking", - "rattler_redaction 0.1.12", + "rattler_redaction", "reqwest", "reqwest-middleware", "rstest", @@ -4708,14 +4621,6 @@ dependencies = [ "signal-hook", ] -[[package]] -name = "rattler_redaction" -version = "0.1.6" -source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" -dependencies = [ - "url", -] - [[package]] name = "rattler_redaction" version = "0.1.12" @@ -4742,7 +4647,7 @@ dependencies = [ "chrono", "dashmap", "dirs", - 
"file_url 0.2.5", + "file_url", "fs-err", "fslock", "futures", @@ -4760,11 +4665,11 @@ dependencies = [ "parking_lot 0.12.4", "pin-project-lite", "rattler_cache", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "rattler_config", - "rattler_digest 1.1.4", + "rattler_digest", "rattler_networking", - "rattler_redaction 0.1.12", + "rattler_redaction", "reqwest", "reqwest-middleware", "retry-policies", @@ -4812,7 +4717,7 @@ dependencies = [ "indexmap 2.10.0", "insta", "itertools 0.14.0", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "rattler_pty", "serde_json", "shlex", @@ -4824,24 +4729,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "rattler_solve" -version = "1.3.9" -source = "git+https://github.com/wolfv/rattler?branch=pub-schema#61b2040c64a1e18ec475c447273dc9f368d9066c" -dependencies = [ - "chrono", - "futures", - "indexmap 2.10.0", - "itertools 0.14.0", - "rattler_conda_types 0.31.0", - "rattler_digest 1.0.6", - "resolvo 0.8.6", - "tempfile", - "thiserror 2.0.12", - "tracing", - "url", -] - [[package]] name = "rattler_solve" version = "2.1.4" @@ -4853,11 +4740,11 @@ dependencies = [ "itertools 0.14.0", "libc", "once_cell", - "rattler_conda_types 0.35.4", - "rattler_digest 1.1.4", + "rattler_conda_types", + "rattler_digest", "rattler_libsolv_c", "rattler_repodata_gateway", - "resolvo 0.9.1", + "resolvo", "rstest", "serde", "serde_json", @@ -4879,13 +4766,13 @@ dependencies = [ "indicatif", "miette", "opendal", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "rattler_config", - "rattler_digest 1.1.4", + "rattler_digest", "rattler_networking", "rattler_package_streaming", - "rattler_redaction 0.1.12", - "rattler_solve 1.3.9", + "rattler_redaction", + "rattler_solve", "reqwest", "reqwest-middleware", "reqwest-retry", @@ -4910,7 +4797,7 @@ dependencies = [ "nom 8.0.0", "once_cell", "plist", - "rattler_conda_types 0.35.4", + "rattler_conda_types", "regex", "serde", "thiserror 2.0.12", @@ -5190,23 +5077,6 @@ dependencies = [ 
"wasm-timer", ] -[[package]] -name = "resolvo" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5314eb4b865d39acd1b3cd05eb91b87031bb49fd1278a1bdf8d6680f1389ec29" -dependencies = [ - "ahash", - "bitvec", - "elsa", - "event-listener", - "futures", - "indexmap 2.10.0", - "itertools 0.14.0", - "petgraph", - "tracing", -] - [[package]] name = "resolvo" version = "0.9.1" @@ -5855,21 +5725,6 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" -[[package]] -name = "simd-json" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2bcf6c6e164e81bc7a5d49fc6988b3d515d9e8c07457d7b74ffb9324b9cd40" -dependencies = [ - "getrandom 0.2.16", - "halfbrown 0.2.5", - "ref-cast", - "serde", - "serde_json", - "simdutf8", - "value-trait 0.10.1", -] - [[package]] name = "simd-json" version = "0.15.1" @@ -5877,12 +5732,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c962f626b54771990066e5435ec8331d1462576cd2d1e62f24076ae014f92112" dependencies = [ "getrandom 0.3.3", - "halfbrown 0.3.0", + "halfbrown", "ref-cast", "serde", "serde_json", "simdutf8", - "value-trait 0.11.0", + "value-trait", ] [[package]] @@ -6485,7 +6340,7 @@ dependencies = [ "difference", "dirs", "fslock", - "rattler_digest 1.1.4", + "rattler_digest", "reqwest", "tempdir", "tempfile", @@ -6660,12 +6515,6 @@ dependencies = [ "toml 0.8.23", ] -[[package]] -name = "typed-path" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41713888c5ccfd99979fcd1afd47b71652e331b3d4a0e19d30769e80fec76cce" - [[package]] name = "typed-path" version = "0.11.0" @@ -6819,18 +6668,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" -[[package]] -name = 
"value-trait" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9170e001f458781e92711d2ad666110f153e4e50bfd5cbd02db6547625714187" -dependencies = [ - "float-cmp", - "halfbrown 0.2.5", - "itoa", - "ryu", -] - [[package]] name = "value-trait" version = "0.11.0" @@ -6838,7 +6675,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0508fce11ad19e0aab49ce20b6bec7f8f82902ded31df1c9fc61b90f0eb396b8" dependencies = [ "float-cmp", - "halfbrown 0.3.0", + "halfbrown", "itoa", "ryu", ] diff --git a/crates/rattler_upload/Cargo.toml b/crates/rattler_upload/Cargo.toml index 8324a66460..8f01603bf6 100644 --- a/crates/rattler_upload/Cargo.toml +++ b/crates/rattler_upload/Cargo.toml @@ -15,7 +15,7 @@ rattler_networking = { workspace = true, features = ["rattler_config"] } rattler_redaction = { workspace = true, default-features = false } rattler_package_streaming = { workspace = true, default-features = false } rattler_config = { workspace = true, default-features = false } -rattler_solve = { git = "https://github.com/wolfv/rattler", branch = "pub-schema" } +rattler_solve = { workspace = true } miette = { version = "7.6.0", features = ["fancy"] } clap = { version = "4.5.37", features = ["derive", "env", "cargo"] } fs-err = "3.1.0" From 9a5dadb19d614a7cb6a10f5d9b9f81bb93a8ce89 Mon Sep 17 00:00:00 2001 From: Magenta Date: Wed, 9 Jul 2025 09:39:56 +0200 Subject: [PATCH 06/11] feat: set platform to public --- crates/rattler_conda_types/src/lib.rs | 2 +- crates/rattler_conda_types/src/platform.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/rattler_conda_types/src/lib.rs b/crates/rattler_conda_types/src/lib.rs index 2a3a406000..e1587592f0 100644 --- a/crates/rattler_conda_types/src/lib.rs +++ b/crates/rattler_conda_types/src/lib.rs @@ -11,7 +11,7 @@ mod match_spec; pub mod menuinst; mod no_arch_type; mod parse_mode; -mod platform; +pub mod platform; mod repo_data; mod 
repo_data_record; mod run_export; diff --git a/crates/rattler_conda_types/src/platform.rs b/crates/rattler_conda_types/src/platform.rs index 8fb99a5a4a..e8b68bb2dd 100644 --- a/crates/rattler_conda_types/src/platform.rs +++ b/crates/rattler_conda_types/src/platform.rs @@ -1,3 +1,4 @@ +//! Platform-specific code. use itertools::Itertools; use serde::{Deserializer, Serializer}; use std::cmp::Ordering; From 659c0c06d068e054c1542b86c9e04970078db0e2 Mon Sep 17 00:00:00 2001 From: Magenta Date: Wed, 9 Jul 2025 10:19:35 +0200 Subject: [PATCH 07/11] lint: fix linting error --- crates/rattler_upload/src/upload/anaconda.rs | 6 ++--- .../rattler_upload/src/upload/conda_forge.rs | 20 +++++++------- crates/rattler_upload/src/upload/mod.rs | 10 +++---- crates/rattler_upload/src/upload/opt.rs | 26 +++++++++---------- crates/rattler_upload/src/utils/consts.rs | 1 - 5 files changed, 31 insertions(+), 32 deletions(-) diff --git a/crates/rattler_upload/src/upload/anaconda.rs b/crates/rattler_upload/src/upload/anaconda.rs index 96cf3aeb8e..0061fe9a27 100644 --- a/crates/rattler_upload/src/upload/anaconda.rs +++ b/crates/rattler_upload/src/upload/anaconda.rs @@ -55,7 +55,7 @@ impl Anaconda { ); default_headers.append( "Authorization", - format!("token {}", token).parse().expect("failed to parse"), + format!("token {token}").parse().expect("failed to parse"), ); default_headers.append( @@ -65,7 +65,7 @@ impl Anaconda { let client = Client::builder() .no_gzip() - .user_agent(format!("rattler-build/{}", VERSION)) + .user_agent(format!("rattler-build/{VERSION}")) .default_headers(default_headers) .build() .expect("failed to create client"); @@ -419,7 +419,7 @@ impl Anaconda { let content = fs::read(package.path()).await.into_diagnostic()?; form_data = form_data.text("Content-Length", file_size.to_string()); - form_data = form_data.text("Content-MD5", base64_md5.to_string()); + form_data = form_data.text("Content-MD5", base64_md5); form_data = form_data.part("file", Part::bytes(content)); 
 reqwest::Client::new() diff --git a/crates/rattler_upload/src/upload/conda_forge.rs b/crates/rattler_upload/src/upload/conda_forge.rs index d34d7b0482..9d4b53d332 100644 --- a/crates/rattler_upload/src/upload/conda_forge.rs +++ b/crates/rattler_upload/src/upload/conda_forge.rs @@ -74,8 +74,15 @@ pub async fn upload_packages_to_conda_forge( ) })?; - if !conda_forge_data.dry_run { - anaconda + if conda_forge_data.dry_run { + debug!( + "Would have uploaded {} to anaconda.org {}/{}", + package.path().display(), + conda_forge_data.staging_channel, + channel + ); + } else { + anaconda .create_or_update_package(&conda_forge_data.staging_channel, &package) .await?; @@ -91,13 +98,6 @@ pub async fn upload_packages_to_conda_forge( &package, ) .await?; - } else { - debug!( - "Would have uploaded {} to anaconda.org {}/{}", - package.path().display(), - conda_forge_data.staging_channel, - channel - ); }; let dist_name = format!( @@ -134,7 +134,7 @@ pub async fn upload_packages_to_conda_forge( "Sending payload to validation endpoint: {}", serde_json::to_string_pretty(&payload).into_diagnostic()? ); - + #[allow(clippy::if_not_else)] if conda_forge_data.dry_run { debug!( "Would have sent payload to validation endpoint {}", diff --git a/crates/rattler_upload/src/upload/mod.rs b/crates/rattler_upload/src/upload/mod.rs index ee87b32cec..00b5b86c1a 100644 --- a/crates/rattler_upload/src/upload/mod.rs +++ b/crates/rattler_upload/src/upload/mod.rs @@ -334,7 +334,7 @@ pub async fn upload_package_to_s3( let filename = package .filename() .ok_or_else(|| miette::miette!("Failed to get filename"))?; - let key = format!("{}/{}", subdir, filename); + let key = format!("{subdir}/{filename}"); let body = fs::read(package_file).await.into_diagnostic()?; op.write_with(&key, body) .if_not_exists(true) @@ -418,7 +418,7 @@ } } -/// Note that we need to use a regular request. reqwest_retry does not support streaming requests. 
+/// Note that we need to use a regular request. `reqwest_retry` does not support streaming requests. async fn send_request( prepared_request: reqwest::RequestBuilder, package_file: &Path, @@ -444,7 +444,7 @@ async fn send_request( progress_bar_clone.inc(bytes.len() as u64); }) .inspect_err(|e| { - println!("Error while uploading: {}", e); + println!("Error while uploading: {e}"); }); let body = reqwest::Body::wrap_stream(reader_stream); @@ -453,12 +453,12 @@ async fn send_request( .body(body) .send() .await - .map_err(|e| e.redact()) + .map_err(Redact::redact) .into_diagnostic()?; response .error_for_status_ref() - .map_err(|e| e.redact()) + .map_err(Redact::redact) .into_diagnostic() .wrap_err("Server responded with error")?; diff --git a/crates/rattler_upload/src/upload/opt.rs b/crates/rattler_upload/src/upload/opt.rs index d10aac5260..ced27ba5af 100644 --- a/crates/rattler_upload/src/upload/opt.rs +++ b/crates/rattler_upload/src/upload/opt.rs @@ -11,10 +11,10 @@ use tracing::warn; /// The configuration type for rattler-build - just extends rattler / pixi config and can load the same TOML files. pub type Config = rattler_config::config::ConfigBase<()>; -/// Container for rattler_solver::ChannelPriority so that it can be parsed +/// Container for `rattler_solver::ChannelPriority` so that it can be parsed #[derive(Clone, PartialEq, Eq, Debug)] pub struct ChannelPriorityWrapper { - /// The ChannelPriority value to be used when building the Configuration + /// The `ChannelPriority` value to be used when building the Configuration pub value: ChannelPriority, } impl FromStr for ChannelPriorityWrapper { @@ -34,7 +34,7 @@ impl FromStr for ChannelPriorityWrapper { } -/// Common opts that are shared between [`Rebuild`] and [`Build`]` subcommands +/// Common opts that are shared between `Rebuild` and `Build` subcommands #[derive(Parser, Clone, Debug)] pub struct CommonOpts { /// Output directory for build artifacts. 
@@ -98,12 +98,13 @@ impl CommonData { let mut mirror_config = HashMap::new(); tracing::debug!("Using mirrors: {:?}", config.mirrors); + #[allow(clippy::items_after_statements)] fn ensure_trailing_slash(url: &url::Url) -> url::Url { if url.path().ends_with('/') { url.clone() } else { // Do not use `join` because it removes the last element - format!("{}/", url) + format!("{url}/") .parse() .expect("Failed to add trailing slash to URL") } @@ -391,21 +392,20 @@ pub struct AnacondaOpts { } fn parse_s3_url(value: &str) -> Result { - let url: Url = Url::parse(value).map_err(|_| format!("`{}` isn't a valid URL", value))?; + let url: Url = Url::parse(value).map_err(|err| format!("`{value}` isn't a valid URL: {err}"))?; if url.scheme() == "s3" && url.host_str().is_some() { Ok(url) } else { - Err(format!( - "Only S3 URLs of format s3://bucket/... can be used, not `{}`", - value - )) + Err(format!( + "Only S3 URLs of format s3://bucket/... can be used, not `{value}`" +)) } } /// Options for uploading to S3 #[derive(Clone, Debug, PartialEq, Parser)] pub struct S3Opts { - /// The channel URL in the S3 bucket to upload the package to, e.g., s3://my-bucket/my-channel + /// The channel URL in the S3 bucket to upload the package to, e.g., `s3://my-bucket/my-channel` #[arg(short, long, env = "S3_CHANNEL", value_parser = parse_s3_url)] pub channel: Url, @@ -590,7 +590,7 @@ pub struct DebugOpts { #[arg(long)] pub target_platform: Option, - /// The host platform to build for (defaults to target_platform) + /// The host platform to build for (defaults to `target_platform`) #[arg(long)] pub host_platform: Option, @@ -633,8 +633,8 @@ pub struct DebugData { } impl DebugData { - /// Generate a new TestData struct from TestOpts and an optional pixi config. - /// TestOpts have higher priority than the pixi config. + /// Generate a new `TestData` struct from `TestOpts` and an optional pixi config. + /// `TestOpts` have higher priority than the pixi config. 
pub fn from_opts_and_config(opts: DebugOpts, config: Option) -> Self { Self { recipe_path: opts.recipe, diff --git a/crates/rattler_upload/src/utils/consts.rs b/crates/rattler_upload/src/utils/consts.rs index 1ac2dca18f..d74dbed881 100644 --- a/crates/rattler_upload/src/utils/consts.rs +++ b/crates/rattler_upload/src/utils/consts.rs @@ -1,5 +1,4 @@ /// A `recipe.yaml` file might be accompanied by a `variants.yaml` file from - /// This env var is set to "true" when run inside a github actions runner pub const GITHUB_ACTIONS: &str = "GITHUB_ACTIONS"; From a2a01c34f746fa82becb952a5652454e25e73cc9 Mon Sep 17 00:00:00 2001 From: Magenta Date: Wed, 9 Jul 2025 10:24:18 +0200 Subject: [PATCH 08/11] fix: fix 'No newline at end of file' --- crates/rattler_upload/src/utils/tool_configuration.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rattler_upload/src/utils/tool_configuration.rs b/crates/rattler_upload/src/utils/tool_configuration.rs index 1cc9056090..def8395af2 100644 --- a/crates/rattler_upload/src/utils/tool_configuration.rs +++ b/crates/rattler_upload/src/utils/tool_configuration.rs @@ -21,4 +21,4 @@ pub fn get_auth_store( } /// The user agent to use for the reqwest client -pub const APP_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),); \ No newline at end of file +pub const APP_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),); From fce21dca15bdb23a95b155e517263faeb7edead8 Mon Sep 17 00:00:00 2001 From: Magenta Date: Wed, 9 Jul 2025 10:49:29 +0200 Subject: [PATCH 09/11] fix: fix 'No newline at end of file' --- crates/rattler_upload/src/lib.rs | 2 +- crates/rattler_upload/src/utils/console_utils.rs | 2 +- crates/rattler_upload/src/utils/mod.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/rattler_upload/src/lib.rs b/crates/rattler_upload/src/lib.rs index fbc4ec4205..baace9bd58 100644 --- a/crates/rattler_upload/src/lib.rs +++ 
b/crates/rattler_upload/src/lib.rs @@ -65,4 +65,4 @@ pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> { .await } } -} \ No newline at end of file +} diff --git a/crates/rattler_upload/src/utils/console_utils.rs b/crates/rattler_upload/src/utils/console_utils.rs index 49c352994e..a8aad3caf2 100644 --- a/crates/rattler_upload/src/utils/console_utils.rs +++ b/crates/rattler_upload/src/utils/console_utils.rs @@ -3,4 +3,4 @@ use crate::utils::consts; /// Checks whether we are on GitHub Actions pub fn github_action_runner() -> bool { std::env::var(consts::GITHUB_ACTIONS) == Ok("true".to_string()) -} \ No newline at end of file +} diff --git a/crates/rattler_upload/src/utils/mod.rs b/crates/rattler_upload/src/utils/mod.rs index 2ce35969ea..df2b22c56b 100644 --- a/crates/rattler_upload/src/utils/mod.rs +++ b/crates/rattler_upload/src/utils/mod.rs @@ -1,3 +1,3 @@ pub mod console_utils; pub mod consts; -pub mod tool_configuration; \ No newline at end of file +pub mod tool_configuration; From 7f2c8e54d54da219ed49a46be90f99b0c574b861 Mon Sep 17 00:00:00 2001 From: Magenta Date: Wed, 9 Jul 2025 11:16:02 +0200 Subject: [PATCH 10/11] fix: py-rattler diff with crates --- crates/rattler_upload/src/lib.rs | 114 +++++++++--------- crates/rattler_upload/src/upload/anaconda.rs | 10 +- .../rattler_upload/src/upload/conda_forge.rs | 6 +- crates/rattler_upload/src/upload/mod.rs | 12 +- crates/rattler_upload/src/upload/opt.rs | 21 ++-- crates/rattler_upload/src/upload/package.rs | 6 +- crates/rattler_upload/src/upload/prefix.rs | 17 ++- .../src/upload/trusted_publishing.rs | 2 +- .../src/utils/tool_configuration.rs | 4 +- 9 files changed, 96 insertions(+), 96 deletions(-) diff --git a/crates/rattler_upload/src/lib.rs b/crates/rattler_upload/src/lib.rs index baace9bd58..ee98ec4325 100644 --- a/crates/rattler_upload/src/lib.rs +++ b/crates/rattler_upload/src/lib.rs @@ -1,68 +1,70 @@ pub mod upload; pub(crate) mod utils; -use upload::opt::{UploadOpts, ServerType, 
QuetzData, ArtifactoryData, CondaForgeData, PrefixData, AnacondaData }; -use miette::{IntoDiagnostic}; -use rattler_conda_types::package::{ArchiveType}; use crate::utils::tool_configuration; +use miette::IntoDiagnostic; +use rattler_conda_types::package::ArchiveType; +use upload::opt::{ + AnacondaData, ArtifactoryData, CondaForgeData, PrefixData, QuetzData, ServerType, UploadOpts, +}; /// Upload. pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> { - if args.package_files.is_empty() { - return Err(miette::miette!("No package files were provided.")); - } + if args.package_files.is_empty() { + return Err(miette::miette!("No package files were provided.")); + } - for package_file in &args.package_files { - if ArchiveType::try_from(package_file).is_none() { - return Err(miette::miette!( - "The file {} does not appear to be a conda package.", - package_file.to_string_lossy() - )); - } - } + for package_file in &args.package_files { + if ArchiveType::try_from(package_file).is_none() { + return Err(miette::miette!( + "The file {} does not appear to be a conda package.", + package_file.to_string_lossy() + )); + } + } - let store = tool_configuration::get_auth_store(args.common.auth_file).into_diagnostic()?; + let store = tool_configuration::get_auth_store(args.common.auth_file).into_diagnostic()?; - match args.server_type { - ServerType::Quetz(quetz_opts) => { - let quetz_data = QuetzData::from(quetz_opts); - upload::upload_package_to_quetz(&store, &args.package_files, quetz_data).await - } - ServerType::Artifactory(artifactory_opts) => { - let artifactory_data = ArtifactoryData::try_from(artifactory_opts)?; + match args.server_type { + ServerType::Quetz(quetz_opts) => { + let quetz_data = QuetzData::from(quetz_opts); + upload::upload_package_to_quetz(&store, &args.package_files, quetz_data).await + } + ServerType::Artifactory(artifactory_opts) => { + let artifactory_data = ArtifactoryData::try_from(artifactory_opts)?; - 
upload::upload_package_to_artifactory(&store, &args.package_files, artifactory_data) - .await - } - ServerType::Prefix(prefix_opts) => { - let prefix_data = PrefixData::from(prefix_opts); - upload::upload_package_to_prefix(&store, &args.package_files, prefix_data).await - } - ServerType::Anaconda(anaconda_opts) => { - let anaconda_data = AnacondaData::from(anaconda_opts); - upload::upload_package_to_anaconda(&store, &args.package_files, anaconda_data).await - } - ServerType::S3(s3_opts) => { - upload::upload_package_to_s3( - &store, - s3_opts.channel, - s3_opts.endpoint_url, - s3_opts.region, - s3_opts.force_path_style, - s3_opts.access_key_id, - s3_opts.secret_access_key, - s3_opts.session_token, - &args.package_files, - ) - .await - } - ServerType::CondaForge(conda_forge_opts) => { - let conda_forge_data = CondaForgeData::from(conda_forge_opts); - upload::conda_forge::upload_packages_to_conda_forge( - &args.package_files, - conda_forge_data, - ) - .await - } - } + upload::upload_package_to_artifactory(&store, &args.package_files, artifactory_data) + .await + } + ServerType::Prefix(prefix_opts) => { + let prefix_data = PrefixData::from(prefix_opts); + upload::upload_package_to_prefix(&store, &args.package_files, prefix_data).await + } + ServerType::Anaconda(anaconda_opts) => { + let anaconda_data = AnacondaData::from(anaconda_opts); + upload::upload_package_to_anaconda(&store, &args.package_files, anaconda_data).await + } + ServerType::S3(s3_opts) => { + upload::upload_package_to_s3( + &store, + s3_opts.channel, + s3_opts.endpoint_url, + s3_opts.region, + s3_opts.force_path_style, + s3_opts.access_key_id, + s3_opts.secret_access_key, + s3_opts.session_token, + &args.package_files, + ) + .await + } + ServerType::CondaForge(conda_forge_opts) => { + let conda_forge_data = CondaForgeData::from(conda_forge_opts); + upload::conda_forge::upload_packages_to_conda_forge( + &args.package_files, + conda_forge_data, + ) + .await + } + } } diff --git 
a/crates/rattler_upload/src/upload/anaconda.rs b/crates/rattler_upload/src/upload/anaconda.rs index 0061fe9a27..9ac9cde701 100644 --- a/crates/rattler_upload/src/upload/anaconda.rs +++ b/crates/rattler_upload/src/upload/anaconda.rs @@ -1,20 +1,20 @@ use std::borrow::Cow; use fs_err::tokio as fs; -use miette::{IntoDiagnostic, miette}; -use rattler_conda_types::PackageName; +use miette::{miette, IntoDiagnostic}; use rattler_conda_types::package::AboutJson; -use reqwest::Client; +use rattler_conda_types::utils::url_with_trailing_slash::UrlWithTrailingSlash; +use rattler_conda_types::PackageName; use reqwest::multipart::Form; use reqwest::multipart::Part; +use reqwest::Client; use serde::{Deserialize, Serialize}; use tracing::debug; use tracing::info; use url::Url; -use rattler_conda_types::utils::url_with_trailing_slash::UrlWithTrailingSlash; -use super::VERSION; use super::package::ExtractedPackage; +use super::VERSION; pub struct Anaconda { client: Client, diff --git a/crates/rattler_upload/src/upload/conda_forge.rs b/crates/rattler_upload/src/upload/conda_forge.rs index 9d4b53d332..faa5f2067a 100644 --- a/crates/rattler_upload/src/upload/conda_forge.rs +++ b/crates/rattler_upload/src/upload/conda_forge.rs @@ -5,9 +5,9 @@ use std::{ path::{Path, PathBuf}, }; -use crate::{CondaForgeData, upload::get_default_client}; +use crate::{upload::get_default_client, CondaForgeData}; use fs_err::tokio as fs; -use miette::{IntoDiagnostic, miette}; +use miette::{miette, IntoDiagnostic}; use tracing::{debug, info}; use super::{ @@ -82,7 +82,7 @@ pub async fn upload_packages_to_conda_forge( channel ); } else { - anaconda + anaconda .create_or_update_package(&conda_forge_data.staging_channel, &package) .await?; diff --git a/crates/rattler_upload/src/upload/mod.rs b/crates/rattler_upload/src/upload/mod.rs index 00b5b86c1a..cc03af7903 100644 --- a/crates/rattler_upload/src/upload/mod.rs +++ b/crates/rattler_upload/src/upload/mod.rs @@ -1,11 +1,11 @@ //! 
The upload module provides the package upload functionality. -use crate::{AnacondaData, ArtifactoryData, QuetzData, tool_configuration::APP_USER_AGENT}; +use crate::{tool_configuration::APP_USER_AGENT, AnacondaData, ArtifactoryData, QuetzData}; use fs_err::tokio as fs; use futures::TryStreamExt; -use indicatif::{HumanBytes, ProgressState, style::TemplateError}; -use opendal::{Configurator, Operator, services::S3Config}; -use reqwest_retry::{RetryDecision, RetryPolicy, policies::ExponentialBackoff}; +use indicatif::{style::TemplateError, HumanBytes, ProgressState}; +use opendal::{services::S3Config, Configurator, Operator}; +use reqwest_retry::{policies::ExponentialBackoff, RetryDecision, RetryPolicy}; use std::{ fmt::Write, net::Ipv4Addr, @@ -21,14 +21,14 @@ use reqwest::{Method, StatusCode}; use tracing::{info, warn}; use url::Url; -use crate::upload::package::{ExtractedPackage, sha256_sum}; +use crate::upload::package::{sha256_sum, ExtractedPackage}; mod anaconda; pub mod conda_forge; +pub mod opt; mod package; mod prefix; mod trusted_publishing; -pub mod opt; pub use prefix::upload_package_to_prefix; diff --git a/crates/rattler_upload/src/upload/opt.rs b/crates/rattler_upload/src/upload/opt.rs index ced27ba5af..32c3fcaf1f 100644 --- a/crates/rattler_upload/src/upload/opt.rs +++ b/crates/rattler_upload/src/upload/opt.rs @@ -1,12 +1,12 @@ //! Command-line options. 
-use std::{collections::HashMap, path::PathBuf, str::FromStr}; -use clap::{Parser, arg }; -use rattler_solve::ChannelPriority; -use url::Url; -use rattler_conda_types::{NamedChannelOrUrl, Platform }; +use clap::{arg, Parser}; use rattler_conda_types::utils::url_with_trailing_slash::UrlWithTrailingSlash; +use rattler_conda_types::{NamedChannelOrUrl, Platform}; use rattler_networking::{mirror_middleware, s3_middleware}; +use rattler_solve::ChannelPriority; +use std::{collections::HashMap, path::PathBuf, str::FromStr}; use tracing::warn; +use url::Url; /// The configuration type for rattler-build - just extends rattler / pixi config and can load the same TOML files. pub type Config = rattler_config::config::ConfigBase<()>; @@ -33,7 +33,6 @@ impl FromStr for ChannelPriorityWrapper { } } - /// Common opts that are shared between `Rebuild` and `Build` subcommands #[derive(Parser, Clone, Debug)] pub struct CommonOpts { @@ -147,7 +146,6 @@ impl CommonData { } } - /// Upload options. #[derive(Parser, Debug)] pub struct UploadOpts { @@ -392,13 +390,14 @@ pub struct AnacondaOpts { } fn parse_s3_url(value: &str) -> Result { - let url: Url = Url::parse(value).map_err(|err| format!("`{value}` isn't a valid URL: {err}"))?; + let url: Url = + Url::parse(value).map_err(|err| format!("`{value}` isn't a valid URL: {err}"))?; if url.scheme() == "s3" && url.host_str().is_some() { Ok(url) } else { - Err(format!( - "Only S3 URLs of format s3://bucket/... can be used, not `{value}`" -)) + Err(format!( + "Only S3 URLs of format s3://bucket/... 
can be used, not `{value}`" + )) } } diff --git a/crates/rattler_upload/src/upload/package.rs b/crates/rattler_upload/src/upload/package.rs index 2b2beb0a59..2e7ba4c94a 100644 --- a/crates/rattler_upload/src/upload/package.rs +++ b/crates/rattler_upload/src/upload/package.rs @@ -1,12 +1,12 @@ use std::path::Path; -use base64::{Engine, engine::general_purpose}; +use base64::{engine::general_purpose, Engine}; use miette::IntoDiagnostic; use rattler_conda_types::{ - PackageName, VersionWithSource as PackageVersion, package::{AboutJson, IndexJson, PackageFile}, + PackageName, VersionWithSource as PackageVersion, }; -use rattler_digest::{Md5, compute_file_digest}; +use rattler_digest::{compute_file_digest, Md5}; use sha2::Sha256; pub fn sha256_sum(package_file: &Path) -> Result { diff --git a/crates/rattler_upload/src/upload/prefix.rs b/crates/rattler_upload/src/upload/prefix.rs index bc7ba03e3b..50f0bf2669 100644 --- a/crates/rattler_upload/src/upload/prefix.rs +++ b/crates/rattler_upload/src/upload/prefix.rs @@ -3,10 +3,10 @@ use futures::TryStreamExt as _; use miette::IntoDiagnostic as _; use rattler_networking::{Authentication, AuthenticationStorage}; use reqwest::{ - StatusCode, header::{self, HeaderMap, HeaderValue}, + StatusCode, }; -use reqwest_retry::{RetryDecision, RetryPolicy, policies::ExponentialBackoff}; +use reqwest_retry::{policies::ExponentialBackoff, RetryDecision, RetryPolicy}; use std::{ path::{Path, PathBuf}, time::{Duration, SystemTime}, @@ -15,15 +15,14 @@ use tokio_util::io::ReaderStream; use tracing::{info, warn}; use url::Url; -use super::opt::{ // ← Import from sibling module - PrefixData +use super::opt::{ + // ← Import from sibling module + PrefixData, }; -use crate::{ - upload::{ - default_bytes_style, get_client_with_retry, get_default_client, - trusted_publishing::{TrustedPublishResult, check_trusted_publishing}, - }, +use crate::upload::{ + default_bytes_style, get_client_with_retry, get_default_client, + 
trusted_publishing::{check_trusted_publishing, TrustedPublishResult}, }; use super::package::sha256_sum; diff --git a/crates/rattler_upload/src/upload/trusted_publishing.rs b/crates/rattler_upload/src/upload/trusted_publishing.rs index 86c7b2b2c2..469b4b912d 100644 --- a/crates/rattler_upload/src/upload/trusted_publishing.rs +++ b/crates/rattler_upload/src/upload/trusted_publishing.rs @@ -3,7 +3,7 @@ //! Trusted publishing (via OIDC) with GitHub actions. -use reqwest::{StatusCode, header}; +use reqwest::{header, StatusCode}; use reqwest_middleware::ClientWithMiddleware; use serde::{Deserialize, Serialize}; use std::env; diff --git a/crates/rattler_upload/src/utils/tool_configuration.rs b/crates/rattler_upload/src/utils/tool_configuration.rs index def8395af2..db1a8e5fb5 100644 --- a/crates/rattler_upload/src/utils/tool_configuration.rs +++ b/crates/rattler_upload/src/utils/tool_configuration.rs @@ -1,8 +1,8 @@ -use std::{path::PathBuf, sync::Arc}; use rattler_networking::{ + authentication_storage::{self, AuthenticationStorageError}, AuthenticationStorage, - authentication_storage::{self, AuthenticationStorageError} }; +use std::{path::PathBuf, sync::Arc}; /// Get the authentication storage from the given file pub fn get_auth_store( From 8f474b33665d7ad6d05e03ad5e2c53734b05664a Mon Sep 17 00:00:00 2001 From: magentaqin Date: Wed, 9 Jul 2025 15:50:51 +0200 Subject: [PATCH 11/11] doc: update comments for upload/lib.rs --- crates/rattler_upload/src/lib.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/rattler_upload/src/lib.rs b/crates/rattler_upload/src/lib.rs index ee98ec4325..807782c5bd 100644 --- a/crates/rattler_upload/src/lib.rs +++ b/crates/rattler_upload/src/lib.rs @@ -8,12 +8,14 @@ use upload::opt::{ AnacondaData, ArtifactoryData, CondaForgeData, PrefixData, QuetzData, ServerType, UploadOpts, }; -/// Upload. 
+/// Upload package to different channels pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> { + // Validate package files are provided if args.package_files.is_empty() { return Err(miette::miette!("No package files were provided.")); } + // Validate all files are conda packages for package_file in &args.package_files { if ArchiveType::try_from(package_file).is_none() { return Err(miette::miette!( @@ -23,8 +25,10 @@ pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> { } } + // Initialize authentication store let store = tool_configuration::get_auth_store(args.common.auth_file).into_diagnostic()?; + // Upload handler based on server type match args.server_type { ServerType::Quetz(quetz_opts) => { let quetz_data = QuetzData::from(quetz_opts);