diff --git a/src/cargo/core/compiler/build_config.rs b/src/cargo/core/compiler/build_config.rs
index 787c032ee1e..e4b667552c3 100644
--- a/src/cargo/core/compiler/build_config.rs
+++ b/src/cargo/core/compiler/build_config.rs
@@ -204,6 +204,36 @@ impl ser::Serialize for CompileMode {
     }
 }
 
+impl<'de> serde::Deserialize<'de> for CompileMode {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let s = String::deserialize(deserializer)?;
+        match s.as_str() {
+            "test" => Ok(CompileMode::Test),
+            "build" => Ok(CompileMode::Build),
+            "check" => Ok(CompileMode::Check { test: false }),
+            "doc" => Ok(CompileMode::Doc),
+            "doctest" => Ok(CompileMode::Doctest),
+            "docscrape" => Ok(CompileMode::Docscrape),
+            "run-custom-build" => Ok(CompileMode::RunCustomBuild),
+            other => Err(serde::de::Error::unknown_variant(
+                other,
+                &[
+                    "test",
+                    "build",
+                    "check",
+                    "doc",
+                    "doctest",
+                    "docscrape",
+                    "run-custom-build",
+                ],
+            )),
+        }
+    }
+}
+
 impl CompileMode {
     /// Returns `true` if the unit is being checked.
     pub fn is_check(self) -> bool {
diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs
index 4ad36afdbb8..6e228d9d776 100644
--- a/src/cargo/core/compiler/mod.rs
+++ b/src/cargo/core/compiler/mod.rs
@@ -46,7 +46,7 @@ mod output_depinfo;
 mod output_sbom;
 pub mod rustdoc;
 pub mod standard_lib;
-mod timings;
+pub mod timings;
 mod unit;
 pub mod unit_dependencies;
 pub mod unit_graph;
diff --git a/src/cargo/core/compiler/timings/mod.rs b/src/cargo/core/compiler/timings/mod.rs
index 0c0fce23edc..aaeb57c744d 100644
--- a/src/cargo/core/compiler/timings/mod.rs
+++ b/src/cargo/core/compiler/timings/mod.rs
@@ -3,7 +3,7 @@
 //! This module implements some simple tracking information for timing of how
 //! long it takes for different units to compile.
 
-mod report;
+pub mod report;
 
 use super::{CompileMode, Unit};
 use crate::core::PackageId;
@@ -75,9 +75,9 @@ pub struct Timings<'gctx> {
 #[derive(Copy, Clone, serde::Serialize)]
 pub struct CompilationSection {
     /// Start of the section, as an offset in seconds from `UnitTime::start`.
-    start: f64,
+    pub start: f64,
     /// End of the section, as an offset in seconds from `UnitTime::start`.
-    end: Option<f64>,
+    pub end: Option<f64>,
 }
 
 /// Tracking information for an individual unit.
@@ -107,18 +107,18 @@ struct UnitTime {
 ///
 /// This is used by the HTML report's JavaScript to render the pipeline graph.
 #[derive(serde::Serialize)]
-struct UnitData {
-    i: u64,
-    name: String,
-    version: String,
-    mode: String,
-    target: String,
-    features: Vec<String>,
-    start: f64,
-    duration: f64,
-    unblocked_units: Vec<u64>,
-    unblocked_rmeta_units: Vec<u64>,
-    sections: Option<Vec<(SectionName, SectionData)>>,
+pub struct UnitData {
+    pub i: u64,
+    pub name: String,
+    pub version: String,
+    pub mode: String,
+    pub target: String,
+    pub features: Vec<String>,
+    pub start: f64,
+    pub duration: f64,
+    pub unblocked_units: Vec<u64>,
+    pub unblocked_rmeta_units: Vec<u64>,
+    pub sections: Option<Vec<(SectionName, SectionData)>>,
 }
 
 impl<'gctx> Timings<'gctx> {
@@ -445,16 +445,16 @@ impl<'gctx> Timings<'gctx> {
         let concurrency = report::compute_concurrency(&unit_data);
 
         let ctx = report::RenderContext {
-            start_str: &self.start_str,
+            start_str: self.start_str.clone(),
             root_units: &self.root_targets,
-            profile: &self.profile,
+            profile: self.profile.clone(),
             total_fresh: self.total_fresh,
             total_dirty: self.total_dirty,
             unit_data,
             concurrency,
             cpu_usage: &self.cpu_usage,
-            rustc_version,
-            host: &build_runner.bcx.rustc().host,
+            rustc_version: rustc_version.into(),
+            host: build_runner.bcx.rustc().host.to_string(),
             requested_targets,
             jobs: build_runner.bcx.jobs(),
             num_cpus,
diff --git a/src/cargo/core/compiler/timings/report.rs b/src/cargo/core/compiler/timings/report.rs
index 72e76c0fb5d..3ef5c102545 100644
--- a/src/cargo/core/compiler/timings/report.rs
+++ b/src/cargo/core/compiler/timings/report.rs
@@ -5,25 +5,19 @@
 use std::collections::HashMap;
 use std::collections::HashSet;
 use std::io::Write;
 
+use indexmap::IndexMap;
 use itertools::Itertools as _;
 
 use crate::CargoResult;
 use crate::core::compiler::Unit;
+use super::CompilationSection;
 use super::UnitData;
 use super::UnitTime;
 
-/// Contains post-processed data of individual compilation sections.
-enum AggregatedSections {
-    /// We know the names and durations of individual compilation sections
-    Sections(Vec<(SectionName, SectionData)>),
-    /// We know only the total duration
-    OnlyTotalDuration,
-}
-
 /// Name of an individual compilation section.
 #[derive(Clone, Hash, Eq, PartialEq)]
-pub(super) enum SectionName {
+pub enum SectionName {
     Frontend,
     Codegen,
     Named(String),
@@ -67,11 +61,11 @@ impl serde::ser::Serialize for SectionName {
 
 /// Postprocessed section data that has both start and an end.
 #[derive(Copy, Clone, serde::Serialize)]
-pub(super) struct SectionData {
+pub struct SectionData {
     /// Start (relative to the start of the unit)
-    start: f64,
+    pub start: f64,
     /// End (relative to the start of the unit)
-    end: f64,
+    pub end: f64,
 }
 
 impl SectionData {
@@ -96,13 +90,13 @@ pub struct Concurrency {
 
 pub struct RenderContext<'a> {
     /// A rendered string of when compilation started.
-    pub start_str: &'a str,
+    pub start_str: String,
     /// A summary of the root units.
     ///
     /// Tuples of `(package_description, target_descriptions)`.
     pub root_units: &'a [(String, Vec<String>)],
     /// The build profile.
-    pub profile: &'a str,
+    pub profile: String,
     /// Total number of fresh units.
     pub total_fresh: u32,
     /// Total number of dirty units.
@@ -117,9 +111,9 @@ pub struct RenderContext<'a> {
     /// system.
     pub cpu_usage: &'a [(f64, f64)],
     /// Compiler version info, i.e., `rustc 1.92.0-beta.2 (0a411606e 2025-10-31)`.
-    pub rustc_version: &'a str,
+    pub rustc_version: String,
     /// The host triple (arch-platform-OS).
-    pub host: &'a str,
+    pub host: String,
     /// The requested target platforms of compilation for this build.
     pub requested_targets: &'a [&'a str],
     /// The number of jobs specified for this build.
@@ -131,7 +125,7 @@ pub struct RenderContext<'a> {
 }
 
 /// Writes an HTML report.
-pub(super) fn write_html(ctx: RenderContext<'_>, f: &mut impl Write) -> CargoResult<()> {
+pub fn write_html(ctx: RenderContext<'_>, f: &mut impl Write) -> CargoResult<()> {
     // The last concurrency record should equal to the last unit finished time.
     let duration = ctx.concurrency.last().map(|c| c.t).unwrap_or(0.0);
     let roots: Vec<&str> = ctx
@@ -384,10 +378,7 @@ pub(super) fn to_unit_data(
             .iter()
             .filter_map(|unit| unit_map.get(unit).copied())
             .collect();
-        let sections = match aggregate_sections(ut) {
-            AggregatedSections::Sections(sections) => Some(sections),
-            AggregatedSections::OnlyTotalDuration => None,
-        };
+        let sections = aggregate_sections(ut.sections.clone(), ut.duration, ut.rmeta_time);
 
         UnitData {
             i,
@@ -407,7 +398,7 @@
 }
 
 /// Derives concurrency information from unit timing data.
-pub(super) fn compute_concurrency(unit_data: &[UnitData]) -> Vec<Concurrency> {
+pub fn compute_concurrency(unit_data: &[UnitData]) -> Vec<Concurrency> {
     if unit_data.is_empty() {
         return Vec::new();
     }
@@ -539,16 +530,17 @@ pub(super) fn compute_concurrency(unit_data: &[UnitData]) -> Vec<Concurrency> {
 /// in which case we use them to determine the headers.
 /// - We have at least one rmeta time, so we hard-code Frontend and Codegen headers.
 /// - We only have total durations, so we don't add any additional headers.
-fn aggregate_sections(unit_time: &UnitTime) -> AggregatedSections {
-    let end = unit_time.duration;
-
-    if !unit_time.sections.is_empty() {
+pub fn aggregate_sections(
+    sections: IndexMap<String, CompilationSection>,
+    end: f64,
+    rmeta_time: Option<f64>,
+) -> Option<Vec<(SectionName, SectionData)>> {
+    if !sections.is_empty() {
         // We have some detailed compilation section timings, so we postprocess them
         // Since it is possible that we do not have an end timestamp for a given compilation
         // section, we need to iterate them and if an end is missing, we assign the end of
         // the section to the start of the following section.
-
-        let mut sections = unit_time.sections.clone().into_iter().fold(
+        let mut sections = sections.into_iter().fold(
             // The frontend section is currently implicit in rustc.
             // It is assumed to start at compilation start and end when codegen starts,
             // So we hard-code it here.
@@ -593,11 +585,10 @@ fn aggregate_sections(unit_time: &UnitTime) -> AggregatedSections {
                 },
             ));
         }
-
-        AggregatedSections::Sections(sections)
-    } else if let Some(rmeta) = unit_time.rmeta_time {
+        Some(sections)
+    } else if let Some(rmeta) = rmeta_time {
         // We only know when the rmeta time was generated
-        AggregatedSections::Sections(vec![
+        Some(vec![
             (
                 SectionName::Frontend,
                 SectionData {
@@ -614,13 +605,13 @@ fn aggregate_sections(unit_time: &UnitTime) -> AggregatedSections {
             ),
         ])
     } else {
-        // We only know the total duration
-        AggregatedSections::OnlyTotalDuration
+        // No section data provided. We only know the total duration.
+        None
     }
 }
 
 /// Rounds seconds to 0.01s precision.
-fn round_to_centisecond(x: f64) -> f64 {
+pub fn round_to_centisecond(x: f64) -> f64 {
     (x * 100.0).round() / 100.0
 }
 
diff --git a/src/cargo/util/log_message.rs b/src/cargo/util/log_message.rs
index 9ab10d6bdb0..e91833a5649 100644
--- a/src/cargo/util/log_message.rs
+++ b/src/cargo/util/log_message.rs
@@ -1,10 +1,12 @@
 //! Messages for logging.
 
+use std::borrow::Cow;
 use std::io::Write;
 use std::path::PathBuf;
 
 use cargo_util_schemas::core::PackageIdSpec;
 use jiff::Timestamp;
+use serde::Deserialize;
 use serde::Serialize;
 
 use crate::core::compiler::CompileMode;
@@ -13,7 +15,7 @@ use crate::core::compiler::fingerprint::DirtyReason;
 /// A log message.
 ///
 /// Each variant represents a different type of event.
-#[derive(Serialize)]
+#[derive(Serialize, Deserialize)]
 #[serde(tag = "reason", rename_all = "kebab-case")]
 pub enum LogMessage {
     /// Emitted when a build starts.
@@ -57,7 +59,7 @@ pub enum LogMessage {
         /// Seconds elapsed from build start.
         elapsed: f64,
         /// Unit indices that were unblocked by this rmeta completion.
-        #[serde(skip_serializing_if = "Vec::is_empty")]
+        #[serde(default, skip_serializing_if = "Vec::is_empty")]
         unblocked: Vec<u64>,
     },
     /// Emitted when a section (e.g., rmeta, link) of the compilation unit starts.
@@ -89,7 +91,7 @@ pub enum LogMessage {
         /// Seconds elapsed from build start.
         elapsed: f64,
         /// Unit indices that were unblocked by this completion.
-        #[serde(skip_serializing_if = "Vec::is_empty")]
+        #[serde(default, skip_serializing_if = "Vec::is_empty")]
         unblocked: Vec<u64>,
     },
     /// Emitted when a unit needs to be rebuilt.
@@ -101,17 +103,18 @@ pub enum LogMessage {
         /// The compilation action this unit is for (check, build, test, etc.).
         mode: CompileMode,
         /// Reason why the unit is dirty and needs rebuilding.
+        #[serde(skip_deserializing, default = "default_reason")]
         cause: DirtyReason,
     },
 }
 
 /// Cargo target information.
-#[derive(Serialize)]
+#[derive(Serialize, Deserialize)]
 pub struct Target {
     /// Target name.
-    name: String,
+    pub name: String,
     /// Target kind (lib, bin, test, bench, example, build-script).
-    kind: &'static str,
+    pub kind: Cow<'static, str>,
 }
 
 impl From<&crate::core::Target> for Target {
@@ -126,7 +129,8 @@ impl From<&crate::core::Target> for Target {
                 TargetKind::Bench => "bench",
                 TargetKind::ExampleLib(..) | TargetKind::ExampleBin => "example",
                 TargetKind::CustomBuild => "build-script",
-            },
+            }
+            .into(),
         }
     }
 }
@@ -153,3 +157,7 @@ impl LogMessage {
         Ok(())
     }
 }
+
+fn default_reason() -> DirtyReason {
+    DirtyReason::NothingObvious
+}
diff --git a/src/cargo/util/logger.rs b/src/cargo/util/logger.rs
index e213c750752..4a00dc977cc 100644
--- a/src/cargo/util/logger.rs
+++ b/src/cargo/util/logger.rs
@@ -1,5 +1,6 @@
 //! Build analysis logging infrastructure.
 
+use std::hash::Hash;
 use std::io::{BufWriter, Write};
 use std::mem::ManuallyDrop;
 use std::path::Path;
@@ -7,6 +8,7 @@ use std::sync::mpsc;
 use std::sync::mpsc::Sender;
 use std::thread::JoinHandle;
 
+use anyhow::Context as _;
 use cargo_util::paths;
 
 use crate::CargoResult;
@@ -17,7 +19,7 @@ use crate::util::short_hash;
 
 /// Logger for `-Zbuild-analysis`.
 pub struct BuildLogger {
     tx: ManuallyDrop<Sender<LogMessage>>,
-    run_id: String,
+    run_id: RunId,
     handle: Option<JoinHandle<()>>,
 }
 
@@ -38,7 +40,7 @@ impl BuildLogger {
     }
 
     fn new(ws: &Workspace<'_>) -> CargoResult<Self> {
-        let run_id = Self::generate_run_id(ws)?;
+        let run_id = Self::generate_run_id(ws);
 
         let log_dir = ws.gctx().home().join("log");
         paths::create_dir_all(log_dir.as_path_unlocked())?;
@@ -52,11 +54,11 @@ impl BuildLogger {
 
         let (tx, rx) = mpsc::channel::<LogMessage>();
 
-        let run_id_clone = run_id.clone();
+        let run_id_str = run_id.to_string();
         let handle = std::thread::spawn(move || {
             let mut writer = BufWriter::new(log_file);
             for msg in rx {
-                let _ = msg.write_json_log(&mut writer, &run_id_clone);
+                let _ = msg.write_json_log(&mut writer, &run_id_str);
             }
             let _ = writer.flush();
         });
@@ -69,18 +71,12 @@ impl BuildLogger {
     }
 
     /// Generates a unique run ID.
-    ///
-    /// The format is `{timestamp}-{hash}`, with `:` and `.` in the timestamp
-    /// removed to make it safe for filenames.
-    /// For example, `20251024T194502773638Z-f891d525d52ecab3`.
-    pub fn generate_run_id(ws: &Workspace<'_>) -> CargoResult<String> {
-        let hash = short_hash(&ws.root());
-        let timestamp = jiff::Timestamp::now().to_string().replace([':', '.'], "");
-        Ok(format!("{timestamp}-{hash}"))
+    pub fn generate_run_id(ws: &Workspace<'_>) -> RunId {
+        RunId::new(&ws.root())
     }
 
     /// Returns the run ID for this build session.
-    pub fn run_id(&self) -> &str {
+    pub fn run_id(&self) -> &RunId {
         &self.run_id
     }
 
@@ -103,3 +99,74 @@ impl Drop for BuildLogger {
         }
     }
 }
+
+/// A unique identifier for a Cargo invocation.
+#[derive(Clone)]
+pub struct RunId {
+    timestamp: jiff::Timestamp,
+    hash: String,
+}
+
+impl RunId {
+    const FORMAT: &str = "%Y%m%dT%H%M%S%3fZ";
+
+    pub fn new<H: Hash>(h: &H) -> RunId {
+        RunId {
+            timestamp: jiff::Timestamp::now(),
+            hash: short_hash(h),
+        }
+    }
+
+    pub fn timestamp(&self) -> &jiff::Timestamp {
+        &self.timestamp
+    }
+
+    /// Checks whether ID was generated from the same workspace.
+    pub fn same_workspace(&self, other: &RunId) -> bool {
+        self.hash == other.hash
+    }
+}
+
+impl std::fmt::Display for RunId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let hash = &self.hash;
+        let timestamp = self.timestamp.strftime(Self::FORMAT);
+        write!(f, "{timestamp}-{hash}")
+    }
+}
+
+impl std::str::FromStr for RunId {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let msg =
+            || format!("expect run ID in format `20060724T012128000Z-<16-char-hex>`, got `{s}`");
+        let Some((timestamp, hash)) = s.rsplit_once('-') else {
+            anyhow::bail!(msg());
+        };
+
+        if hash.len() != 16 || !hash.chars().all(|c| c.is_ascii_hexdigit()) {
+            anyhow::bail!(msg());
+        }
+        let timestamp = jiff::civil::DateTime::strptime(Self::FORMAT, timestamp)
+            .and_then(|dt| dt.to_zoned(jiff::tz::TimeZone::UTC))
+            .map(|zoned| zoned.timestamp())
+            .with_context(msg)?;
+
+        Ok(RunId {
+            timestamp,
+            hash: hash.into(),
+        })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn run_id_round_trip() {
+        let id = "20060724T012128000Z-b0fd440798ab3cfb";
+        assert_eq!(id, &id.parse::<RunId>().unwrap().to_string());
+    }
+}
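
A note on the serde changes above: `LogMessage` now derives `Deserialize` in addition to `Serialize`, so logged JSON lines can be read back, and that is also why every field marked `#[serde(skip_serializing_if = "Vec::is_empty")]` gains `default` -- without it, a line written with the empty array omitted would fail to deserialize with a "missing field" error. A minimal standalone sketch of that round-trip, using a hypothetical `Message` enum and serde_json rather than Cargo's actual types:

use serde::{Deserialize, Serialize};

// Hypothetical mirror of the `LogMessage` pattern: an internally tagged enum
// whose empty `unblocked` list is omitted on output and restored on input.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(tag = "reason", rename_all = "kebab-case")]
enum Message {
    UnitFinished {
        elapsed: f64,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        unblocked: Vec<u64>,
    },
}

fn main() -> Result<(), serde_json::Error> {
    let msg = Message::UnitFinished { elapsed: 1.25, unblocked: vec![] };

    // The empty vector is skipped, so the JSON line has no `unblocked` key...
    let json = serde_json::to_string(&msg)?;
    assert_eq!(json, r#"{"reason":"unit-finished","elapsed":1.25}"#);

    // ...and `default` is what lets that same line deserialize back anyway.
    let back: Message = serde_json::from_str(&json)?;
    assert_eq!(back, msg);
    Ok(())
}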