Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add fallible units for scrape-examples feature #10596

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions src/cargo/core/compiler/context/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,13 @@ pub struct Context<'a, 'cfg> {
/// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
/// See Context::find_metadata_units for more details.
pub metadata_for_doc_units: HashMap<Unit, Metadata>,

    /// Map that tracks whether a unit completed successfully. Used in conjunction
/// with the `Unit::can_fail` flag, so jobs can dynamically track at runtime
/// whether their dependencies succeeded or failed. Currently used for
/// the Rustdoc scrape-examples feature to allow Rustdoc to proceed even if
/// examples fail to compile.
pub completed_units: Arc<Mutex<HashMap<Metadata, bool>>>,
}

impl<'a, 'cfg> Context<'a, 'cfg> {
Expand Down Expand Up @@ -115,6 +122,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
rustc_clients: HashMap::new(),
lto: HashMap::new(),
metadata_for_doc_units: HashMap::new(),
completed_units: Arc::new(Mutex::new(HashMap::new())),
})
}

Expand Down
46 changes: 45 additions & 1 deletion src/cargo/core/compiler/job_queue.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,10 @@ struct DrainState<'cfg> {
total_units: usize,

queue: DependencyQueue<Unit, Artifact, Job>,
/// Dependency map that is like JobQueue::dep_map, except with Job information removed.
/// Used to determine if a unit's dependencies have failed, see
/// [`DrainState::spawn_work_if_possible`].
dep_map: HashMap<Unit, HashSet<(Unit, Artifact)>>,
messages: Arc<Queue<Message>>,
/// Diagnostic deduplication support.
diag_dedupe: DiagDedupe<'cfg>,
Expand Down Expand Up @@ -506,8 +510,15 @@ impl<'cfg> JobQueue<'cfg> {
self.queue.queue_finished();

let progress = Progress::with_style("Building", ProgressStyle::Ratio, cx.bcx.config);
let dep_map = self
.queue
.dep_map()
.iter()
.map(|(unit, (deps, _))| (unit.clone(), deps.clone()))
.collect();
let state = DrainState {
total_units: self.queue.len(),
dep_map,
queue: self.queue,
            // 100 here is somewhat arbitrary. It is a few screenfuls of
// output, and hopefully at most a few megabytes of memory for
Expand Down Expand Up @@ -578,6 +589,32 @@ impl<'cfg> DrainState<'cfg> {
// start requesting job tokens. Each job after the first needs to
// request a token.
while let Some((unit, job)) = self.queue.dequeue() {
// First, we handle the special case of fallible units. If
// this unit is allowed to fail, and any one of its dependencies
// has failed, then we should immediately mark it as failed and
// skip executing it.
if unit.can_fail {
let mut completed_units = cx.completed_units.lock().unwrap();
let failed_deps = self.dep_map[&unit]
.iter()
.filter(|(dep_unit, _)| {
let dep_meta = cx.files().metadata(dep_unit);
!completed_units[&dep_meta]
})
.map(|(_, artifact)| artifact)
.collect::<HashSet<_>>();
if !failed_deps.is_empty() {
// TODO: should put a warning here saying which units were skipped
// due to failed dependencies.
for artifact in failed_deps {
self.queue.finish(&unit, artifact);
}
let unit_meta = cx.files().metadata(&unit);
completed_units.insert(unit_meta, false);
continue;
}
}

self.pending_queue.push((unit, job));
if self.active.len() + self.pending_queue.len() > 1 {
jobserver_helper.request_token();
Expand Down Expand Up @@ -713,7 +750,8 @@ impl<'cfg> DrainState<'cfg> {
};
debug!("end ({:?}): {:?}", unit, result);
match result {
Ok(()) => self.finish(id, &unit, artifact, cx)?,
Ok(()) => self.finish(id, &unit, artifact, cx, true)?,
Err(_) if unit.can_fail => self.finish(id, &unit, artifact, cx, false)?,
Err(error) => {
let msg = "The following warnings were emitted during compilation:";
self.emit_warnings(Some(msg), &unit, cx)?;
Expand Down Expand Up @@ -1161,6 +1199,7 @@ impl<'cfg> DrainState<'cfg> {
unit: &Unit,
artifact: Artifact,
cx: &mut Context<'_, '_>,
success: bool,
) -> CargoResult<()> {
if unit.mode.is_run_custom_build() && unit.show_warnings(cx.bcx.config) {
self.emit_warnings(None, unit, cx)?;
Expand All @@ -1170,6 +1209,11 @@ impl<'cfg> DrainState<'cfg> {
Artifact::All => self.timings.unit_finished(id, unlocked),
Artifact::Metadata => self.timings.unit_rmeta_finished(id, unlocked),
}
cx.completed_units
.lock()
.unwrap()
.insert(cx.files().metadata(unit), success);

Ok(())
}

Expand Down
44 changes: 30 additions & 14 deletions src/cargo/core/compiler/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ mod unit;
pub mod unit_dependencies;
pub mod unit_graph;

use std::collections::HashSet;
use std::collections::{HashMap, HashSet};
use std::env;
use std::ffi::{OsStr, OsString};
use std::fs::{self, File};
Expand Down Expand Up @@ -639,9 +639,9 @@ fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Work> {
let metadata = cx.metadata_for_doc_units[unit];
rustdoc.arg("-C").arg(format!("metadata={}", metadata));

let scrape_output_path = |unit: &Unit| -> CargoResult<PathBuf> {
let scrape_output_path = |unit: &Unit| -> PathBuf {
let output_dir = cx.files().deps_dir(unit);
Ok(output_dir.join(format!("{}.examples", unit.buildkey())))
output_dir.join(format!("{}.examples", unit.buildkey()))
};

if unit.mode.is_doc_scrape() {
Expand All @@ -651,7 +651,7 @@ fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Work> {

rustdoc
.arg("--scrape-examples-output-path")
.arg(scrape_output_path(unit)?);
.arg(scrape_output_path(unit));

        // Only scrape examples for items from crates in the workspace, to reduce generated file size
for pkg in cx.bcx.ws.members() {
Expand All @@ -664,18 +664,18 @@ fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Work> {
rustdoc.arg("--scrape-examples-target-crate").arg(name);
}
}
} else if cx.bcx.scrape_units.len() > 0 && cx.bcx.ws.unit_needs_doc_scrape(unit) {
// We only pass scraped examples to packages in the workspace
// since examples are only coming from reverse-dependencies of workspace packages
}

let should_include_scrape_units =
cx.bcx.scrape_units.len() > 0 && cx.bcx.ws.unit_needs_doc_scrape(unit);
let scrape_outputs = should_include_scrape_units.then(|| {
rustdoc.arg("-Zunstable-options");

for scrape_unit in &cx.bcx.scrape_units {
rustdoc
.arg("--with-examples")
.arg(scrape_output_path(scrape_unit)?);
}
}
cx.bcx
.scrape_units
.iter()
.map(|unit| (cx.files().metadata(unit), scrape_output_path(unit)))
.collect::<HashMap<_, _>>()
});

build_deps_args(&mut rustdoc, cx, unit)?;
rustdoc::add_root_urls(cx, unit, &mut rustdoc)?;
Expand All @@ -693,19 +693,35 @@ fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Work> {
let target = Target::clone(&unit.target);
let mut output_options = OutputOptions::new(cx, unit);
let script_metadata = cx.find_build_script_metadata(unit);
let completed_units = Arc::clone(&cx.completed_units);
Ok(Work::new(move |state| {
add_custom_flags(
&mut rustdoc,
&build_script_outputs.lock().unwrap(),
script_metadata,
)?;

// Add the output of scraped examples to the rustdoc command.
// This action must happen after the unit's dependencies have finished,
// because some of those deps may be Docscrape units which have failed.
// So we dynamically determine which `--with-examples` flags to pass here.
if let Some(scrape_outputs) = scrape_outputs {
let completed_units = completed_units.lock().unwrap();
for (metadata, output_path) in &scrape_outputs {
if completed_units[metadata] {
rustdoc.arg("--with-examples").arg(output_path);
}
}
}

let crate_dir = doc_dir.join(&crate_name);
if crate_dir.exists() {
// Remove output from a previous build. This ensures that stale
// files for removed items are removed.
debug!("removing pre-existing doc directory {:?}", crate_dir);
paths::remove_dir_all(crate_dir)?;
}

state.running(&rustdoc);

rustdoc
Expand Down
1 change: 1 addition & 0 deletions src/cargo/core/compiler/standard_lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,7 @@ pub fn generate_std_roots(
/*is_std*/ true,
/*dep_hash*/ 0,
IsArtifact::No,
false,
));
}
}
Expand Down
4 changes: 4 additions & 0 deletions src/cargo/core/compiler/unit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ pub struct UnitInner {
/// This value initially starts as 0, and then is filled in via a
/// second-pass after all the unit dependencies have been computed.
pub dep_hash: u64,
pub can_fail: bool,
}

impl UnitInner {
Expand Down Expand Up @@ -141,6 +142,7 @@ impl fmt::Debug for Unit {
.field("artifact", &self.artifact.is_true())
.field("is_std", &self.is_std)
.field("dep_hash", &self.dep_hash)
.field("can_fail", &self.can_fail)
.finish()
}
}
Expand Down Expand Up @@ -184,6 +186,7 @@ impl UnitInterner {
is_std: bool,
dep_hash: u64,
artifact: IsArtifact,
can_fail: bool,
) -> Unit {
let target = match (is_std, target.kind()) {
// This is a horrible hack to support build-std. `libstd` declares
Expand Down Expand Up @@ -216,6 +219,7 @@ impl UnitInterner {
is_std,
dep_hash,
artifact,
can_fail,
});
Unit { inner }
}
Expand Down
1 change: 1 addition & 0 deletions src/cargo/core/compiler/unit_dependencies.rs
Original file line number Diff line number Diff line change
Expand Up @@ -888,6 +888,7 @@ fn new_unit_dep_with_profile(
state.is_std,
/*dep_hash*/ 0,
artifact.map_or(IsArtifact::No, |_| IsArtifact::Yes),
parent.can_fail,
);
Ok(UnitDep {
unit,
Expand Down
2 changes: 2 additions & 0 deletions src/cargo/core/features.rs
Original file line number Diff line number Diff line change
Expand Up @@ -665,6 +665,7 @@ unstable_cli_options!(
// TODO(wcrichto): move scrape example configuration into Cargo.toml before stabilization
// See: https://github.com/rust-lang/cargo/pull/9525#discussion_r728470927
rustdoc_scrape_examples: Option<String> = ("Allow rustdoc to scrape examples from reverse-dependencies for documentation"),
ignore_scrape_failures: bool = ("When scraping examples for Rustdoc, don't stop compilation if an example fails"),
skip_rustdoc_fingerprint: bool = (HIDDEN),
);

Expand Down Expand Up @@ -938,6 +939,7 @@ impl CliUnstable {
)
}
}
"ignore-scrape-failures" => self.ignore_scrape_failures = parse_empty(k, v)?,
"skip-rustdoc-fingerprint" => self.skip_rustdoc_fingerprint = parse_empty(k, v)?,
"compile-progress" => stabilized_warn(k, "1.30", STABILIZED_COMPILE_PROGRESS),
"offline" => stabilized_err(k, "1.36", STABILIZED_OFFLINE)?,
Expand Down
2 changes: 1 addition & 1 deletion src/cargo/core/workspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1519,7 +1519,7 @@ impl<'cfg> Workspace<'cfg> {
// (not documented) or proc macros (have no scrape-able exports). Additionally,
// naively passing a proc macro's unit_for to new_unit_dep will currently cause
// Cargo to panic, see issue #10545.
self.is_member(&unit.pkg) && !unit.target.for_host()
self.is_member(&unit.pkg) && !unit.target.for_host() && unit.mode.is_doc()
}
}

Expand Down
3 changes: 3 additions & 0 deletions src/cargo/ops/cargo_compile.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1069,6 +1069,7 @@ fn generate_targets(
/*is_std*/ false,
/*dep_hash*/ 0,
IsArtifact::No,
mode.is_doc_scrape() && ws.config().cli_unstable().ignore_scrape_failures,
);
units.insert(unit);
}
Expand Down Expand Up @@ -1631,6 +1632,7 @@ fn traverse_and_share(
unit.is_std,
new_dep_hash,
unit.artifact,
unit.can_fail,
);
assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
new_graph.entry(new_unit.clone()).or_insert(new_deps);
Expand Down Expand Up @@ -1872,6 +1874,7 @@ fn override_rustc_crate_types(
unit.is_std,
unit.dep_hash,
unit.artifact,
unit.can_fail,
)
};
units[0] = match unit.target.kind() {
Expand Down
4 changes: 4 additions & 0 deletions src/cargo/util/dependency_queue.rs
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,10 @@ impl<N: Hash + Eq + Clone, E: Eq + Hash + Clone, V> DependencyQueue<N, E, V> {
self.dep_map.len()
}

    /// Returns a borrowed view of the internal dependency map.
    ///
    /// Each entry maps a node to the set of `(dependency node, edge)` pairs
    /// recorded for it, together with the node's associated value.
    /// NOTE(review): callers appear to use this to snapshot dependency edges
    /// (e.g. to check later whether a unit's dependencies failed) — confirm
    /// that exposing the raw map is intended rather than a filtered view.
    pub fn dep_map(&self) -> &HashMap<N, (HashSet<(N, E)>, V)> {
        &self.dep_map
    }

/// Indicate that something has finished.
///
/// Calling this function indicates that the `node` has produced `edge`. All
Expand Down
Loading