Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions crates/cargo-util-schemas/src/core/source_kind.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ pub enum SourceKind {
LocalRegistry,
/// A directory-based registry.
Directory,
/// Package sources distributed with the rust toolchain
Builtin,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This will impact the unique identifier for the packages from this source in Cargo's JSON output when compiling, in `cargo metadata`, in `cargo <cmd> -p`, etc.

}

// The hash here is important for what folder packages get downloaded into.
Expand All @@ -40,6 +42,7 @@ impl SourceKind {
SourceKind::SparseRegistry => None,
SourceKind::LocalRegistry => Some("local-registry"),
SourceKind::Directory => Some("directory"),
SourceKind::Builtin => Some("builtin"),
}
}
}
Expand Down Expand Up @@ -71,6 +74,10 @@ impl Ord for SourceKind {
(_, SourceKind::Directory) => Ordering::Greater,

(SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b),
(SourceKind::Git(_), _) => Ordering::Less,
(_, SourceKind::Git(_)) => Ordering::Greater,

(SourceKind::Builtin, SourceKind::Builtin) => Ordering::Equal,
}
}
}
Expand Down
1 change: 1 addition & 0 deletions src/cargo/core/compiler/standard_lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ pub fn resolve_std<'gctx>(
HasDevUnits::No,
crate::core::resolver::features::ForceAllTargets::No,
dry_run,
false,
)?;
debug_assert_eq!(resolve.specs_and_features.len(), 1);
Ok((
Expand Down
88 changes: 45 additions & 43 deletions src/cargo/core/compiler/unit_dependencies.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ struct State<'a, 'gctx> {
std_resolve: Option<&'a Resolve>,
/// Like `usr_features` but for building standard library (`-Zbuild-std`).
std_features: Option<&'a ResolvedFeatures>,
// The root units of any opaque dependencies present in the user resolve
opaque_roots: &'a HashMap<CompileKind, Vec<Unit>>,
/// `true` while generating the dependencies for the standard library.
is_std: bool,
/// The high-level operation requested by the user.
Expand Down Expand Up @@ -92,7 +94,7 @@ pub fn build_unit_dependencies<'a, 'gctx>(
resolve: &'a Resolve,
features: &'a ResolvedFeatures,
std_resolve: Option<&'a (Resolve, ResolvedFeatures)>,
roots: &[Unit],
roots: &[Unit], //TODO: builtins can be roots if requested on the command line
scrape_units: &[Unit],
std_roots: &HashMap<CompileKind, Vec<Unit>>,
intent: UserIntent,
Expand All @@ -119,6 +121,7 @@ pub fn build_unit_dependencies<'a, 'gctx>(
usr_features: features,
std_resolve,
std_features,
opaque_roots: std_roots,
is_std: false,
intent,
target_data,
Expand All @@ -129,15 +132,14 @@ pub fn build_unit_dependencies<'a, 'gctx>(
};

let std_unit_deps = calc_deps_of_std(&mut state, std_roots)?;
if let Some(std_unit_deps) = std_unit_deps {
attach_std_deps(&mut state, std_unit_deps);
}

deps_of_roots(roots, &mut state)?;
super::links::validate_links(state.resolve(), &state.unit_dependencies)?;
// Hopefully there aren't any links conflicts with the standard library?

if let Some(std_unit_deps) = std_unit_deps {
attach_std_deps(&mut state, std_roots, std_unit_deps);
}

connect_run_custom_build_deps(&mut state);

// Dependencies are used in tons of places throughout the backend, many of
Expand Down Expand Up @@ -188,35 +190,14 @@ fn calc_deps_of_std(
Ok(Some(std::mem::take(&mut state.unit_dependencies)))
}

/// Add the standard library units to the `unit_dependencies`.
fn attach_std_deps(
state: &mut State<'_, '_>,
std_roots: &HashMap<CompileKind, Vec<Unit>>,
std_unit_deps: UnitGraph,
) {
// Attach the standard library as a dependency of every target unit.
let mut found = false;
for (unit, deps) in state.unit_dependencies.iter_mut() {
if !unit.kind.is_host() && !unit.mode.is_run_custom_build() {
deps.extend(std_roots[&unit.kind].iter().map(|unit| UnitDep {
unit: unit.clone(),
unit_for: UnitFor::new_normal(unit.kind),
extern_crate_name: unit.pkg.name(),
dep_name: None,
// TODO: Does this `public` make sense?
public: true,
noprelude: true,
}));
found = true;
/// Add the dependencies of standard library units to the `unit_dependencies`.
fn attach_std_deps(state: &mut State<'_, '_>, std_unit_deps: UnitGraph) {
for (unit, deps) in std_unit_deps.into_iter() {
if unit.pkg.package_id().name() == "sysroot" {
continue;
}
}
// And also include the dependencies of the standard library itself. Don't
// include these if no units actually needed the standard library.
if found {
for (unit, deps) in std_unit_deps.into_iter() {
if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) {
panic!("std unit collision with existing unit: {:?}", other_unit);
}
if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) {
panic!("std unit collision with existing unit: {:?}", other_unit);
}
}
}
Expand Down Expand Up @@ -333,16 +314,37 @@ fn compute_deps(
)?;
ret.push(unit_dep);
} else {
let unit_dep = new_unit_dep(
state,
unit,
dep_pkg,
dep_lib,
dep_unit_for,
unit.kind.for_target(dep_lib),
mode,
IS_NO_ARTIFACT_DEP,
)?;
// if builtin, return from state.opaque_roots
let unit_dep = if dep_pkg_id.source_id().is_builtin() {
let unit: Vec<_> = state.opaque_roots[&unit.kind.for_target(dep_lib)]
.iter()
.filter(|&u| u.pkg.name() == dep_pkg_id.name())
.collect();
assert!(
unit.len() == 1,
"libstd was resolved with all possible builtin deps as roots"
);
let unit = unit[0];
UnitDep {
unit: unit.clone(),
unit_for: UnitFor::new_normal(unit.kind),
extern_crate_name: unit.pkg.name(),
dep_name: None,
public: true,
noprelude: true,
}
} else {
new_unit_dep(
state,
unit,
dep_pkg,
dep_lib,
dep_unit_for,
unit.kind.for_target(dep_lib),
mode,
IS_NO_ARTIFACT_DEP,
)?
};
ret.push(unit_dep);
}

Expand Down
32 changes: 32 additions & 0 deletions src/cargo/core/dependency.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,10 @@ struct Inner {
// This dependency should be used only for this platform.
// `None` means *all platforms*.
platform: Option<Platform>,

// Opaque dependencies should be resolved with a separate resolver run, and handled
// by unit generation.
opaque: bool,
}

#[derive(Serialize)]
Expand Down Expand Up @@ -162,6 +166,30 @@ impl Dependency {
platform: None,
explicit_name_in_toml: None,
artifact: None,
opaque: false,
}),
}
}

pub fn new_injected_builtin(name: InternedString) -> Dependency {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What do you see as the role of this compared to the other `new` constructors?

assert!(!name.is_empty());
Dependency {
inner: Arc::new(Inner {
name,
source_id: SourceId::new_builtin(&name).expect("package name is valid url"),
registry_id: None,
req: OptVersionReq::Any,
kind: DepKind::Normal,
only_match_name: true,
optional: false,
public: false,
features: Vec::new(),
default_features: true,
specified_req: false,
platform: None,
explicit_name_in_toml: None,
artifact: None,
opaque: true,
}),
}
}
Expand Down Expand Up @@ -455,6 +483,10 @@ impl Dependency {
pub(crate) fn maybe_lib(&self) -> bool {
self.artifact().map(|a| a.is_lib).unwrap_or(true)
}

pub fn is_opaque(&self) -> bool {
self.inner.opaque
}
}

/// The presence of an artifact turns an ordinary dependency into an Artifact dependency.
Expand Down
33 changes: 32 additions & 1 deletion src/cargo/core/registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
//! The former is just one kind of source,
//! while the latter involves operations on the registry Web API.

use std::collections::{HashMap, HashSet};
use std::collections::{BTreeMap, HashMap, HashSet};
use std::task::{Poll, ready};

use crate::core::{Dependency, PackageId, PackageSet, Patch, SourceId, Summary};
Expand All @@ -24,6 +24,7 @@ use crate::util::{CanonicalUrl, GlobalContext};
use annotate_snippets::Level;
use anyhow::Context as _;
use itertools::Itertools;
use semver::Version;
use tracing::{debug, trace};
use url::Url;

Expand Down Expand Up @@ -724,6 +725,36 @@ impl<'gctx> Registry for PackageRegistry<'gctx> {
)
})?;

if dep.is_opaque() {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd like to find a way to ask the source for the opaque variant of the summary. The tricky thing will be that we need to work with both variants.

// Currently, all opaque dependencies are builtins.
// Create a dummy Summary that can be replaced with a real package during
// unit generation
trace!(
"Injecting package to satisfy builtin dependency on {}",
dep.package_name()
);
let ver = if dep.package_name() == "compiler_builtins" {
//TODO: hack
Version::new(0, 1, 160)
} else {
Version::new(0, 0, 0)
};
let pkg_id = PackageId::new(
dep.package_name(),
ver,
SourceId::new_builtin(&dep.package_name()).expect("SourceId ok"),
);

let summary = Summary::new(
pkg_id,
vec![],
&BTreeMap::new(), // TODO: bodge
Option::<String>::None,
None,
)?;
f(IndexSummary::Candidate(summary));
return Poll::Ready(Ok(()));
}
let source = self.sources.get_mut(dep.source_id());
match (override_summary, source) {
(Some(_), None) => {
Expand Down
4 changes: 4 additions & 0 deletions src/cargo/core/resolver/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ pub struct ResolverContext {
/// a way to look up for a package in activations what packages required it
/// and all of the exact deps that it fulfilled.
pub parents: Graph<PackageId, im_rc::HashSet<Dependency, rustc_hash::FxBuildHasher>>,
// Opaque dependencies require a separate resolver run as they allow for multiple
// different semver-compatible versions of crates in the final resolve. This is the
// (unactivated) set of Summaries that need handling in a future invocation
//pub promises: HashSet<Dependency>,
}

/// When backtracking it can be useful to know how far back to go.
Expand Down
38 changes: 35 additions & 3 deletions src/cargo/core/resolver/dep_cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,21 +48,42 @@ pub struct RegistryQueryer<'a> {
>,
/// all the cases we ended up using a supplied replacement
used_replacements: HashMap<PackageId, Summary>,
/// Cached builtin dependencies that should be injected. Empty implies that builtins shouldn't
/// be injected
builtins: Vec<Dependency>,
}

impl<'a> RegistryQueryer<'a> {
pub fn new(
registry: &'a mut dyn Registry,
replacements: &'a [(PackageIdSpec, Dependency)],
version_prefs: &'a VersionPreferences,
inject_builtins: bool,
) -> Self {
let builtins = if inject_builtins {
[
"std",
"alloc",
"core",
"panic_unwind",
"proc_macro",
"compiler_builtins",
]
.iter()
.map(|&krate| Dependency::new_injected_builtin(krate.into()))
.collect()
} else {
vec![]
};

RegistryQueryer {
registry,
replacements,
version_prefs,
registry_cache: HashMap::new(),
summary_cache: HashMap::new(),
used_replacements: HashMap::new(),
builtins,
}
}

Expand Down Expand Up @@ -238,10 +259,11 @@ impl<'a> RegistryQueryer<'a> {
{
return Ok(out.0.clone());
}

// First, figure out our set of dependencies based on the requested set
// of features. This also calculates what features we're going to enable
// for our own dependencies.
let (used_features, deps) = resolve_features(parent, candidate, opts)?;
let (used_features, deps) = resolve_features(parent, candidate, opts, &self.builtins)?;

// Next, transform all dependencies into a list of possible candidates
// which can satisfy that dependency.
Expand Down Expand Up @@ -291,18 +313,28 @@ pub fn resolve_features<'b>(
parent: Option<PackageId>,
s: &'b Summary,
opts: &'b ResolveOpts,
builtins: &[Dependency],
) -> ActivateResult<(HashSet<InternedString>, Vec<(Dependency, FeaturesSet)>)> {
// First, filter by dev-dependencies.
let deps = s.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps);

let deps = deps
.into_iter()
.filter(|d| d.is_transitive() || opts.dev_deps);
let builtin_deps = if s.source_id().is_builtin() {
// Don't add builtin deps to dummy builtin packages
None
} else {
Some(builtins.iter())
};
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I assume this is for implicit builtins. Is there a reason you chose to do this here?


let reqs = build_requirements(parent, s, opts)?;
let mut ret = Vec::new();
let default_dep = BTreeSet::new();
let mut valid_dep_names = HashSet::new();

// Next, collect all actually enabled dependencies and their features.
for dep in deps {
for dep in deps.chain(builtin_deps.into_iter().flatten()) {
// Skip optional dependencies, but not those enabled through a
// feature
if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) {
Expand Down
3 changes: 2 additions & 1 deletion src/cargo/core/resolver/encode.rs
Original file line number Diff line number Diff line change
Expand Up @@ -661,7 +661,8 @@ pub fn encodable_package_id(
}

fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<TomlLockfileSourceId> {
if id.is_path() {
// TODO: Not enough to stop builtins from appearing in the lockfile
if id.is_path() || id.is_builtin() {
None
} else {
Some(
Expand Down
Loading
Loading