10 changes: 9 additions & 1 deletion compiler/rustc_builtin_macros/src/source_util.rs
@@ -275,7 +275,15 @@ fn load_binary_file(
}
};
match cx.source_map().load_binary_file(&resolved_path) {
Ok(data) => Ok(data),
Ok(data) => {
cx.sess
.psess
.file_depinfo
.borrow_mut()
.insert(Symbol::intern(&resolved_path.to_string_lossy()));

Ok(data)
}
Err(io_err) => {
let mut err = cx.dcx().struct_span_err(
macro_span,
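The hunk above makes a successful binary-file load (e.g. for `include_bytes!`) register the resolved path in `psess.file_depinfo`, so the file is emitted into dep-info output. Below is a minimal, self-contained sketch of that pattern; the `DepInfo` type and function names are illustrative stand-ins, not rustc's internal API:

```rust
use std::cell::RefCell;
use std::collections::BTreeSet;
use std::fs;
use std::io;
use std::path::Path;

/// Illustrative stand-in for the session's `file_depinfo` set.
#[derive(Default)]
struct DepInfo {
    files: RefCell<BTreeSet<String>>,
}

impl DepInfo {
    /// Record a file the compilation depends on.
    fn record(&self, path: &Path) {
        self.files.borrow_mut().insert(path.to_string_lossy().into_owned());
    }
}

/// Load a binary file and, on success, register it as a dependency:
/// the same "insert into dep-info on the Ok path" shape as the patch.
fn load_binary_file(dep_info: &DepInfo, resolved_path: &Path) -> io::Result<Vec<u8>> {
    let data = fs::read(resolved_path)?;
    dep_info.record(resolved_path);
    Ok(data)
}

fn main() -> io::Result<()> {
    let path = std::env::temp_dir().join("depinfo_example.bin");
    fs::write(&path, b"binary\xff")?;

    let dep_info = DepInfo::default();
    let _bytes = load_binary_file(&dep_info, &path)?;
    println!("dep files: {:?}", dep_info.files.borrow());
    Ok(())
}
```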
29 changes: 14 additions & 15 deletions compiler/rustc_interface/src/passes.rs
@@ -9,6 +9,7 @@ use rustc_ast::{self as ast, CRATE_NODE_ID};
use rustc_attr_parsing::{AttributeParser, Early, ShouldEmit};
use rustc_codegen_ssa::traits::CodegenBackend;
use rustc_codegen_ssa::{CodegenResults, CrateInfo};
use rustc_data_structures::indexmap::IndexMap;
use rustc_data_structures::jobserver::Proxy;
use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, WorkerLocal};
@@ -584,7 +585,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
let result: io::Result<()> = try {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let mut files: Vec<(String, u64, Option<SourceFileHash>)> = sess
let mut files: IndexMap<String, (u64, Option<SourceFileHash>)> = sess
.source_map()
.files()
.iter()
@@ -593,10 +594,12 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
.map(|fmap| {
(
escape_dep_filename(&fmap.name.prefer_local_unconditionally().to_string()),
// This needs to be unnormalized,
// as external tools wouldn't know how rustc normalizes them
fmap.unnormalized_source_len as u64,
fmap.checksum_hash,
(
// This needs to be unnormalized,
// as external tools wouldn't know how rustc normalizes them
fmap.unnormalized_source_len as u64,
fmap.checksum_hash,
),
)
})
.collect();
@@ -614,7 +617,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
fn hash_iter_files<P: AsRef<Path>>(
it: impl Iterator<Item = P>,
checksum_hash_algo: Option<SourceFileHashAlgorithm>,
) -> impl Iterator<Item = (P, u64, Option<SourceFileHash>)> {
) -> impl Iterator<Item = (P, (u64, Option<SourceFileHash>))> {
it.map(move |path| {
match checksum_hash_algo.and_then(|algo| {
fs::File::open(path.as_ref())
@@ -630,8 +633,8 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
})
.ok()
}) {
Some((file_len, checksum)) => (path, file_len, Some(checksum)),
None => (path, 0, None),
Some((file_len, checksum)) => (path, (file_len, Some(checksum))),
None => (path, (0, None)),
}
})
}
@@ -705,18 +708,14 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
file,
"{}: {}\n",
path.display(),
files
.iter()
.map(|(path, _file_len, _checksum_hash_algo)| path.as_str())
.intersperse(" ")
.collect::<String>()
files.keys().map(String::as_str).intersperse(" ").collect::<String>()
)?;
}

// Emit a fake target for each input file to the compilation. This
// prevents `make` from spitting out an error if a file is later
// deleted. For more info see #28735
for (path, _file_len, _checksum_hash_algo) in &files {
for path in files.keys() {
writeln!(file, "{path}:")?;
}

@@ -745,7 +744,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
if sess.opts.unstable_opts.checksum_hash_algorithm().is_some() {
files
.iter()
.filter_map(|(path, file_len, hash_algo)| {
.filter_map(|(path, (file_len, hash_algo))| {
hash_algo.map(|hash_algo| (path, file_len, hash_algo))
})
.try_for_each(|(path, file_len, checksum_hash)| {
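Switching `files` from a `Vec` of tuples to an `IndexMap` keyed by the escaped filename means a path that ends up registered twice (for example through both the source map and `file_depinfo`) produces a single entry, while the emission order stays deterministic. A rough sketch of that behavior using the public `indexmap` crate (the diff reaches `IndexMap` through a `rustc_data_structures` re-export); the values here stand in for `(unnormalized_source_len, checksum_hash)`:

```rust
use indexmap::IndexMap;

fn main() {
    // Key: escaped file name; value: (file_len, optional checksum), as in write_out_deps.
    let mut files: IndexMap<String, (u64, Option<String>)> = IndexMap::new();

    // Entries discovered through the source map.
    files.insert("lib.rs".to_string(), (120, Some("blake3:aaaa".to_string())));
    files.insert("foo.rs".to_string(), (119, Some("blake3:bbbb".to_string())));

    // Extra dep-info files (e.g. from include_bytes!). Re-inserting an existing
    // key overwrites its value but keeps its position, so no duplicate targets
    // end up in the .d file.
    files.insert("binary_file".to_string(), (7, None));
    files.insert("lib.rs".to_string(), (120, Some("blake3:aaaa".to_string())));

    // One dependency rule plus a fake target per file, in insertion order.
    let deps: Vec<&str> = files.keys().map(String::as_str).collect();
    println!("lib.d: {}", deps.join(" "));
    for path in files.keys() {
        println!("{path}:");
    }
}
```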
39 changes: 18 additions & 21 deletions compiler/rustc_query_impl/src/plumbing.rs
@@ -35,6 +35,8 @@ use rustc_span::def_id::LOCAL_CRATE;

use crate::QueryConfigRestored;

/// Implements [`QueryContext`] for use by [`rustc_query_system`], since that
/// crate does not have direct access to [`TyCtxt`].
#[derive(Copy, Clone)]
pub struct QueryCtxt<'tcx> {
pub tcx: TyCtxt<'tcx>,
@@ -47,15 +49,6 @@ impl<'tcx> QueryCtxt<'tcx> {
}
}

impl<'tcx> std::ops::Deref for QueryCtxt<'tcx> {
type Target = TyCtxt<'tcx>;

#[inline]
fn deref(&self) -> &Self::Target {
&self.tcx
}
}

impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
type Deps = rustc_middle::dep_graph::DepsType;
type DepContext = TyCtxt<'tcx>;
@@ -69,14 +62,16 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
impl QueryContext for QueryCtxt<'_> {
#[inline]
fn jobserver_proxy(&self) -> &Proxy {
&*self.jobserver_proxy
&self.tcx.jobserver_proxy
}

#[inline]
fn next_job_id(self) -> QueryJobId {
QueryJobId(
NonZero::new(self.query_system.jobs.fetch_add(1, std::sync::atomic::Ordering::Relaxed))
.unwrap(),
NonZero::new(
self.tcx.query_system.jobs.fetch_add(1, std::sync::atomic::Ordering::Relaxed),
)
.unwrap(),
)
}

@@ -113,7 +108,8 @@ impl QueryContext for QueryCtxt<'_> {
self,
prev_dep_node_index: SerializedDepNodeIndex,
) -> Option<QuerySideEffect> {
self.query_system
self.tcx
.query_system
.on_disk_cache
.as_ref()
.and_then(|c| c.load_side_effect(self.tcx, prev_dep_node_index))
@@ -122,7 +118,7 @@ impl QueryContext for QueryCtxt<'_> {
#[inline(never)]
#[cold]
fn store_side_effect(self, dep_node_index: DepNodeIndex, side_effect: QuerySideEffect) {
if let Some(c) = self.query_system.on_disk_cache.as_ref() {
if let Some(c) = self.tcx.query_system.on_disk_cache.as_ref() {
c.store_side_effect(dep_node_index, side_effect)
}
}
@@ -140,7 +136,9 @@ impl QueryContext for QueryCtxt<'_> {
// as `self`, so we use `with_related_context` to relate the 'tcx lifetimes
// when accessing the `ImplicitCtxt`.
tls::with_related_context(self.tcx, move |current_icx| {
if depth_limit && !self.recursion_limit().value_within_limit(current_icx.query_depth) {
if depth_limit
&& !self.tcx.recursion_limit().value_within_limit(current_icx.query_depth)
{
self.depth_limit_error(token);
}

Expand All @@ -161,16 +159,16 @@ impl QueryContext for QueryCtxt<'_> {
let query_map = self.collect_active_jobs(true).expect("failed to collect active queries");
let (info, depth) = job.find_dep_kind_root(query_map);

let suggested_limit = match self.recursion_limit() {
let suggested_limit = match self.tcx.recursion_limit() {
Limit(0) => Limit(2),
limit => limit * 2,
};

self.sess.dcx().emit_fatal(QueryOverflow {
self.tcx.sess.dcx().emit_fatal(QueryOverflow {
span: info.job.span,
note: QueryOverflowNote { desc: info.query.description, depth },
suggested_limit,
crate_name: self.crate_name(LOCAL_CRATE),
crate_name: self.tcx.crate_name(LOCAL_CRATE),
});
}
}
@@ -367,7 +365,7 @@ pub(crate) fn encode_query_results<'a, 'tcx, Q>(
Q: super::QueryConfigRestored<'tcx>,
Q::RestoredValue: Encodable<CacheEncoder<'a, 'tcx>>,
{
let _timer = qcx.profiler().generic_activity_with_arg("encode_query_results_for", query.name());
let _timer = qcx.tcx.prof.generic_activity_with_arg("encode_query_results_for", query.name());

assert!(query.query_state(qcx).all_inactive());
let cache = query.query_cache(qcx);
Expand All @@ -389,8 +387,7 @@ pub(crate) fn query_key_hash_verify<'tcx>(
query: impl QueryConfig<QueryCtxt<'tcx>>,
qcx: QueryCtxt<'tcx>,
) {
let _timer =
qcx.profiler().generic_activity_with_arg("query_key_hash_verify_for", query.name());
let _timer = qcx.tcx.prof.generic_activity_with_arg("query_key_hash_verify_for", query.name());

let mut map = UnordMap::default();

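The `Deref<Target = TyCtxt<'tcx>>` impl on `QueryCtxt` is removed, so every use of the inner context now goes through `self.tcx` explicitly, as the rewritten call sites above show. A toy illustration of that style, with hypothetical types rather than rustc's:

```rust
#[derive(Copy, Clone)]
struct Ctxt<'a> {
    crate_name: &'a str,
}

impl<'a> Ctxt<'a> {
    fn recursion_limit(&self) -> usize {
        128
    }
    fn crate_name(&self) -> &'a str {
        self.crate_name
    }
}

/// Thin wrapper like `QueryCtxt`: no Deref impl, so delegation is spelled out.
#[derive(Copy, Clone)]
struct QueryCtxt<'a> {
    tcx: Ctxt<'a>,
}

impl<'a> QueryCtxt<'a> {
    fn depth_limit_error(&self, depth: usize) {
        // With Deref this could be written `self.recursion_limit()`, hiding the
        // indirection; going through `self.tcx` keeps wrapper and context APIs distinct.
        if depth > self.tcx.recursion_limit() {
            eprintln!("query depth limit reached in `{}`", self.tcx.crate_name());
        }
    }
}

fn main() {
    let qcx = QueryCtxt { tcx: Ctxt { crate_name: "demo" } };
    qcx.depth_limit_error(200);
}
```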
6 changes: 3 additions & 3 deletions compiler/rustc_resolve/src/ident.rs
@@ -58,7 +58,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
orig_ctxt: Span,
derive_fallback_lint_id: Option<NodeId>,
mut visitor: impl FnMut(
&mut CmResolver<'r, 'ra, 'tcx>,
CmResolver<'_, 'ra, 'tcx>,
Scope<'ra>,
UsePrelude,
Span,
@@ -165,7 +165,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
if visit {
let use_prelude = if use_prelude { UsePrelude::Yes } else { UsePrelude::No };
if let ControlFlow::Break(break_result) =
visitor(&mut self, scope, use_prelude, ctxt)
visitor(self.reborrow(), scope, use_prelude, ctxt)
{
return Some(break_result);
}
@@ -438,7 +438,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
parent_scope,
orig_ident.span,
derive_fallback_lint_id,
|this, scope, use_prelude, ctxt| {
|mut this, scope, use_prelude, ctxt| {
let ident = Ident::new(orig_ident.name, ctxt);
// The passed `ctxt` is already normalized, so avoid expensive double normalization.
let ident = Macros20NormalizedIdent(ident);
2 changes: 1 addition & 1 deletion compiler/rustc_resolve/src/imports.rs
@@ -893,7 +893,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
};

let mut indeterminate_count = 0;
self.per_ns_cm(|this, ns| {
self.per_ns_cm(|mut this, ns| {
if !type_ns_only || ns == TypeNS {
if bindings[ns].get() != PendingDecl::Pending {
return;
10 changes: 5 additions & 5 deletions compiler/rustc_resolve/src/lib.rs
@@ -1831,13 +1831,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
f(self, MacroNS);
}

fn per_ns_cm<'r, F: FnMut(&mut CmResolver<'r, 'ra, 'tcx>, Namespace)>(
fn per_ns_cm<'r, F: FnMut(CmResolver<'_, 'ra, 'tcx>, Namespace)>(
mut self: CmResolver<'r, 'ra, 'tcx>,
mut f: F,
) {
f(&mut self, TypeNS);
f(&mut self, ValueNS);
f(&mut self, MacroNS);
f(self.reborrow(), TypeNS);
f(self.reborrow(), ValueNS);
f(self, MacroNS);
}

fn is_builtin_macro(&self, res: Res) -> bool {
@@ -1902,7 +1902,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
}

let scope_set = ScopeSet::All(TypeNS);
self.cm().visit_scopes(scope_set, parent_scope, ctxt, None, |this, scope, _, _| {
self.cm().visit_scopes(scope_set, parent_scope, ctxt, None, |mut this, scope, _, _| {
match scope {
Scope::ModuleNonGlobs(module, _) => {
this.get_mut().traits_in_module(module, assoc_item, &mut found_traits);
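The resolver closures now receive the `CmResolver` handle by value, and `per_ns_cm` / `visit_scopes` pass `self.reborrow()` for all but the final call instead of threading `&mut CmResolver` through. A self-contained sketch of that reborrowing pattern for a by-value wrapper around a mutable borrow; the `Handle` type here is illustrative, not the real `CmResolver`:

```rust
/// By-value handle over a mutable borrow, in the spirit of `CmResolver`.
struct Handle<'a, T> {
    inner: &'a mut T,
}

impl<'a, T> Handle<'a, T> {
    /// Hand out a shorter-lived handle; the original stays usable afterwards.
    fn reborrow(&mut self) -> Handle<'_, T> {
        Handle { inner: &mut *self.inner }
    }

    fn get_mut(&mut self) -> &mut T {
        &mut *self.inner
    }
}

/// Mirror of the new `per_ns_cm` shape: the closure takes the handle by value,
/// the caller reborrows for every call except the last, which consumes it.
fn per_ns<T>(mut handle: Handle<'_, T>, mut f: impl FnMut(Handle<'_, T>, &str)) {
    f(handle.reborrow(), "type");
    f(handle.reborrow(), "value");
    f(handle, "macro");
}

fn main() {
    let mut visited = Vec::new();
    per_ns(Handle { inner: &mut visited }, |mut h, ns| {
        h.get_mut().push(ns.to_string());
    });
    assert_eq!(visited, ["type", "value", "macro"]);
}
```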
1 change: 1 addition & 0 deletions compiler/rustc_span/src/symbol.rs
@@ -270,6 +270,7 @@ symbols! {
Into,
IntoFuture,
IntoIterator,
IntoIteratorItem,
IoBufRead,
IoLines,
IoRead,
@@ -4390,6 +4390,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
param_env: ty::ParamEnv<'tcx>,
path_segment: &hir::PathSegment<'_>,
args: &[hir::Expr<'_>],
prev_ty: Ty<'_>,
err: &mut Diag<'_, G>,
) {
let tcx = self.tcx;
@@ -4403,6 +4404,47 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
let TypeError::Sorts(expected_found) = diff else {
continue;
};
if tcx.is_diagnostic_item(sym::IntoIteratorItem, *def_id)
&& path_segment.ident.name == sym::iter
&& self.can_eq(
param_env,
Ty::new_ref(
tcx,
tcx.lifetimes.re_erased,
expected_found.found,
ty::Mutability::Not,
),
*ty,
)
&& let [] = args
{
// Used `.iter()` when `.into_iter()` was likely meant.
err.span_suggestion_verbose(
path_segment.ident.span,
format!("consider consuming the `{prev_ty}` to construct the `Iterator`"),
"into_iter".to_string(),
Applicability::MachineApplicable,
);
}
if tcx.is_diagnostic_item(sym::IntoIteratorItem, *def_id)
&& path_segment.ident.name == sym::into_iter
&& self.can_eq(
param_env,
expected_found.found,
Ty::new_ref(tcx, tcx.lifetimes.re_erased, *ty, ty::Mutability::Not),
)
&& let [] = args
{
// Used `.into_iter()` when `.iter()` was likely meant.
err.span_suggestion_verbose(
path_segment.ident.span,
format!(
"consider not consuming the `{prev_ty}` to construct the `Iterator`"
),
"iter".to_string(),
Applicability::MachineApplicable,
);
}
if tcx.is_diagnostic_item(sym::IteratorItem, *def_id)
&& path_segment.ident.name == sym::map
&& self.can_eq(param_env, expected_found.found, *ty)
@@ -4515,19 +4557,20 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
expr = rcvr_expr;
let assocs_in_this_method =
self.probe_assoc_types_at_expr(&type_diffs, span, prev_ty, expr.hir_id, param_env);
prev_ty = self.resolve_vars_if_possible(
typeck_results.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(tcx)),
);
self.look_for_iterator_item_mistakes(
&assocs_in_this_method,
typeck_results,
&type_diffs,
param_env,
path_segment,
args,
prev_ty,
err,
);
assocs.push(assocs_in_this_method);
prev_ty = self.resolve_vars_if_possible(
typeck_results.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(tcx)),
);

if let hir::ExprKind::Path(hir::QPath::Resolved(None, path)) = expr.kind
&& let hir::Path { res: Res::Local(hir_id), .. } = path
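The new `IntoIteratorItem` diagnostic item lets this suggestion code in the trait-selection error reporting detect when a method chain's `Item` mismatch is just `&T` vs `T`, i.e. `.iter()` was used where `.into_iter()` fits (or the reverse), and offer a machine-applicable rename. A hedged example of the first case; the exact rustc output is not reproduced here, only the shape of the mistake:

```rust
fn main() {
    let rows: Vec<Vec<u32>> = vec![vec![1, 2], vec![3]];

    // `.iter()` borrows, so the chain yields `&u32` items and this fails to
    // collect into `Vec<u32>`; the Item types differ only by a reference,
    // which is the pattern the new check matches on the `iter` segment:
    //
    //     let flat: Vec<u32> = rows.iter().flatten().collect();
    //
    // Consuming the outer vector lines the Item types up:
    let flat: Vec<u32> = rows.into_iter().flatten().collect();
    assert_eq!(flat, vec![1, 2, 3]);
}
```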
1 change: 1 addition & 0 deletions library/core/src/iter/traits/collect.rs
@@ -281,6 +281,7 @@ pub trait FromIterator<A>: Sized {
#[stable(feature = "rust1", since = "1.0.0")]
pub trait IntoIterator {
/// The type of the elements being iterated over.
#[rustc_diagnostic_item = "IntoIteratorItem"]
#[stable(feature = "rust1", since = "1.0.0")]
type Item;

1 change: 1 addition & 0 deletions tests/run-make/checksum-freshness/binary_file
@@ -0,0 +1 @@
binaryÿ
6 changes: 4 additions & 2 deletions tests/run-make/checksum-freshness/expected.d
@@ -1,6 +1,8 @@
lib.d: lib.rs foo.rs
lib.d: lib.rs foo.rs binary_file

lib.rs:
foo.rs:
# checksum:blake3=94af75ee4ed805434484c3de51c9025278e5c3ada2315e2592052e102168a503 file_len:120 lib.rs
binary_file:
# checksum:blake3=4ac56f3f877798fb762d714c7bcb72e70133f4cc585f80dbd99c07755ae2c7f6 file_len:222 lib.rs
# checksum:blake3=2720e17bfda4f3b2a5c96bb61b7e76ed8ebe3359b34128c0e5d8032c090a4f1a file_len:119 foo.rs
# checksum:blake3=119a5db8711914922c5b1c1908be4958175c5afa95c08888de594725329b5439 file_len:7 binary_file
3 changes: 2 additions & 1 deletion tests/run-make/checksum-freshness/lib.rs
@@ -1,7 +1,8 @@
// A basic library to be used in tests with no real purpose.

mod foo;

// Binary file with invalid UTF-8 sequence.
static BINARY_FILE: &[u8] = include_bytes!("binary_file");
pub fn sum(a: i32, b: i32) -> i32 {
a + b
}