Auto merge of #69118 - Dylan-DPC:rollup-7hpm1fj, r=Dylan-DPC
Rollup of 9 pull requests

Successful merges:

 - #67642 (Relax bounds on HashMap/HashSet)
 - #68848 (Hasten macro parsing)
 - #69008 (Properly use parent generics for opaque types)
 - #69048 (Suggestion when encountering assoc types from hrtb)
 - #69049 (Optimize image sizes)
 - #69050 (Micro-optimize the heck out of LEB128 reading and writing.)
 - #69068 (Make the SGX arg cleanup implementation a NOP)
 - #69082 (When expecting `BoxFuture` and using `async {}`, suggest `Box::pin`)
 - #69104 (bootstrap: Configure cmake when building sanitizer runtimes)

Failed merges:

r? @ghost
bors committed Feb 13, 2020
2 parents ba18875 + 1ddf250 commit 2e6eace
Showing 36 changed files with 584 additions and 253 deletions.
28 changes: 19 additions & 9 deletions src/bootstrap/native.rs
@@ -262,7 +262,7 @@ impl Step for Llvm {
cfg.define("PYTHON_EXECUTABLE", python);
}

configure_cmake(builder, target, &mut cfg);
configure_cmake(builder, target, &mut cfg, true);

// FIXME: we don't actually need to build all LLVM tools and all LLVM
// libraries here, e.g., we just want a few components and a few
@@ -301,7 +301,12 @@ fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) {
panic!("\n\nbad LLVM version: {}, need >=7.0\n\n", version)
}

fn configure_cmake(builder: &Builder<'_>, target: Interned<String>, cfg: &mut cmake::Config) {
fn configure_cmake(
builder: &Builder<'_>,
target: Interned<String>,
cfg: &mut cmake::Config,
use_compiler_launcher: bool,
) {
// Do not print installation messages for up-to-date files.
// LLVM and LLD builds can produce a lot of those and hit CI limits on log size.
cfg.define("CMAKE_INSTALL_MESSAGE", "LAZY");
@@ -372,9 +377,11 @@ fn configure_cmake(builder: &Builder<'_>, target: Interned<String>, cfg: &mut cm
} else {
// If ccache is configured we inform the build a little differently how
// to invoke ccache while also invoking our compilers.
if let Some(ref ccache) = builder.config.ccache {
cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache)
.define("CMAKE_CXX_COMPILER_LAUNCHER", ccache);
if use_compiler_launcher {
if let Some(ref ccache) = builder.config.ccache {
cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache)
.define("CMAKE_CXX_COMPILER_LAUNCHER", ccache);
}
}
cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
.define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
@@ -458,7 +465,7 @@ impl Step for Lld {
t!(fs::create_dir_all(&out_dir));

let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld"));
configure_cmake(builder, target, &mut cfg);
configure_cmake(builder, target, &mut cfg, true);

// This is an awful, awful hack. Discovered when we migrated to using
// clang-cl to compile LLVM/LLD it turns out that LLD, when built out of
@@ -595,10 +602,7 @@ impl Step for Sanitizers {
let _time = util::timeit(&builder);

let mut cfg = cmake::Config::new(&compiler_rt_dir);
cfg.target(&self.target);
cfg.host(&builder.config.build);
cfg.profile("Release");

cfg.define("CMAKE_C_COMPILER_TARGET", self.target);
cfg.define("COMPILER_RT_BUILD_BUILTINS", "OFF");
cfg.define("COMPILER_RT_BUILD_CRT", "OFF");
@@ -610,6 +614,12 @@ impl Step for Sanitizers {
cfg.define("COMPILER_RT_USE_LIBCXX", "OFF");
cfg.define("LLVM_CONFIG_PATH", &llvm_config);

// On Darwin targets the sanitizer runtimes are built as universal binaries.
// Unfortunately sccache currently lacks support to build them successfully.
// Disable compiler launcher on Darwin targets to avoid potential issues.
let use_compiler_launcher = !self.target.contains("apple-darwin");
configure_cmake(builder, self.target, &mut cfg, use_compiler_launcher);

t!(fs::create_dir_all(&out_dir));
cfg.out_dir(out_dir);

Binary file modified src/etc/installer/gfx/rust-logo.png
4 changes: 4 additions & 0 deletions src/libcore/marker.rs
@@ -727,6 +727,10 @@ unsafe impl<T: ?Sized> Freeze for &mut T {}
/// [`Pin<P>`]: ../pin/struct.Pin.html
/// [`pin module`]: ../../std/pin/index.html
#[stable(feature = "pin", since = "1.33.0")]
#[rustc_on_unimplemented(
on(_Self = "std::future::Future", note = "consider using `Box::pin`",),
message = "`{Self}` cannot be unpinned"
)]
#[lang = "unpin"]
pub auto trait Unpin {}

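To illustrate the effect of the new `rustc_on_unimplemented` attribute on `Unpin` (a hedged sketch, not part of the diff; the `BoxFuture` alias is assumed to match the common futures-style definition): returning a bare `async` block where a pinned, boxed future is expected now gets a "consider using `Box::pin`" note, and the boxed form below compiles.

    use std::future::Future;
    use std::pin::Pin;

    // Assumed alias, mirroring futures::future::BoxFuture.
    type BoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;

    fn make_future() -> BoxFuture<'static, i32> {
        // Returning `async { 42 }` directly would fail: the anonymous future type
        // does not match the expected type, and the new note points at `Box::pin`.
        Box::pin(async { 42 })
    }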
10 changes: 10 additions & 0 deletions src/librustc/traits/error_reporting/on_unimplemented.rs
@@ -201,6 +201,16 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
}
}
}
if let ty::Dynamic(traits, _) = self_ty.kind {
for t in *traits.skip_binder() {
match t {
ty::ExistentialPredicate::Trait(trait_ref) => {
flags.push((sym::_Self, Some(self.tcx.def_path_str(trait_ref.def_id))))
}
_ => {}
}
}
}

if let Ok(Some(command)) =
OnUnimplementedDirective::of_item(self.tcx, trait_ref.def_id, def_id)
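A hedged illustration of what the new `ty::Dynamic` arm above enables: when the failing self type is a trait object, each trait in the object is recorded under `_Self`, so a filter like `on(_Self = "std::future::Future", ...)` also matches `dyn Future`. The snippet below is hypothetical user code and is expected to fail to compile with the customized message.

    use std::future::Future;

    fn assert_unpin<T: Unpin + ?Sized>() {}

    fn demo() {
        // Expected error: "`dyn Future<Output = ()>` cannot be unpinned",
        // now carrying the "consider using `Box::pin`" note.
        assert_unpin::<dyn Future<Output = ()>>();
    }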
5 changes: 1 addition & 4 deletions src/librustc/traits/error_reporting/suggestions.rs
@@ -701,10 +701,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
})
.collect::<Vec<_>>();
// Add the suggestion for the return type.
suggestions.push((
ret_ty.span,
format!("Box<{}{}>", if has_dyn { "" } else { "dyn " }, snippet),
));
suggestions.push((ret_ty.span, format!("Box<dyn {}>", trait_obj)));
err.multipart_suggestion(
"return a boxed trait object instead",
suggestions,
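For context on the simplified suggestion string (a hedged sketch of one case that can trigger it, not taken from the diff): a bare trait-object return type is rejected, and the multipart suggestion now uniformly proposes `Box<dyn Trait>` built from the trait name rather than re-deriving `dyn` from the user's snippet.

    use std::fmt::Display;

    // Expected to be rejected: the return type is an unboxed trait object.
    // The "return a boxed trait object instead" suggestion proposes roughly
    // `fn answer() -> Box<dyn Display> { Box::new(42) }`.
    fn answer() -> dyn Display {
        42
    }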
1 change: 1 addition & 0 deletions src/librustc_expand/lib.rs
@@ -1,3 +1,4 @@
#![feature(cow_is_borrowed)]
#![feature(crate_visibility_modifier)]
#![feature(decl_macro)]
#![feature(proc_macro_diagnostic)]
48 changes: 14 additions & 34 deletions src/librustc_expand/mbe/macro_parser.rs
@@ -78,20 +78,19 @@ use crate::mbe::{self, TokenTree};

use rustc_ast_pretty::pprust;
use rustc_parse::parser::{FollowedByType, Parser, PathStyle};
use rustc_parse::Directory;
use rustc_session::parse::ParseSess;
use rustc_span::symbol::{kw, sym, Symbol};
use syntax::ast::{Ident, Name};
use syntax::ptr::P;
use syntax::token::{self, DocComment, Nonterminal, Token};
use syntax::tokenstream::TokenStream;

use rustc_errors::{FatalError, PResult};
use rustc_span::Span;
use smallvec::{smallvec, SmallVec};

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use std::borrow::Cow;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::mem;
use std::ops::{Deref, DerefMut};
@@ -613,28 +612,9 @@ fn inner_parse_loop<'root, 'tt>(
Success(())
}

/// Use the given sequence of token trees (`ms`) as a matcher. Match the given token stream `tts`
/// against it and return the match.
///
/// # Parameters
///
/// - `sess`: The session into which errors are emitted
/// - `tts`: The tokenstream we are matching against the pattern `ms`
/// - `ms`: A sequence of token trees representing a pattern against which we are matching
/// - `directory`: Information about the file locations (needed for the black-box parser)
/// - `recurse_into_modules`: Whether or not to recurse into modules (needed for the black-box
/// parser)
pub(super) fn parse(
sess: &ParseSess,
tts: TokenStream,
ms: &[TokenTree],
directory: Option<Directory<'_>>,
recurse_into_modules: bool,
) -> NamedParseResult {
// Create a parser that can be used for the "black box" parts.
let mut parser =
Parser::new(sess, tts, directory, recurse_into_modules, true, rustc_parse::MACRO_ARGUMENTS);

/// Use the given sequence of token trees (`ms`) as a matcher. Match the token
/// stream from the given `parser` against it and return the match.
pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> NamedParseResult {
// A queue of possible matcher positions. We initialize it with the matcher position in which
// the "dot" is before the first token of the first token tree in `ms`. `inner_parse_loop` then
// processes all of these possible matcher positions and produces possible next positions into
@@ -659,7 +639,7 @@ pub(super) fn parse(
// parsing from the black-box parser done. The result is that `next_items` will contain a
// bunch of possible next matcher positions in `next_items`.
match inner_parse_loop(
sess,
parser.sess,
&mut cur_items,
&mut next_items,
&mut eof_items,
@@ -684,7 +664,7 @@ pub(super) fn parse(
if eof_items.len() == 1 {
let matches =
eof_items[0].matches.iter_mut().map(|dv| Lrc::make_mut(dv).pop().unwrap());
return nameize(sess, ms, matches);
return nameize(parser.sess, ms, matches);
} else if eof_items.len() > 1 {
return Error(
parser.token.span,
@@ -709,9 +689,14 @@ pub(super) fn parse(
// unnecessary implicit clone later in Rc::make_mut.
drop(eof_items);

// If there are no possible next positions AND we aren't waiting for the black-box parser,
// then there is a syntax error.
if bb_items.is_empty() && next_items.is_empty() {
return Failure(parser.token.clone(), "no rules expected this token in macro call");
}
// Another possibility is that we need to call out to parse some rust nonterminal
// (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong.
if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 {
else if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 {
let nts = bb_items
.iter()
.map(|item| match item.top_elts.get_tt(item.idx) {
Expand All @@ -733,16 +718,11 @@ pub(super) fn parse(
),
);
}
// If there are no possible next positions AND we aren't waiting for the black-box parser,
// then there is a syntax error.
else if bb_items.is_empty() && next_items.is_empty() {
return Failure(parser.token.take(), "no rules expected this token in macro call");
}
// Dump all possible `next_items` into `cur_items` for the next iteration.
else if !next_items.is_empty() {
// Now process the next token
cur_items.extend(next_items.drain(..));
parser.bump();
parser.to_mut().bump();
}
// Finally, we have the case where we need to call the black-box parser to get some
// nonterminal.
Expand All @@ -754,7 +734,7 @@ pub(super) fn parse(
let match_cur = item.match_cur;
item.push_match(
match_cur,
MatchedNonterminal(Lrc::new(parse_nt(&mut parser, span, ident.name))),
MatchedNonterminal(Lrc::new(parse_nt(parser.to_mut(), span, ident.name))),
);
item.idx += 1;
item.match_cur += 1;
64 changes: 47 additions & 17 deletions src/librustc_expand/mbe/macro_rules.rs
@@ -1,11 +1,11 @@
use crate::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
use crate::base::{DummyResult, ExpansionData, ExtCtxt, MacResult, TTMacroExpander};
use crate::base::{SyntaxExtension, SyntaxExtensionKind};
use crate::expand::{ensure_complete_parse, parse_ast_fragment, AstFragment, AstFragmentKind};
use crate::mbe;
use crate::mbe::macro_check;
use crate::mbe::macro_parser::parse;
use crate::mbe::macro_parser::parse_tt;
use crate::mbe::macro_parser::{Error, Failure, Success};
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedParseResult};
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq};
use crate::mbe::transcribe::transcribe;

use rustc_ast_pretty::pprust;
@@ -166,9 +166,9 @@ impl TTMacroExpander for MacroRulesMacroExpander {
}
}

fn trace_macros_note(cx: &mut ExtCtxt<'_>, sp: Span, message: String) {
fn trace_macros_note(cx_expansions: &mut FxHashMap<Span, Vec<String>>, sp: Span, message: String) {
let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
cx.expansions.entry(sp).or_default().push(message);
cx_expansions.entry(sp).or_default().push(message);
}

/// Given `lhses` and `rhses`, this is the new macro we create
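`trace_macros_note` now borrows only the expansions map instead of the whole `ExtCtxt`. A plausible reading, sketched with stand-in types (hedged; `Ctx` below is not the real `ExtCtxt`): with the base parser holding shared borrows of other context fields across the matching loop, a `&mut` to the entire context would conflict, while a disjoint field borrow does not.

    struct Ctx { expansions: Vec<String>, parse_sess: u32 }

    fn note(expansions: &mut Vec<String>, msg: String) {
        expansions.push(msg);
    }

    fn demo(cx: &mut Ctx) {
        let sess = &cx.parse_sess;              // shared borrow held across the "loop"
        note(&mut cx.expansions, "ok".into());  // fine: a disjoint field is borrowed mutably
        // note_all(cx, ...)                    // `&mut cx` would not compile while `sess` lives
        let _ = sess;
    }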
@@ -184,12 +184,36 @@ fn generic_extension<'cx>(
) -> Box<dyn MacResult + 'cx> {
if cx.trace_macros() {
let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(arg.clone()));
trace_macros_note(cx, sp, msg);
trace_macros_note(&mut cx.expansions, sp, msg);
}

// Which arm's failure should we report? (the one furthest along)
let mut best_failure: Option<(Token, &str)> = None;

// We create a base parser that can be used for the "black box" parts.
// Every iteration needs a fresh copy of that base parser. However, the
// parser is not mutated on many of the iterations, particularly when
// dealing with macros like this:
//
// macro_rules! foo {
// ("a") => (A);
// ("b") => (B);
// ("c") => (C);
// // ... etc. (maybe hundreds more)
// }
//
// as seen in the `html5ever` benchmark. We use a `Cow` so that the base
// parser is only cloned when necessary (upon mutation). Furthermore, we
// reinitialize the `Cow` with the base parser at the start of every
// iteration, so that any mutated parsers are not reused. This is all quite
// hacky, but speeds up the `html5ever` benchmark significantly. (Issue
// 68836 suggests a more comprehensive but more complex change to deal with
// this situation.)
let base_parser = base_parser_from_cx(&cx.current_expansion, &cx.parse_sess, arg.clone());

for (i, lhs) in lhses.iter().enumerate() {
let mut parser = Cow::Borrowed(&base_parser);

// try each arm's matchers
let lhs_tt = match *lhs {
mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
@@ -202,7 +226,7 @@ fn generic_extension<'cx>(
// are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
let mut gated_spans_snaphot = mem::take(&mut *cx.parse_sess.gated_spans.spans.borrow_mut());

match parse_tt(cx, lhs_tt, arg.clone()) {
match parse_tt(&mut parser, lhs_tt) {
Success(named_matches) => {
// The matcher was `Success(..)`ful.
// Merge the gated spans from parsing the matcher with the pre-existing ones.
@@ -232,11 +256,11 @@ fn generic_extension<'cx>(

if cx.trace_macros() {
let msg = format!("to `{}`", pprust::tts_to_string(tts.clone()));
trace_macros_note(cx, sp, msg);
trace_macros_note(&mut cx.expansions, sp, msg);
}

let directory = Directory {
path: Cow::from(cx.current_expansion.module.directory.as_path()),
path: cx.current_expansion.module.directory.clone(),
ownership: cx.current_expansion.directory_ownership,
};
let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false, None);
@@ -269,6 +293,7 @@ fn generic_extension<'cx>(
// Restore to the state before snapshotting and maybe try again.
mem::swap(&mut gated_spans_snaphot, &mut cx.parse_sess.gated_spans.spans.borrow_mut());
}
drop(base_parser);

let (token, label) = best_failure.expect("ran no matchers");
let span = token.span.substitute_dummy(sp);
@@ -286,7 +311,9 @@ fn generic_extension<'cx>(
mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
_ => continue,
};
match parse_tt(cx, lhs_tt, arg.clone()) {
let base_parser =
base_parser_from_cx(&cx.current_expansion, &cx.parse_sess, arg.clone());
match parse_tt(&mut Cow::Borrowed(&base_parser), lhs_tt) {
Success(_) => {
if comma_span.is_dummy() {
err.note("you might be missing a comma");
@@ -368,7 +395,8 @@ pub fn compile_declarative_macro(
),
];

let argument_map = match parse(sess, body, &argument_gram, None, true) {
let base_parser = Parser::new(sess, body, None, true, true, rustc_parse::MACRO_ARGUMENTS);
let argument_map = match parse_tt(&mut Cow::Borrowed(&base_parser), &argument_gram) {
Success(m) => m,
Failure(token, msg) => {
let s = parse_failure_msg(&token);
@@ -1184,14 +1212,16 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
}
}

/// Use this token tree as a matcher to parse given tts.
fn parse_tt(cx: &ExtCtxt<'_>, mtch: &[mbe::TokenTree], tts: TokenStream) -> NamedParseResult {
// `None` is because we're not interpolating
fn base_parser_from_cx<'cx>(
current_expansion: &'cx ExpansionData,
sess: &'cx ParseSess,
tts: TokenStream,
) -> Parser<'cx> {
let directory = Directory {
path: Cow::from(cx.current_expansion.module.directory.as_path()),
ownership: cx.current_expansion.directory_ownership,
path: current_expansion.module.directory.clone(),
ownership: current_expansion.directory_ownership,
};
parse(cx.parse_sess(), tts, mtch, Some(directory), true)
Parser::new(sess, tts, Some(directory), true, true, rustc_parse::MACRO_ARGUMENTS)
}

/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
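The long comment in `generic_extension` above motivates reusing a base parser through `Cow`. A self-contained sketch of the same clone-on-write pattern, using a toy `Parser` rather than the compiler's: arms that never advance the parser share the borrowed base copy, only an arm that mutates pays for a clone, and a fresh `Cow` per iteration keeps a mutated copy from leaking into later arms.

    use std::borrow::Cow;

    #[derive(Clone, Debug)]
    struct Parser { pos: usize }

    fn main() {
        let base = Parser { pos: 0 };
        for &needs_bump in &[false, false, true] {
            // Fresh `Cow` per arm, so a mutated parser is never reused.
            let mut parser: Cow<'_, Parser> = Cow::Borrowed(&base);
            if needs_bump {
                parser.to_mut().pos += 1; // the first mutation clones the base parser
            }
            let cloned = matches!(&parser, Cow::Owned(_));
            println!("pos = {}, cloned = {}", parser.pos, cloned);
        }
        assert_eq!(base.pos, 0); // the shared base parser is never mutated
    }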