diff --git a/src/librustc_builtin_macros/format.rs b/src/librustc_builtin_macros/format.rs
index a9298abe2d759..072c987a5230e 100644
--- a/src/librustc_builtin_macros/format.rs
+++ b/src/librustc_builtin_macros/format.rs
@@ -158,7 +158,7 @@ fn parse_args<'a>(
         } // accept trailing commas
         if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
             named = true;
-            let name = if let token::Ident(name, _) = p.token.kind {
+            let name = if let token::Ident(name, _) = p.normalized_token.kind {
                 p.bump();
                 name
             } else {
diff --git a/src/librustc_expand/mbe/macro_parser.rs b/src/librustc_expand/mbe/macro_parser.rs
index 6599e92222c75..2a53d600c5bcf 100644
--- a/src/librustc_expand/mbe/macro_parser.rs
+++ b/src/librustc_expand/mbe/macro_parser.rs
@@ -753,6 +753,12 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na
 fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
     match token.kind {
         token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
+        token::Interpolated(ref nt) => match **nt {
+            token::NtIdent(ident, is_raw) if ident.name != kw::Underscore => {
+                Some((ident.name, is_raw))
+            }
+            _ => None,
+        },
         _ => None,
     }
 }
@@ -883,9 +889,8 @@ fn parse_nt_inner<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> PResult<'a,
         // this could be handled like a token, since it is one
         sym::ident => {
             if let Some((name, is_raw)) = get_macro_name(&p.token) {
-                let span = p.token.span;
                 p.bump();
-                token::NtIdent(Ident::new(name, span), is_raw)
+                token::NtIdent(Ident::new(name, p.normalized_prev_token.span), is_raw)
             } else {
                 let token_str = pprust::token_to_string(&p.token);
                 let msg = &format!("expected ident, found {}", &token_str);
diff --git a/src/librustc_parse/parser/diagnostics.rs b/src/librustc_parse/parser/diagnostics.rs
index 018aef3c13cee..00f5fb9705286 100644
--- a/src/librustc_parse/parser/diagnostics.rs
+++ b/src/librustc_parse/parser/diagnostics.rs
@@ -13,7 +13,7 @@ use syntax::ast::{
 };
 use syntax::ast::{AttrVec, ItemKind, Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind};
 use syntax::ptr::P;
-use syntax::token::{self, token_can_begin_expr, TokenKind};
+use syntax::token::{self, TokenKind};
 use syntax::util::parser::AssocOp;
 
 use log::{debug, trace};
@@ -192,12 +192,12 @@ impl<'a> Parser<'a> {
             TokenKind::CloseDelim(token::DelimToken::Brace),
             TokenKind::CloseDelim(token::DelimToken::Paren),
         ];
-        if let token::Ident(name, false) = self.token.kind {
-            if Ident::new(name, self.token.span).is_raw_guess()
+        if let token::Ident(name, false) = self.normalized_token.kind {
+            if Ident::new(name, self.normalized_token.span).is_raw_guess()
                 && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
             {
                 err.span_suggestion(
-                    self.token.span,
+                    self.normalized_token.span,
                     "you can escape reserved keywords to use them as identifiers",
                     format!("r#{}", name),
                     Applicability::MaybeIncorrect,
@@ -900,8 +900,7 @@ impl<'a> Parser<'a> {
         } else if !sm.is_multiline(self.prev_span.until(self.token.span)) {
             // The current token is in the same line as the prior token, not recoverable.
         } else if self.look_ahead(1, |t| {
-            t == &token::CloseDelim(token::Brace)
-                || token_can_begin_expr(t) && t.kind != token::Colon
+            t == &token::CloseDelim(token::Brace) || t.can_begin_expr() && t.kind != token::Colon
         }) && [token::Comma, token::Colon].contains(&self.token.kind)
         {
             // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
@@ -919,7 +918,7 @@ impl<'a> Parser<'a> {
         } else if self.look_ahead(0, |t| {
             t == &token::CloseDelim(token::Brace)
                 || (
-                    token_can_begin_expr(t) && t != &token::Semi && t != &token::Pound
+                    t.can_begin_expr() && t != &token::Semi && t != &token::Pound
                     // Avoid triggering with too many trailing `#` in raw string.
                 )
         }) {
diff --git a/src/librustc_parse/parser/expr.rs b/src/librustc_parse/parser/expr.rs
index b8f67e73bc3f7..2d5223f210246 100644
--- a/src/librustc_parse/parser/expr.rs
+++ b/src/librustc_parse/parser/expr.rs
@@ -97,15 +97,14 @@ impl<'a> Parser<'a> {
     fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> {
         match self.parse_expr() {
             Ok(expr) => Ok(expr),
-            Err(mut err) => match self.token.kind {
+            Err(mut err) => match self.normalized_token.kind {
                 token::Ident(name, false)
                     if name == kw::Underscore && self.look_ahead(1, |t| t == &token::Comma) =>
                 {
                     // Special-case handling of `foo(_, _, _)`
                     err.emit();
-                    let sp = self.token.span;
                     self.bump();
-                    Ok(self.mk_expr(sp, ExprKind::Err, AttrVec::new()))
+                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err, AttrVec::new()))
                 }
                 _ => Err(err),
             },
@@ -166,7 +165,7 @@ impl<'a> Parser<'a> {
         while let Some(op) = self.check_assoc_op() {
             // Adjust the span for interpolated LHS to point to the `$lhs` token
             // and not to what it refers to.
-            let lhs_span = match self.unnormalized_prev_token.kind {
+            let lhs_span = match self.prev_token.kind {
                 TokenKind::Interpolated(..) => self.prev_span,
                 _ => lhs.span,
             };
@@ -333,7 +332,7 @@ impl<'a> Parser<'a> {
     /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
     fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
         Some(Spanned {
-            node: match (AssocOp::from_token(&self.token), &self.token.kind) {
+            node: match (AssocOp::from_token(&self.token), &self.normalized_token.kind) {
                 (Some(op), _) => op,
                 (None, token::Ident(sym::and, false)) => {
                     self.error_bad_logical_op("and", "&&", "conjunction");
@@ -345,7 +344,7 @@ impl<'a> Parser<'a> {
                 }
                 _ => return None,
             },
-            span: self.token.span,
+            span: self.normalized_token.span,
         })
     }
@@ -437,7 +436,7 @@ impl<'a> Parser<'a> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         let lo = self.token.span;
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
-        let (hi, ex) = match self.token.kind {
+        let (hi, ex) = match self.normalized_token.kind {
             token::Not => self.parse_unary_expr(lo, UnOp::Not), // `!expr`
             token::Tilde => self.recover_tilde_expr(lo), // `~expr`
             token::BinOp(token::Minus) => self.parse_unary_expr(lo, UnOp::Neg), // `-expr`
@@ -523,7 +522,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, (Span, P<Expr>)> {
         expr.map(|e| {
             (
-                match self.unnormalized_prev_token.kind {
+                match self.prev_token.kind {
                     TokenKind::Interpolated(..) => self.prev_span,
                     _ => e.span,
                 },
@@ -704,7 +703,7 @@ impl<'a> Parser<'a> {
     }
     fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
-        match self.token.kind {
+        match self.normalized_token.kind {
             token::Ident(..) => self.parse_dot_suffix(base, lo),
             token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
                 Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix))
             }
@@ -773,8 +772,8 @@ impl<'a> Parser<'a> {
         field: Symbol,
         suffix: Option<Symbol>,
     ) -> P<Expr> {
-        let span = self.token.span;
         self.bump();
+        let span = self.prev_token.span;
         let field = ExprKind::Field(base, Ident::new(field, span));
         self.expect_no_suffix(span, "a tuple index", suffix);
         self.mk_expr(lo.to(span), field, AttrVec::new())
     }
@@ -798,7 +797,7 @@ impl<'a> Parser<'a> {
     /// Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.token.span.rust_2018() && self.eat_keyword(kw::Await) {
+        if self.normalized_token.span.rust_2018() && self.eat_keyword(kw::Await) {
             return self.mk_await_expr(self_arg, lo);
         }
@@ -912,7 +911,7 @@ impl<'a> Parser<'a> {
             // | ^ expected expression
             self.bump();
             Ok(self.mk_expr_err(self.token.span))
-        } else if self.token.span.rust_2018() {
+        } else if self.normalized_token.span.rust_2018() {
             // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
             if self.check_keyword(kw::Async) {
                 if self.is_async_block() {
@@ -1342,7 +1341,7 @@ impl<'a> Parser<'a> {
             if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
         let asyncness =
-            if self.token.span.rust_2018() { self.parse_asyncness() } else { Async::No };
+            if self.normalized_token.span.rust_2018() { self.parse_asyncness() } else { Async::No };
         if asyncness.is_async() {
             // Feature-gate `async ||` closures.
             self.sess.gated_spans.gate(sym::async_closure, self.prev_span);
         }
@@ -1556,9 +1555,8 @@ impl<'a> Parser<'a> {
     fn eat_label(&mut self) -> Option<Label>