
Refactor macro by example #64689

Merged · 8 commits · Sep 25, 2019
4 changes: 2 additions & 2 deletions src/librustc_resolve/macros.rs
@@ -18,7 +18,7 @@ use syntax::ext::base::{self, InvocationRes, Indeterminate, SpecialDerives};
use syntax::ext::base::{MacroKind, SyntaxExtension};
use syntax::ext::expand::{AstFragment, AstFragmentKind, Invocation, InvocationKind};
use syntax::ext::hygiene::{self, ExpnId, ExpnData, ExpnKind};
use syntax::ext::tt::macro_rules;
use syntax::ext::compile_declarative_macro;
use syntax::feature_gate::{emit_feature_err, is_builtin_attr_name};
use syntax::feature_gate::GateIssue;
use syntax::symbol::{Symbol, kw, sym};
@@ -843,7 +843,7 @@ impl<'a> Resolver<'a> {
/// Compile the macro into a `SyntaxExtension` and possibly replace it with a pre-defined
/// extension partially or entirely for built-in macros and legacy plugin macros.
crate fn compile_macro(&mut self, item: &ast::Item, edition: Edition) -> SyntaxExtension {
let mut result = macro_rules::compile(
let mut result = compile_declarative_macro(
&self.session.parse_sess, self.session.features_untracked(), item, edition
);

6 changes: 3 additions & 3 deletions src/libsyntax/ext/expand.rs
@@ -6,7 +6,7 @@ use crate::config::StripUnconfigured;
use crate::ext::base::*;
use crate::ext::proc_macro::{collect_derives, MarkAttrs};
use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind};
use crate::ext::tt::macro_rules::annotate_err_with_kind;
use crate::ext::mbe::macro_rules::annotate_err_with_kind;
use crate::ext::placeholders::{placeholder, PlaceholderExpander};
use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
use crate::mut_visit::*;
@@ -115,8 +115,8 @@ macro_rules! ast_fragments {
}
}

impl<'a> MacResult for crate::ext::tt::macro_rules::ParserAnyMacro<'a> {
$(fn $make_ast(self: Box<crate::ext::tt::macro_rules::ParserAnyMacro<'a>>)
impl<'a> MacResult for crate::ext::mbe::macro_rules::ParserAnyMacro<'a> {
$(fn $make_ast(self: Box<crate::ext::mbe::macro_rules::ParserAnyMacro<'a>>)
-> Option<$AstTy> {
Some(self.make(AstFragmentKind::$Kind).$make_ast())
})*
166 changes: 166 additions & 0 deletions src/libsyntax/ext/mbe.rs
@@ -0,0 +1,166 @@
//! This module implements declarative macros: old `macro_rules` and the newer
//! `macro`. Declarative macros are also known as "macro by example", and that's
//! why we call this module `mbe`. For external documentation, prefer the
//! official terminology: "declarative macros".

crate mod transcribe;
crate mod macro_check;
crate mod macro_parser;
crate mod macro_rules;
crate mod quoted;

use crate::ast;
use crate::parse::token::{self, Token, TokenKind};
use crate::tokenstream::{DelimSpan};

use syntax_pos::{BytePos, Span};

use rustc_data_structures::sync::Lrc;

/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
/// that the delimiter itself might be `NoDelim`.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
struct Delimited {
delim: token::DelimToken,
tts: Vec<TokenTree>,
}

impl Delimited {
/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
fn open_tt(&self, span: Span) -> TokenTree {
let open_span = if span.is_dummy() {
span
} else {
span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
};
TokenTree::token(token::OpenDelim(self.delim), open_span)
}

/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
fn close_tt(&self, span: Span) -> TokenTree {
let close_span = if span.is_dummy() {
span
} else {
span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
};
TokenTree::token(token::CloseDelim(self.delim), close_span)
}
}

#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
struct SequenceRepetition {
/// The sequence of token trees
tts: Vec<TokenTree>,
/// The optional separator
separator: Option<Token>,
/// Whether the sequence can be repeated zero or more times (`*`), one or more times (`+`), or at most once (`?`)
kleene: KleeneToken,
/// The number of `Match`s that appear in the sequence (and subsequences)
num_captures: usize,
}

#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
struct KleeneToken {
span: Span,
op: KleeneOp,
}

impl KleeneToken {
fn new(op: KleeneOp, span: Span) -> KleeneToken {
KleeneToken { span, op }
}
}

/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
enum KleeneOp {
/// Kleene star (`*`) for zero or more repetitions
ZeroOrMore,
/// Kleene plus (`+`) for one or more repetitions
OneOrMore,
/// Kleene optional (`?`) for zero or one repetitions
ZeroOrOne,
}

/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
/// are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
enum TokenTree {
Token(Token),
Delimited(DelimSpan, Lrc<Delimited>),
/// A Kleene-style repetition sequence
Sequence(DelimSpan, Lrc<SequenceRepetition>),
/// e.g., `$var`
MetaVar(Span, ast::Ident),
/// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
MetaVarDecl(
Span,
ast::Ident, /* name to bind */
ast::Ident, /* kind of nonterminal */
),
}

impl TokenTree {
/// Returns the number of tokens in the tree.
fn len(&self) -> usize {
match *self {
TokenTree::Delimited(_, ref delimed) => match delimed.delim {
token::NoDelim => delimed.tts.len(),
_ => delimed.tts.len() + 2,
},
TokenTree::Sequence(_, ref seq) => seq.tts.len(),
_ => 0,
}
}

/// Returns `true` if the given token tree is delimited.
fn is_delimited(&self) -> bool {
match *self {
TokenTree::Delimited(..) => true,
_ => false,
}
}

/// Returns `true` if the given token tree is a token of the given kind.
fn is_token(&self, expected_kind: &TokenKind) -> bool {
match self {
TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
_ => false,
}
}

/// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
delimed.tts[index].clone()
}
(&TokenTree::Delimited(span, ref delimed), _) => {
if index == 0 {
return delimed.open_tt(span.open);
}
if index == delimed.tts.len() + 1 {
return delimed.close_tt(span.close);
}
delimed.tts[index - 1].clone()
}
(&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
_ => panic!("Cannot expand a token tree"),
}
}

/// Retrieves the `TokenTree`'s span.
fn span(&self) -> Span {
match *self {
TokenTree::Token(Token { span, .. })
| TokenTree::MetaVar(span, _)
| TokenTree::MetaVarDecl(span, _, _) => span,
TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
}
}

fn token(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span))
}
}
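Note on the new `mbe` data structures above: a matcher's shape is captured by `TokenTree`, `SequenceRepetition`, and `KleeneOp`. The sketch below is a standalone simplification (its `Tree`/`Kleene` names and `String` tokens are illustrative stand-ins, not the compiler's definitions) showing how the left-hand side `$( $e:expr ),+` decomposes into a repetition with a `,` separator, a `+` Kleene operator, and one metavariable declaration.

```rust
#![allow(dead_code)]

#[derive(Debug, Clone, Copy, PartialEq)]
enum Kleene {
    ZeroOrMore, // `*`
    OneOrMore,  // `+`
    ZeroOrOne,  // `?`
}

#[derive(Debug, Clone)]
enum Tree {
    // A plain token such as `,` or `fn`.
    Token(String),
    // `$name:kind`; only meaningful on a macro's left-hand side.
    MetaVarDecl { name: String, kind: String },
    // `$( ... ) sep op`: a Kleene repetition.
    Sequence { tts: Vec<Tree>, separator: Option<String>, kleene: Kleene },
}

fn main() {
    // Left-hand side of `macro_rules! v { ( $( $e:expr ),+ ) => { /* ... */ } }`:
    // one repetition holding a single metavariable declaration.
    let lhs = Tree::Sequence {
        tts: vec![Tree::MetaVarDecl { name: "e".into(), kind: "expr".into() }],
        separator: Some(",".into()),
        kleene: Kleene::OneOrMore,
    };
    println!("{:#?}", lhs);
}
```

In the real module the bodies are shared behind `Lrc`, and `SequenceRepetition::num_captures` is precomputed so the macro parser knows how many metavariable bindings a repetition contributes without re-walking it.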
src/libsyntax/ext/{tt → mbe}/macro_check.rs
@@ -106,7 +106,7 @@
//! bound.
use crate::ast::NodeId;
use crate::early_buffered_lints::BufferedEarlyLintId;
use crate::ext::tt::quoted::{KleeneToken, TokenTree};
use crate::ext::mbe::{KleeneToken, TokenTree};
use crate::parse::token::TokenKind;
use crate::parse::token::{DelimToken, Token};
use crate::parse::ParseSess;
@@ -196,7 +196,7 @@ struct MacroState<'a> {
/// - `node_id` is used to emit lints
/// - `span` is used when no spans are available
/// - `lhses` and `rhses` should have the same length and represent the macro definition
pub fn check_meta_variables(
pub(super) fn check_meta_variables(
sess: &ParseSess,
node_id: NodeId,
span: Span,
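For context on the `check_meta_variables` hunk: per the surrounding doc comment it walks a definition's paired `lhses`/`rhses` and buffers early lints for misused metavariables, for example one used in a right-hand side that the matcher never binds. The definition below is an illustration of that case (not from this PR); the `meta_variable_misuse` lint it feeds is allow-by-default, so it is enabled explicitly here.

```rust
// Illustrative only: `$b` in the transcriber is never bound by the matcher.
// With the allow-by-default lint enabled, the definition itself is flagged,
// without the macro ever being invoked.
#![warn(meta_variable_misuse)]
#![allow(unused_macros)]

macro_rules! suspicious {
    ($a:expr) => {
        $b // `b` is not declared on the left-hand side
    };
}

fn main() {}
```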
src/libsyntax/ext/{tt → mbe}/macro_parser.rs
@@ -70,12 +70,12 @@
//! eof: [a $( a )* a b ·]
//! ```

pub use NamedMatch::*;
pub use ParseResult::*;
crate use NamedMatch::*;
crate use ParseResult::*;
use TokenTreeOrTokenTreeSlice::*;

use crate::ast::{Ident, Name};
use crate::ext::tt::quoted::{self, TokenTree};
use crate::ext::mbe::{self, TokenTree};
use crate::parse::{Directory, ParseSess};
use crate::parse::parser::{Parser, PathStyle};
use crate::parse::token::{self, DocComment, Nonterminal, Token};
@@ -195,7 +195,7 @@ struct MatcherPos<'root, 'tt> {
// `None`.

/// The KleeneOp of this sequence if we are in a repetition.
seq_op: Option<quoted::KleeneOp>,
seq_op: Option<mbe::KleeneOp>,

/// The separator if we are in a repetition.
sep: Option<Token>,
@@ -267,7 +267,7 @@ impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> {
}

/// Represents the possible results of an attempted parse.
pub enum ParseResult<T> {
crate enum ParseResult<T> {
/// Parsed successfully.
Success(T),
/// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
@@ -279,10 +279,10 @@ pub enum ParseResult<T> {

/// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
/// This represents the mapping of metavars to the token trees they bind to.
pub type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;
crate type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;

/// Count how many metavars are named in the given matcher `ms`.
pub fn count_names(ms: &[TokenTree]) -> usize {
pub(super) fn count_names(ms: &[TokenTree]) -> usize {
ms.iter().fold(0, |count, elt| {
count + match *elt {
TokenTree::Sequence(_, ref seq) => seq.num_captures,
@@ -352,7 +352,7 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
#[derive(Debug, Clone)]
pub enum NamedMatch {
crate enum NamedMatch {
MatchedSeq(Lrc<NamedMatchVec>, DelimSpan),
MatchedNonterminal(Lrc<Nonterminal>),
}
@@ -415,7 +415,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(

/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
/// other tokens, this is "unexpected token...".
pub fn parse_failure_msg(tok: &Token) -> String {
crate fn parse_failure_msg(tok: &Token) -> String {
match tok.kind {
token::Eof => "unexpected end of macro invocation".to_string(),
_ => format!(
@@ -532,7 +532,7 @@ fn inner_parse_loop<'root, 'tt>(
}
// We don't need a separator. Move the "dot" back to the beginning of the matcher
// and try to match again UNLESS we are only allowed to have _one_ repetition.
else if item.seq_op != Some(quoted::KleeneOp::ZeroOrOne) {
else if item.seq_op != Some(mbe::KleeneOp::ZeroOrOne) {
item.match_cur = item.match_lo;
item.idx = 0;
cur_items.push(item);
@@ -555,8 +555,8 @@
// implicitly disallowing OneOrMore from having 0 matches here. Thus, that will
// result in a "no rules expected token" error by virtue of this matcher not
// working.
if seq.kleene.op == quoted::KleeneOp::ZeroOrMore
|| seq.kleene.op == quoted::KleeneOp::ZeroOrOne
if seq.kleene.op == mbe::KleeneOp::ZeroOrMore
|| seq.kleene.op == mbe::KleeneOp::ZeroOrOne
{
let mut new_item = item.clone();
new_item.match_cur += seq.num_captures;
@@ -648,7 +648,7 @@ fn inner_parse_loop<'root, 'tt>(
/// - `directory`: Information about the file locations (needed for the black-box parser)
/// - `recurse_into_modules`: Whether or not to recurse into modules (needed for the black-box
/// parser)
pub fn parse(
pub(super) fn parse(
sess: &ParseSess,
tts: TokenStream,
ms: &[TokenTree],
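One more note on `count_names`, which this diff narrows to `pub(super)` (visible only within `ext::mbe`): it reports how many metavariable declarations a matcher contains, so the parser can size its match slots up front. The sketch below uses simplified, illustrative types; the in-tree version folds over `mbe::TokenTree` and reads a sequence's precomputed `num_captures` instead of re-walking its body.

```rust
// Standalone sketch with illustrative types, not the compiler's definitions.
#![allow(dead_code)]

enum Tree {
    Token,                // binds nothing
    MetaVarDecl,          // `$x:kind`, binds one name
    Sequence(Vec<Tree>),  // `$( ... )*` / `+` / `?`
    Delimited(Vec<Tree>), // `( ... )`, `[ ... ]`, `{ ... }`
}

fn count_names(ms: &[Tree]) -> usize {
    ms.iter().fold(0, |count, tree| {
        count + match tree {
            Tree::Token => 0,
            Tree::MetaVarDecl => 1,
            Tree::Sequence(tts) | Tree::Delimited(tts) => count_names(tts),
        }
    })
}

fn main() {
    // `( $( $e:expr ),* ; $name:ident )` declares two names: `e` and `name`.
    let matcher = [Tree::Delimited(vec![
        Tree::Sequence(vec![Tree::MetaVarDecl]),
        Tree::Token, // `;`
        Tree::MetaVarDecl,
    ])];
    assert_eq!(count_names(&matcher), 2);
}
```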