diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs
index 0516e111be3b3..8d7fe655c23b2 100644
--- a/src/libproc_macro/lib.rs
+++ b/src/libproc_macro/lib.rs
@@ -101,7 +101,7 @@ pub mod __internal {
     pub fn token_stream_parse_items(stream: TokenStream)
                                     -> Result<Vec<P<ast::Item>>, LexError> {
         with_parse_sess(move |sess| {
-            let mut parser = parse::new_parser_from_ts(sess, stream.inner);
+            let mut parser = parse::stream_to_parser(sess, stream.inner);
             let mut items = Vec::new();
             while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
@@ -177,9 +177,8 @@ impl FromStr for TokenStream {
         __internal::with_parse_sess(|sess| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let tts = parse::parse_tts_from_source_str(name, src, sess);
-
-            Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
+            let stream = parse::parse_stream_from_source_str(name, src, sess);
+            Ok(__internal::token_stream_wrap(stream))
         })
     }
 }
diff --git a/src/libproc_macro_plugin/qquote.rs b/src/libproc_macro_plugin/qquote.rs
index dc7c96a4e2767..0276587ed52b1 100644
--- a/src/libproc_macro_plugin/qquote.rs
+++ b/src/libproc_macro_plugin/qquote.rs
@@ -17,7 +17,7 @@ use syntax::symbol::Symbol;
 use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream};
 use syntax_pos::DUMMY_SP;
 
-use std::rc::Rc;
+use std::iter;
 
 pub fn qquote<'cx>(stream: TokenStream) -> TokenStream {
     stream.quote()
@@ -49,10 +49,7 @@ macro_rules! quote_tree {
 }
 
 fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
-    TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
-        delim: delim,
-        tts: stream.trees().cloned().collect(),
-    })).into()
+    TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
 }
 
 macro_rules! quote {
@@ -75,9 +72,9 @@ impl Quote for TokenStream {
             return quote!(::syntax::tokenstream::TokenStream::empty());
         }
 
-        struct Quote<'a>(tokenstream::Cursor<'a>);
+        struct Quote(iter::Peekable<tokenstream::Cursor>);
 
-        impl<'a> Iterator for Quote<'a> {
+        impl Iterator for Quote {
             type Item = TokenStream;
 
             fn next(&mut self) -> Option<TokenStream> {
@@ -89,25 +86,18 @@ impl Quote for TokenStream {
                     _ => false,
                 };
 
-                self.0.next().cloned().map(|tree| {
+                self.0.next().map(|tree| {
                     let quoted_tree = if is_unquote { tree.into() } else { tree.quote() };
                     quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),)
                 })
             }
         }
 
-        let quoted = Quote(self.trees()).collect::<TokenStream>();
+        let quoted = Quote(self.trees().peekable()).collect::<TokenStream>();
         quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>())
     }
 }
 
-impl Quote for Vec<TokenTree> {
-    fn quote(&self) -> TokenStream {
-        let stream = self.iter().cloned().collect::<TokenStream>();
-        quote!((quote stream).trees().cloned().collect::<::std::vec::Vec<_> >())
-    }
-}
-
 impl Quote for TokenTree {
     fn quote(&self) -> TokenStream {
         match *self {
@@ -123,12 +113,12 @@ impl Quote for TokenTree {
     }
 }
 
-impl Quote for Rc<Delimited> {
+impl Quote for Delimited {
     fn quote(&self) -> TokenStream {
-        quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
+        quote!(::syntax::tokenstream::Delimited {
             delim: (quote self.delim),
-            tts: (quote self.tts),
-        }))
+            tts: (quote self.stream()).into(),
+        })
     }
 }
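Aside: with this change `Delimited` is held by value instead of behind `Rc`, and its `tts` field is a `ThinTokenStream` built from a `TokenStream` with `.into()`. A minimal sketch of the new construction pattern against the APIs in this patch (the `parenthesize` helper is hypothetical, mirroring `delimit` above):

    use syntax::parse::token;
    use syntax::tokenstream::{Delimited, TokenStream, TokenTree};
    use syntax_pos::DUMMY_SP;

    // Wrap an existing stream in parentheses; `stream.into()` converts the
    // `TokenStream` into the `ThinTokenStream` stored in `Delimited::tts`,
    // and the final `.into()` lifts the single tree back into a stream.
    fn parenthesize(stream: TokenStream) -> TokenStream {
        TokenTree::Delimited(DUMMY_SP, Delimited {
            delim: token::Paren,
            tts: stream.into(),
        }).into()
    }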
diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs
index c1ba688974be8..22bc28eb3fec0 100644
--- a/src/librustc/hir/mod.rs
+++ b/src/librustc/hir/mod.rs
@@ -40,7 +40,7 @@ use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
 use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::TokenStream;
 use syntax::util::ThinVec;
 
 use std::collections::BTreeMap;
@@ -471,7 +471,7 @@ pub struct MacroDef {
     pub attrs: HirVec<Attribute>,
     pub id: NodeId,
     pub span: Span,
-    pub body: HirVec<TokenTree>,
+    pub body: TokenStream,
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs
index 8a8d2bd512b25..d0eedcac0c06a 100644
--- a/src/librustc_incremental/calculate_svh/svh_visitor.rs
+++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs
@@ -866,8 +866,8 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has
         debug!("visit_macro_def: st={:?}", self.st);
         SawMacroDef.hash(self.st);
         hash_attrs!(self, &macro_def.attrs);
-        for tt in &macro_def.body {
-            self.hash_token_tree(tt);
+        for tt in macro_def.body.trees() {
+            self.hash_token_tree(&tt);
         }
         visit::walk_macro_def(self, macro_def)
     }
@@ -1033,15 +1033,9 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
             }
             tokenstream::TokenTree::Delimited(span, ref delimited) => {
                 hash_span!(self, span);
-                let tokenstream::Delimited {
-                    ref delim,
-                    ref tts,
-                } = **delimited;
-
-                delim.hash(self.st);
-                tts.len().hash(self.st);
-                for sub_tt in tts {
-                    self.hash_token_tree(sub_tt);
+                delimited.delim.hash(self.st);
+                for sub_tt in delimited.stream().trees() {
+                    self.hash_token_tree(&sub_tt);
                 }
             }
         }
diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs
index de53c91ba2d8f..cf2219e0e3df5 100644
--- a/src/librustc_metadata/cstore_impl.rs
+++ b/src/librustc_metadata/cstore_impl.rs
@@ -34,7 +34,7 @@ use std::rc::Rc;
 
 use syntax::ast;
 use syntax::attr;
-use syntax::parse::filemap_to_tts;
+use syntax::parse::filemap_to_stream;
 use syntax::symbol::Symbol;
 use syntax_pos::{mk_sp, Span};
 use rustc::hir::svh::Svh;
@@ -401,7 +401,7 @@ impl CrateStore for cstore::CStore {
         let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
         let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
-        let body = filemap_to_tts(&sess.parse_sess, filemap);
+        let body = filemap_to_stream(&sess.parse_sess, filemap);
 
         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index);
@@ -419,7 +419,7 @@ impl CrateStore for cstore::CStore {
             id: ast::DUMMY_NODE_ID,
             span: local_span,
             attrs: attrs,
-            body: body,
+            body: body.into(),
         })
     }
diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs
index af0edab7a83bd..8ddc1642d9e1c 100644
--- a/src/librustc_metadata/encoder.rs
+++ b/src/librustc_metadata/encoder.rs
@@ -853,9 +853,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     /// Serialize the text of exported macros
     fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> {
+        use syntax::print::pprust;
         Entry {
             kind: EntryKind::MacroDef(self.lazy(&MacroDef {
-                body: ::syntax::print::pprust::tts_to_string(&macro_def.body)
+                body: pprust::tts_to_string(&macro_def.body.trees().collect::<Vec<_>>()),
             })),
             visibility: self.lazy(&ty::Visibility::Public),
             span: self.lazy(&macro_def.span),
diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs
index 89cff39c59e31..751f59d0290ac 100644
--- a/src/librustc_resolve/build_reduced_graph.rs
+++ b/src/librustc_resolve/build_reduced_graph.rs
@@ -516,7 +516,7 @@ impl<'a> Resolver<'a> {
             expansion: Cell::new(LegacyScope::Empty),
         });
         self.invocations.insert(mark, invocation);
-        macro_rules.body = mark_tts(&macro_rules.body, mark);
+        macro_rules.body = mark_tts(macro_rules.stream(), mark).into();
         let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, &macro_rules));
         self.macro_map.insert(def_id, ext.clone());
         ext
diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs
index b7068f4b09f5f..36645418d4f78 100644
--- a/src/librustc_resolve/macros.rs
+++ b/src/librustc_resolve/macros.rs
@@ -545,7 +545,7 @@ impl<'a> Resolver<'a> {
     pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<'a>) {
         let tts = match item.node {
-            ast::ItemKind::Mac(ref mac) => &mac.node.tts,
+            ast::ItemKind::Mac(ref mac) => mac.node.stream(),
             _ => unreachable!(),
         };
 
@@ -562,7 +562,7 @@ impl<'a> Resolver<'a> {
             attrs: item.attrs.clone(),
             id: ast::DUMMY_NODE_ID,
             span: item.span,
-            body: mark_tts(tts, mark),
+            body: mark_tts(tts, mark).into(),
         };
 
         *legacy_scope = LegacyScope::Binding(self.arenas.alloc_legacy_binding(LegacyBinding {
diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs
index 6c93744f014a3..34402742e6c33 100644
--- a/src/librustc_save_analysis/span_utils.rs
+++ b/src/librustc_save_analysis/span_utils.rs
@@ -284,7 +284,7 @@ impl<'a> SpanUtils<'a> {
     pub fn signature_string_for_span(&self, span: Span) -> String {
         let mut toks = self.retokenise_span(span);
         toks.real_token();
-        let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
+        let mut toks = toks.parse_all_token_trees().unwrap().trees();
         let mut prev = toks.next().unwrap();
         let first_span = prev.span();
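Aside: consumers that used to slice a `Vec<TokenTree>` now collect the `trees()` cursor first, as the rustdoc change below does. A sketch of the pattern (the `matcher_spans` helper name is ours):

    use syntax::ast;
    use syntax::tokenstream::TokenTree;
    use syntax_pos::Span;

    // `MacroDef::stream()` fattens the stored ThinTokenStream; collecting it
    // restores the old slice-based view so `chunks(4)` keeps working
    // (each `lhs => rhs ;` arm flattens to four trees).
    fn matcher_spans(def: &ast::MacroDef) -> Vec<Span> {
        let tts: Vec<TokenTree> = def.stream().trees().collect();
        tts.chunks(4).map(|arm| arm[0].span()).collect()
    }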
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index 236d9f230b5d4..42928427233d7 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -211,7 +211,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
         };
 
         // FIXME(jseyfried) merge with `self.visit_macro()`
-        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+        let tts = def.stream().trees().collect::<Vec<_>>();
+        let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
         om.macros.push(Macro {
             def_id: def_id,
             attrs: def.attrs.clone().into(),
@@ -520,8 +521,9 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
 
     // convert each exported_macro into a doc item
     fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
+        let tts = def.body.trees().collect::<Vec<_>>();
         // Extract the spans of all matchers. They represent the "interface" of the macro.
-        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+        let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
 
         Macro {
             def_id: self.cx.tcx.hir.local_def_id(def.id),
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 09fb369cd3568..9cc754cbf4d19 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -24,7 +24,7 @@ use ext::hygiene::SyntaxContext;
 use print::pprust;
 use ptr::P;
 use symbol::{Symbol, keywords};
-use tokenstream::{TokenTree};
+use tokenstream::{ThinTokenStream, TokenStream};
 use util::ThinVec;
 
 use std::collections::HashSet;
 use std::fmt;
@@ -1033,7 +1033,13 @@ pub type Mac = Spanned<Mac_>;
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct Mac_ {
     pub path: Path,
-    pub tts: Vec<TokenTree>,
+    pub tts: ThinTokenStream,
+}
+
+impl Mac_ {
+    pub fn stream(&self) -> TokenStream {
+        self.tts.clone().into()
+    }
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -1915,7 +1921,13 @@ pub struct MacroDef {
     pub attrs: Vec<Attribute>,
     pub id: NodeId,
     pub span: Span,
-    pub body: Vec<TokenTree>,
+    pub body: ThinTokenStream,
+}
+
+impl MacroDef {
+    pub fn stream(&self) -> TokenStream {
+        self.body.clone().into()
+    }
 }
 
 #[cfg(test)]
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index c7d2f0cd31dc6..e242cf2777fe5 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -188,10 +188,7 @@ impl<F> AttrProcMacro for F
 
 /// Represents a thing that maps token trees to Macro Results
 pub trait TTMacroExpander {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx>;
 }
 
@@ -200,15 +197,11 @@ pub type MacroExpanderFn =
     -> Box<MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
-              -> Box<MacResult+'cx>
+    where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box<MacResult+'cx>
 {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx> {
-        (*self)(ecx, span, token_tree)
+        (*self)(ecx, span, &input.trees().collect::<Vec<_>>())
     }
 }
 
@@ -654,9 +647,8 @@ impl<'a> ExtCtxt<'a> {
         expand::MacroExpander::new(self, true)
     }
 
-    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-                               -> parser::Parser<'a> {
-        parse::tts_to_parser(self.parse_sess, tts.to_vec())
+    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
+        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
     }
 
     pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
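Aside: a direct `TTMacroExpander` impl now takes the invocation body as an owned `TokenStream`; closures over `&[TokenTree]` keep working through the blanket impl above, which collects the stream for them. A sketch under those assumptions (the `NoopExpander` type is hypothetical):

    use syntax::ext::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
    use syntax::tokenstream::TokenStream;
    use syntax_pos::Span;

    struct NoopExpander;

    impl TTMacroExpander for NoopExpander {
        // `input` is owned, so the expander can walk or clone it cheaply
        // without the caller materializing a Vec<TokenTree> first.
        fn expand<'cx>(&self, _ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                       -> Box<MacResult + 'cx> {
            let _ntokens = input.trees().count();
            DummyResult::any(span)
        }
    }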
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 38494378f72ad..f1662284a8820 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::{self, Block, Ident, Mac_, PatKind};
+use ast::{self, Block, Ident, PatKind};
 use ast::{Name, MacStmtStyle, StmtKind, ItemKind};
 use attr::{self, HasAttrs};
 use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
@@ -20,16 +20,15 @@ use ext::placeholders::{placeholder, PlaceholderExpander};
 use feature_gate::{self, Features, is_builtin_attr};
 use fold;
 use fold::*;
+use parse::{filemap_to_stream, ParseSess, DirectoryOwnership, PResult, token};
 use parse::parser::Parser;
-use parse::token;
-use parse::{ParseSess, DirectoryOwnership, PResult, filemap_to_tts};
 use print::pprust;
 use ptr::P;
 use std_inject;
 use symbol::Symbol;
 use symbol::keywords;
 use syntax_pos::{self, Span, ExpnId};
-use tokenstream::{TokenTree, TokenStream};
+use tokenstream::TokenStream;
 use util::small_vector::SmallVector;
 use visit::Visitor;
 
@@ -462,8 +461,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let attr_toks = tts_for_attr_args(&attr, &self.cx.parse_sess).into_iter().collect();
-                let item_toks = tts_for_item(&item, &self.cx.parse_sess).into_iter().collect();
+                let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess);
+                let item_toks = stream_for_item(&item, &self.cx.parse_sess);
 
                 let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
                 self.parse_expansion(tok_result, kind, name, attr.span)
@@ -487,11 +486,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             InvocationKind::Bang { mac, ident, span } => (mac, ident, span),
             _ => unreachable!(),
         };
-        let Mac_ { path, tts, .. } = mac.node;
+        let path = &mac.node.path;
         let extname = path.segments.last().unwrap().identifier.name;
         let ident = ident.unwrap_or(keywords::Invalid.ident());
-        let marked_tts = mark_tts(&tts, mark);
+        let marked_tts = mark_tts(mac.node.stream(), mark);
         let opt_expanded = match *ext {
             NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
                 if ident.name != keywords::Invalid.name() {
@@ -510,7 +509,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     },
                 });
 
-                kind.make_from(expandfun.expand(self.cx, span, &marked_tts))
+                kind.make_from(expandfun.expand(self.cx, span, marked_tts))
             }
 
             IdentTT(ref expander, tt_span, allow_internal_unstable) => {
@@ -529,7 +528,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     }
                 });
 
-                kind.make_from(expander.expand(self.cx, span, ident, marked_tts))
+                let input: Vec<_> = marked_tts.into_trees().collect();
+                kind.make_from(expander.expand(self.cx, span, ident, input))
             }
 
             MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
@@ -563,8 +563,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     },
                 });
 
-                let toks = marked_tts.into_iter().collect();
-                let tok_result = expandfun.expand(self.cx, span, toks);
+                let tok_result = expandfun.expand(self.cx, span, marked_tts);
                 Some(self.parse_expansion(tok_result, kind, extname, span))
             }
         };
@@ -647,7 +646,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 
     fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span)
                        -> Expansion {
-        let mut parser = self.cx.new_parser_from_tts(&toks.trees().cloned().collect::<Vec<_>>());
+        let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::<Vec<_>>());
         let expansion = match parser.parse_expansion(kind, false) {
             Ok(expansion) => expansion,
             Err(mut err) => {
@@ -821,23 +820,23 @@ fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
 // Therefore, we must use the pretty printer (yuck) to turn the AST node into a
 // string, which we then re-tokenise (double yuck), but first we have to patch
 // the pretty-printed string on to the end of the existing codemap (infinity-yuck).
-fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
     let text = match *item {
         Annotatable::Item(ref i) => pprust::item_to_string(i),
         Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
         Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
     };
-    string_to_tts(text, parse_sess)
+    string_to_stream(text, parse_sess)
 }
 
-fn tts_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn stream_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> TokenStream {
     use ast::MetaItemKind::*;
     use print::pp::Breaks;
     use print::pprust::PrintState;
 
     let token_string = match attr.value.node {
         // For `#[foo]`, an empty token
-        Word => return vec![],
+        Word => return TokenStream::empty(),
 
         // For `#[foo(bar, baz)]`, returns `(bar, baz)`
        List(ref items) => pprust::to_string(|s| {
            s.popen()?;
@@ -853,12 +852,12 @@ fn tts_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<Token
     };
-    string_to_tts(token_string, parse_sess)
+    string_to_stream(token_string, parse_sess)
 }
 
-fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
     let filename = String::from("<macro expansion>");
-    filemap_to_tts(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
+    filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
 }
 
 impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
@@ -1162,6 +1161,6 @@ impl Folder for Marker {
 }
 
 // apply a given mark to the given token trees. Used prior to expansion of a macro.
-pub fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec<TokenTree> {
+pub fn mark_tts(tts: TokenStream, m: Mark) -> TokenStream {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs
index 0636a78b2152f..e2fb1946e90db 100644
--- a/src/libsyntax/ext/placeholders.rs
+++ b/src/libsyntax/ext/placeholders.rs
@@ -13,6 +13,7 @@ use codemap::{DUMMY_SP, dummy_spanned};
 use ext::base::ExtCtxt;
 use ext::expand::{Expansion, ExpansionKind};
 use ext::hygiene::Mark;
+use tokenstream::TokenStream;
 use fold::*;
 use ptr::P;
 use symbol::keywords;
@@ -26,7 +27,7 @@ pub fn placeholder(kind: ExpansionKind, id: ast::NodeId) -> Expansion {
     fn mac_placeholder() -> ast::Mac {
         dummy_spanned(ast::Mac_ {
             path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
-            tts: Vec::new(),
+            tts: TokenStream::empty().into(),
         })
     }
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index b1b69c80f4d00..69ff726e719a9 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -16,7 +16,7 @@ use ext::build::AstBuilder;
 use parse::parser::{Parser, PathStyle};
 use parse::token;
 use ptr::P;
-use tokenstream::TokenTree;
+use tokenstream::{TokenStream, TokenTree};
 
 /// Quasiquoting works via token trees.
@@ -35,7 +35,7 @@ pub mod rt {
     use std::rc::Rc;
     use symbol::Symbol;
 
-    use tokenstream::{self, TokenTree};
+    use tokenstream::{self, TokenTree, TokenStream};
 
     pub use parse::new_parser_from_tts;
     pub use syntax_pos::{BytePos, Span, DUMMY_SP};
@@ -227,10 +227,10 @@ pub mod rt {
             if self.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
-            r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
+            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
                 delim: token::Bracket,
-                tts: self.value.to_tokens(cx),
-            })));
+                tts: self.value.to_tokens(cx).into_iter().collect::<TokenStream>().into(),
+            }));
             r
         }
     }
@@ -244,10 +244,10 @@ pub mod rt {
 
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
+            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
                 delim: token::Paren,
-                tts: vec![],
-            }))]
+                tts: TokenStream::empty().into(),
+            })]
         }
     }
 
@@ -355,14 +355,15 @@ pub mod rt {
         }
 
         fn parse_tts(&self, s: String) -> Vec<TokenTree> {
-            parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
+            let source_name = "<quote expansion>".to_owned();
+            parse::parse_stream_from_source_str(source_name, s, self.parse_sess())
+                .into_trees().collect()
         }
     }
 }
 
 // Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
 pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
-    use std::rc::Rc;
     use tokenstream::Delimited;
 
     let mut results = Vec::new();
@@ -373,8 +374,10 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
                 results.push(::std::mem::replace(&mut result, Vec::new()));
             }
             TokenTree::Token(span, token::CloseDelim(delim)) => {
-                let tree =
-                    TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
+                let tree = TokenTree::Delimited(span, Delimited {
+                    delim: delim,
+                    tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
+                });
                 result = results.pop().unwrap();
                 result.push(tree);
             }
@@ -747,7 +750,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
         TokenTree::Delimited(span, ref delimed) => {
             let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
-            stmts.extend(statements_mk_tts(cx, &delimed.tts));
+            stmts.extend(statements_mk_tts(cx, delimed.stream()));
             stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
             stmts
         }
@@ -810,14 +813,14 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
     vec![stmt_let_sp, stmt_let_tt]
 }
 
-fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
+fn statements_mk_tts(cx: &ExtCtxt, tts: TokenStream) -> Vec<ast::Stmt> {
     let mut ss = Vec::new();
     let mut quoted = false;
-    for tt in tts {
-        quoted = match *tt {
+    for tt in tts.into_trees() {
+        quoted = match tt {
             TokenTree::Token(_, token::Dollar) if !quoted => true,
             _ => {
-                ss.extend(statements_mk_tt(cx, tt, quoted));
+                ss.extend(statements_mk_tt(cx, &tt, quoted));
                 false
             }
        }
@@ -829,7 +832,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<as
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ ... @@
 enum TokenTreeOrTokenTreeVec {
-    Tt(quoted::TokenTree),
-    TtSeq(Vec<quoted::TokenTree>),
+    Tt(TokenTree),
+    TtSeq(Vec<TokenTree>),
 }
 
 impl TokenTreeOrTokenTreeVec {
@@ -113,7 +113,7 @@ impl TokenTreeOrTokenTreeVec {
         }
     }
 
-    fn get_tt(&self, index: usize) -> quoted::TokenTree {
+    fn get_tt(&self, index: usize) -> TokenTree {
         match *self {
             TtSeq(ref v) => v[index].clone(),
             Tt(ref tt) => tt.get_tt(index),
         }
     }
@@ -144,9 +144,7 @@ struct MatcherPos {
 
 pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
 
-pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
-    use self::quoted::TokenTree;
-
+pub fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count + match *elt {
             TokenTree::Sequence(_, ref seq) => {
@@ -163,7 +161,7 @@ pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
     })
 }
 
-fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> {
+fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     let match_idx_hi = count_names(&ms[..]);
     let matches = create_matches(match_idx_hi);
     Box::new(MatcherPos {
@@ -202,10 +200,8 @@ pub enum NamedMatch {
     MatchedNonterminal(Rc<Nonterminal>)
 }
 
-fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[quoted::TokenTree], mut res: I)
+fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[TokenTree], mut res: I)
             -> NamedParseResult {
-    use self::quoted::TokenTree;
-
     fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I,
              ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
              -> Result<(), (syntax_pos::Span, String)> {
@@ -289,9 +285,8 @@ fn inner_parse_loop(sess: &ParseSess,
                     eof_eis: &mut SmallVector<Box<MatcherPos>>,
                     bb_eis: &mut SmallVector<Box<MatcherPos>>,
                     token: &Token,
-                    span: &syntax_pos::Span) -> ParseResult<()> {
-    use self::quoted::TokenTree;
-
+                    span: &syntax_pos::Span)
+                    -> ParseResult<()> {
     while let Some(mut ei) = cur_eis.pop() {
         // When unzipped trees end, remove them
         while ei.idx >= ei.top_elts.len() {
@@ -419,13 +414,8 @@ fn inner_parse_loop(sess: &ParseSess,
     Success(())
 }
 
-pub fn parse(sess: &ParseSess,
-             tts: Vec<tokenstream::TokenTree>,
-             ms: &[quoted::TokenTree],
-             directory: Option<Directory>)
+pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Option<Directory>)
              -> NamedParseResult {
-    use self::quoted::TokenTree;
-
     let mut parser = Parser::new(sess, tts, directory, true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
     let mut next_eis = Vec::new(); // or proceed normally
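Aside: `macro_parser::parse` now consumes its input as a `TokenStream` rather than a `Vec<TokenTree>`, so callers like `compile` below pass `def.body.clone().into()` instead of cloning a vector. A sketch of a call site (the `match_input` wrapper is hypothetical):

    use syntax::ext::tt::macro_parser::{parse, NamedParseResult};
    use syntax::ext::tt::quoted;
    use syntax::parse::ParseSess;
    use syntax::tokenstream::TokenStream;

    // Match `input` against a pre-parsed matcher, with no enclosing directory.
    fn match_input(sess: &ParseSess, input: TokenStream, ms: &[quoted::TokenTree])
                   -> NamedParseResult {
        parse(sess, input, ms, None)
    }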
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 193c06707c7a6..1d386c1a3ac93 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -22,9 +22,8 @@ use parse::{Directory, ParseSess};
 use parse::parser::Parser;
 use parse::token::{self, NtTT};
 use parse::token::Token::*;
-use print;
 use symbol::Symbol;
-use tokenstream::TokenTree;
+use tokenstream::{TokenStream, TokenTree};
 
 use std::collections::{HashMap};
 use std::collections::hash_map::{Entry};
@@ -68,7 +67,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {
     fn expand<'cx>(&self,
                    cx: &'cx mut ExtCtxt,
                    sp: Span,
-                   arg: &[TokenTree])
+                   input: TokenStream)
                    -> Box<MacResult+'cx> {
         if !self.valid {
             return DummyResult::any(sp);
@@ -76,7 +75,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {
         generic_extension(cx,
                           sp,
                           self.name,
-                          arg,
+                          input,
                           &self.lhses,
                           &self.rhses)
     }
@@ -86,14 +85,12 @@ impl TTMacroExpander for MacroRulesMacroExpander {
 fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                           sp: Span,
                           name: ast::Ident,
-                          arg: &[TokenTree],
+                          arg: TokenStream,
                           lhses: &[quoted::TokenTree],
                           rhses: &[quoted::TokenTree])
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
-        println!("{}! {{ {} }}",
-                 name,
-                 print::pprust::tts_to_string(arg));
+        println!("{}! {{ {} }}", name, arg);
     }
 
     // Which arm's failure should we report? (the one furthest along)
@@ -106,7 +103,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             _ => cx.span_bug(sp, "malformed macro lhs")
         };
 
-        match TokenTree::parse(cx, lhs_tt, arg) {
+        match TokenTree::parse(cx, lhs_tt, arg.clone()) {
             Success(named_matches) => {
                 let rhs = match rhses[i] {
                     // ignore delimiters
@@ -186,7 +183,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     ];
 
     // Parse the macro_rules! invocation
-    let argument_map = match parse(sess, def.body.clone(), &argument_gram, None) {
+    let argument_map = match parse(sess, def.body.clone().into(), &argument_gram, None) {
         Success(m) => m,
         Failure(sp, tok) => {
             let s = parse_failure_msg(tok);
@@ -205,7 +202,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap();
+                        let tt = quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
                         valid &= check_lhs_nt_follows(sess, &tt);
                         return tt;
                     }
@@ -221,7 +218,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        return quoted::parse(&[tt.clone()], false, sess).pop().unwrap();
+                        return quoted::parse(tt.clone().into(), false, sess).pop().unwrap();
                     }
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 530824b28348a..d56859d805c87 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -124,10 +124,10 @@ impl TokenTree {
     }
 }
 
-pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess)
+pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess)
              -> Vec<TokenTree> {
     let mut result = Vec::new();
-    let mut trees = input.iter().cloned();
+    let mut trees = input.trees();
     while let Some(tree) = trees.next() {
         let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
         match tree {
@@ -161,13 +161,13 @@ fn parse_tree(tree: tokenstream::TokenTree,
 {
     match tree {
         tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
-            Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => {
+            Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
                 if delimited.delim != token::Paren {
                     let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                     let msg = format!("expected `(`, found `{}`", tok);
                     sess.span_diagnostic.span_err(span, &msg);
                 }
-                let sequence = parse(&delimited.tts, expect_matchers, sess);
+                let sequence = parse(delimited.tts.into(), expect_matchers, sess);
                 let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
                 let name_captures = macro_parser::count_names(&sequence);
                 TokenTree::Sequence(span, Rc::new(SequenceRepetition {
@@ -197,7 +197,7 @@ fn parse_tree(tree: tokenstream::TokenTree,
         tokenstream::TokenTree::Delimited(span, delimited) => {
             TokenTree::Delimited(span, Rc::new(Delimited {
                 delim: delimited.delim,
-                tts: parse(&delimited.tts, expect_matchers, sess),
+                tts: parse(delimited.tts.into(), expect_matchers, sess),
             }))
         }
     }
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 90f64a5208f75..24004492be2a0 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -14,7 +14,7 @@ use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use ext::tt::quoted;
 use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{TokenTree, Delimited};
+use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::small_vector::SmallVector;
 
 use std::rc::Rc;
@@ -66,11 +66,11 @@ impl Iterator for Frame {
 
 pub fn transcribe(sp_diag: &Handler,
                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                   src: Vec<quoted::TokenTree>)
-                  -> Vec<TokenTree> {
+                  -> TokenStream {
     let mut stack = SmallVector::one(Frame::new(src));
     let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
     let mut repeats = Vec::new();
-    let mut result = Vec::new();
+    let mut result: Vec<TokenStream> = Vec::new();
     let mut result_stack = Vec::new();
 
     loop {
@@ -84,8 +84,11 @@ pub fn transcribe(sp_diag: &Handler,
                         *idx = 0;
                         if let Some(sep) = sep.clone() {
                             // repeat same span, I guess
-                            let prev_span = result.last().map(TokenTree::span).unwrap_or(DUMMY_SP);
-                            result.push(TokenTree::Token(prev_span, sep));
+                            let prev_span = match result.last() {
+                                Some(stream) => stream.trees().next().unwrap().span(),
+                                None => DUMMY_SP,
+                            };
+                            result.push(TokenTree::Token(prev_span, sep).into());
                         }
                         continue
                     }
@@ -97,14 +100,14 @@ pub fn transcribe(sp_diag: &Handler,
                 }
                 Frame::Delimited { forest, span, .. } => {
                     if result_stack.is_empty() {
-                        return result;
+                        return TokenStream::concat(result);
                     }
-                    let tree = TokenTree::Delimited(span, Rc::new(Delimited {
+                    let tree = TokenTree::Delimited(span, Delimited {
                         delim: forest.delim,
-                        tts: result,
-                    }));
+                        tts: TokenStream::concat(result).into(),
+                    });
                     result = result_stack.pop().unwrap();
-                    result.push(tree);
+                    result.push(tree.into());
                 }
             }
             continue
@@ -148,19 +151,20 @@ pub fn transcribe(sp_diag: &Handler,
             // FIXME #2887: think about span stuff here
             quoted::TokenTree::Token(sp, SubstNt(ident)) => {
                 match lookup_cur_matched(ident, &interpolations, &repeats) {
-                    None => result.push(TokenTree::Token(sp, SubstNt(ident))),
+                    None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
                     Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                         match **nt {
                             // sidestep the interpolation tricks for ident because
                             // (a) idents can be in lots of places, so it'd be a pain
                             // (b) we actually can, since it's a token.
                             NtIdent(ref sn) => {
-                                result.push(TokenTree::Token(sn.span, token::Ident(sn.node)));
+                                let token = TokenTree::Token(sn.span, token::Ident(sn.node));
+                                result.push(token.into());
                             }
-                            NtTT(ref tt) => result.push(tt.clone()),
+                            NtTT(ref tt) => result.push(tt.clone().into()),
                             _ => {
-                                // FIXME(pcwalton): Bad copy
-                                result.push(TokenTree::Token(sp, token::Interpolated(nt.clone())));
+                                let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+                                result.push(token.into());
                             }
                         }
                     } else {
@@ -174,7 +178,7 @@ pub fn transcribe(sp_diag: &Handler,
                 stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
                 result_stack.push(mem::replace(&mut result, Vec::new()));
             }
-            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
+            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok).into()),
             quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
         }
     }
 }
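Aside: the transcriber above now buffers a `Vec<TokenStream>` and joins it once with `TokenStream::concat`; single trees enter the buffer through `From<TokenTree>`. A worked micro-example of that assembly (the `demo` function is hypothetical):

    use syntax::ast::Ident;
    use syntax::parse::token;
    use syntax::tokenstream::{TokenStream, TokenTree};
    use syntax_pos::DUMMY_SP;

    // Build the stream `a , b` from three one-tree streams, the same way
    // `transcribe` assembles its result buffer.
    fn demo() -> TokenStream {
        let parts: Vec<TokenStream> = vec![
            TokenTree::Token(DUMMY_SP, token::Ident(Ident::from_str("a"))).into(),
            TokenTree::Token(DUMMY_SP, token::Comma).into(),
            TokenTree::Token(DUMMY_SP, token::Ident(Ident::from_str("b"))).into(),
        ];
        TokenStream::concat(parts)
    }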
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 257b7efba5c8e..4242b0f8b9803 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -233,11 +233,11 @@ pub trait Folder : Sized {
         noop_fold_ty_params(tps, self)
     }
 
-    fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree {
+    fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
         noop_fold_tt(tt, self)
     }
 
-    fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec<TokenTree> {
+    fn fold_tts(&mut self, tts: TokenStream) -> TokenStream {
         noop_fold_tts(tts, self)
     }
 
@@ -497,8 +497,8 @@ pub fn noop_fold_attribute<T: Folder>(attr: Attribute, fld: &mut T) -> Option<At
 
 pub fn noop_fold_mac<T: Folder>(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
     Spanned {
         node: Mac_ {
+            tts: fld.fold_tts(node.stream()).into(),
             path: fld.fold_path(node.path),
-            tts: fld.fold_tts(&node.tts),
         },
         span: fld.new_span(span)
     }
@@ -539,23 +539,19 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
     }
 }
 
-pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
-    match *tt {
-        TokenTree::Token(span, ref tok) =>
-            TokenTree::Token(fld.new_span(span), fld.fold_token(tok.clone())),
-        TokenTree::Delimited(span, ref delimed) => {
-            TokenTree::Delimited(fld.new_span(span), Rc::new(
-                Delimited {
-                    delim: delimed.delim,
-                    tts: fld.fold_tts(&delimed.tts),
-                }
-            ))
-        },
+pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
+    match tt {
+        TokenTree::Token(span, tok) =>
+            TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
+        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(fld.new_span(span), Delimited {
+            tts: fld.fold_tts(delimed.stream()).into(),
+            delim: delimed.delim,
+        }),
     }
 }
 
-pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
-    tts.iter().map(|tt| fld.fold_tt(tt)).collect()
+pub fn noop_fold_tts<T: Folder>(tts: TokenStream, fld: &mut T) -> TokenStream {
+    tts.trees().map(|tt| fld.fold_tt(tt)).collect()
 }
 
 // apply ident folder if it's an ident, apply other folds to interpolated nodes
@@ -617,7 +613,7 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
         token::NtIdent(id) => token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
         token::NtPath(path) => token::NtPath(fld.fold_path(path)),
-        token::NtTT(tt) => token::NtTT(fld.fold_tt(&tt)),
+        token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
         token::NtImplItem(item) => token::NtImplItem(fld.fold_impl_item(item)
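Aside: `fold_tt`/`fold_tts` are now by-value over `TokenStream`, so a span-rewriting pass is just a `Folder` with `new_span` overridden and `noop_fold_tts` rebuilding the stream tree by tree — exactly how `mark_tts` drives its `Marker`. A sketch under that assumption (the `SpanEraser` folder is hypothetical):

    use syntax::fold::{self, Folder};
    use syntax::tokenstream::TokenStream;
    use syntax_pos::{Span, DUMMY_SP};

    // Erase every span in a stream; all other `Folder` methods keep their
    // default no-op behavior.
    struct SpanEraser;

    impl Folder for SpanEraser {
        fn new_span(&mut self, _sp: Span) -> Span { DUMMY_SP }
    }

    fn erase_spans(tts: TokenStream) -> TokenStream {
        fold::noop_fold_tts(tts, &mut SpanEraser)
    }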
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index eafc3f77ab052..554a1fcfc71a6 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -12,32 +12,30 @@ use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
 use syntax_pos::Span;
-use tokenstream::{Delimited, TokenTree};
-
-use std::rc::Rc;
+use tokenstream::{Delimited, TokenStream, TokenTree};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
-    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
+    pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
-            tts.push(self.parse_token_tree()?);
+            tts.push(self.parse_token_tree()?.into());
         }
-        Ok(tts)
+        Ok(TokenStream::concat(tts))
     }
 
     // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
-    fn parse_token_trees_until_close_delim(&mut self) -> Vec<TokenTree> {
+    fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         let mut tts = vec![];
         loop {
             if let token::CloseDelim(..) = self.token {
-                return tts;
+                return TokenStream::concat(tts);
             }
             match self.parse_token_tree() {
-                Ok(tt) => tts.push(tt),
+                Ok(tt) => tts.push(tt.into()),
                 Err(mut e) => {
                     e.emit();
-                    return tts;
+                    return TokenStream::concat(tts);
                 }
             }
         }
@@ -111,10 +109,10 @@ impl<'a> StringReader<'a> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+                Ok(TokenTree::Delimited(span, Delimited {
                     delim: delim,
-                    tts: tts,
-                })))
+                    tts: tts.into(),
+                }))
             },
             token::CloseDelim(_) => {
                 // An unexpected closing delimiter (i.e., there is no
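Aside: the lexer above now produces a `TokenStream` directly, and the parse entry points below are renamed to match. The pieces compose as before. A sketch of the end-to-end path (the `parser_for_source` helper and the "<demo>" source name are made up for illustration):

    use syntax::parse::{self, ParseSess};
    use syntax::parse::parser::Parser;

    // Source text -> TokenStream -> Parser, via the new names
    // (`parse_stream_from_source_str` + `stream_to_parser`).
    fn parser_for_source<'a>(sess: &'a ParseSess, src: String) -> Parser<'a> {
        let stream = parse::parse_stream_from_source_str("<demo>".to_string(), src, sess);
        parse::stream_to_parser(sess, stream)
    }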
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 6fec49b229abe..c00d2952b3b42 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -19,7 +19,7 @@ use parse::parser::Parser;
 use ptr::P;
 use str::char_at;
 use symbol::Symbol;
-use tokenstream;
+use tokenstream::{TokenStream, TokenTree};
 
 use std::cell::RefCell;
 use std::collections::HashSet;
@@ -141,9 +141,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                     -> Vec<tokenstream::TokenTree> {
-    filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source))
+pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+                                        -> TokenStream {
+    filemap_to_stream(sess, sess.codemap().new_filemap(name, None, source))
 }
 
 // Create a new parser from a source string
@@ -175,7 +175,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
     let end_pos = filemap.end_pos;
-    let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap));
+    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
 
     if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
         parser.span = syntax_pos::mk_sp(end_pos, end_pos);
@@ -186,13 +186,8 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>)
-                               -> Parser<'a> {
-    tts_to_parser(sess, tts)
-}
-
-pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> {
-    tts_to_parser(sess, ts.trees().cloned().collect())
+pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+    stream_to_parser(sess, tts.into_iter().collect())
 }
 
 
@@ -215,15 +210,15 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream::TokenTree> {
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream {
     let mut srdr = lexer::StringReader::new(sess, filemap);
     srdr.real_token();
     panictry!(srdr.parse_all_token_trees())
 }
 
-/// Given tts and the ParseSess, produce a parser
-pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
-    let mut p = Parser::new(sess, tts, None, false);
+/// Given stream and the ParseSess, produce a parser
+pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+    let mut p = Parser::new(sess, stream, None, false);
     p.check_unknown_macro_variable();
     p
 }
@@ -603,7 +598,6 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
 #[cfg(test)]
 mod tests {
     use super::*;
-    use std::rc::Rc;
     use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
     use ast::{self, Ident, PatKind};
@@ -614,7 +608,7 @@ mod tests {
     use print::pprust::item_to_string;
     use ptr::P;
     use tokenstream::{self, TokenTree};
-    use util::parser_testing::{string_to_tts, string_to_parser};
+    use util::parser_testing::{string_to_stream, string_to_parser};
     use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt};
     use util::ThinVec;
 
@@ -659,8 +653,9 @@ mod tests {
     // check the token-tree-ization of macros
     #[test]
     fn string_to_tts_macro () {
-        let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
-        let tts: &[tokenstream::TokenTree] = &tts[..];
+        let tts: Vec<_> =
+            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
+        let tts: &[TokenTree] = &tts[..];
 
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
@@ -672,7 +667,7 @@ mod tests {
             )
             if name_macro_rules.name == "macro_rules"
             && name_zip.name == "zip" => {
-                let tts = &macro_delimed.tts[..];
+                let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
                         3,
@@ -681,7 +676,7 @@ mod tests {
                         Some(&TokenTree::Delimited(_, ref second_delimed)),
                     )
                     if macro_delimed.delim == token::Paren => {
-                        let tts = &first_delimed.tts[..];
+                        let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
@@ -689,9 +684,9 @@ mod tests {
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if first_delimed.delim == token::Paren && ident.name == "a" => {},
-                            _ => panic!("value 3: {:?}", **first_delimed),
+                            _ => panic!("value 3: {:?}", *first_delimed),
                         }
-                        let tts = &second_delimed.tts[..];
+                        let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
@@ -700,10 +695,10 @@ mod tests {
                             )
                             if second_delimed.delim == token::Paren
                             && ident.name == "a" => {},
-                            _ => panic!("value 4: {:?}", **second_delimed),
+                            _ => panic!("value 4: {:?}", *second_delimed),
                         }
                     },
-                    _ => panic!("value 2: {:?}", **macro_delimed),
+                    _ => panic!("value 2: {:?}", *macro_delimed),
                 }
             },
             _ => panic!("value: {:?}",tts),
@@ -712,31 +707,31 @@ mod tests {
 
     #[test]
     fn string_to_tts_1() {
-        let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
+        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
-        let expected = vec![
-            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))),
-            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))),
+        let expected = TokenStream::concat(vec![
+            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
+            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
             TokenTree::Delimited(
                 sp(5, 14),
-                Rc::new(tokenstream::Delimited {
+                tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
-                    tts: vec![
-                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
-                        TokenTree::Token(sp(8, 9), token::Colon),
-                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
-                    ],
-                })),
+                    tts: TokenStream::concat(vec![
+                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
+                        TokenTree::Token(sp(8, 9), token::Colon).into(),
+                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(),
+                    ]).into(),
+                }).into(),
             TokenTree::Delimited(
                 sp(15, 21),
-                Rc::new(tokenstream::Delimited {
+                tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
-                    tts: vec![
-                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
-                        TokenTree::Token(sp(18, 19), token::Semi),
-                    ],
-                }))
-        ];
+                    tts: TokenStream::concat(vec![
+                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
+                        TokenTree::Token(sp(18, 19), token::Semi).into(),
+                    ]).into(),
+                }).into()
+        ]);
 
         assert_eq!(tts, expected);
     }
@@ -979,8 +974,8 @@ mod tests {
         let expr = parse::parse_expr_from_source_str("foo".to_string(),
            "foo!( fn main() { body } )".to_string(), &sess).unwrap();
 
-        let tts = match expr.node {
-            ast::ExprKind::Mac(ref mac) => mac.node.tts.clone(),
+        let tts: Vec<_> = match expr.node {
+            ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
             _ => panic!("not a macro"),
         };
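Aside: the updated tests compare whole streams; `assert_eq!(tts, expected)` above is span-sensitive, while `eq_unspanned` ignores spans. A sketch of the latter (the `same_tokens` helper is hypothetical):

    use syntax::util::parser_testing::string_to_stream;

    // True when two snippets tokenize to the same trees, spans aside.
    fn same_tokens(a: &str, b: &str) -> bool {
        string_to_stream(a.to_string()).eq_unspanned(&string_to_stream(b.to_string()))
    }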
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 71274c4fdaa4e..6e3724b5fd87b 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use abi::{self, Abi};
-use ast::BareFnTy;
+use ast::{AttrStyle, BareFnTy};
 use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
 use ast::Unsafety;
 use ast::{Mod, Arg, Arm, Attribute, BindingMode, TraitItemKind};
@@ -46,21 +46,21 @@ use errors::{self, DiagnosticBuilder};
 use parse::{self, classify, token};
 use parse::common::SeqSep;
 use parse::lexer::TokenAndSpan;
+use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::obsolete::ObsoleteSyntax;
 use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
 use ptr::P;
 use parse::PResult;
-use tokenstream::{Delimited, TokenTree};
+use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};
 use util::ThinVec;
 
 use std::collections::HashSet;
-use std::mem;
+use std::{cmp, mem, slice};
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
-use std::slice;
 
 bitflags! {
     flags Restrictions: u8 {
@@ -175,12 +175,112 @@ pub struct Parser<'a> {
     /// into modules, and sub-parsers have new values for this name.
     pub root_module_name: Option<String>,
     pub expected_tokens: Vec<TokenType>,
-    pub tts: Vec<(TokenTree, usize)>,
+    token_cursor: TokenCursor,
     pub desugar_doc_comments: bool,
     /// Whether we should configure out of line modules as we parse.
     pub cfg_mods: bool,
 }
 
+struct TokenCursor {
+    frame: TokenCursorFrame,
+    stack: Vec<TokenCursorFrame>,
+}
+
+struct TokenCursorFrame {
+    delim: token::DelimToken,
+    span: Span,
+    open_delim: bool,
+    tree_cursor: tokenstream::Cursor,
+    close_delim: bool,
+}
+
+impl TokenCursorFrame {
+    fn new(sp: Span, delimited: &Delimited) -> Self {
+        TokenCursorFrame {
+            delim: delimited.delim,
+            span: sp,
+            open_delim: delimited.delim == token::NoDelim,
+            tree_cursor: delimited.stream().into_trees(),
+            close_delim: delimited.delim == token::NoDelim,
+        }
+    }
+}
+
+impl TokenCursor {
+    fn next(&mut self) -> TokenAndSpan {
+        loop {
+            let tree = if !self.frame.open_delim {
+                self.frame.open_delim = true;
+                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
+                    .open_tt(self.frame.span)
+            } else if let Some(tree) = self.frame.tree_cursor.next() {
+                tree
+            } else if !self.frame.close_delim {
+                self.frame.close_delim = true;
+                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
+                    .close_tt(self.frame.span)
+            } else if let Some(frame) = self.stack.pop() {
+                self.frame = frame;
+                continue
+            } else {
+                return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
+            };
+
+            match tree {
+                TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
+                TokenTree::Delimited(sp, ref delimited) => {
+                    let frame = TokenCursorFrame::new(sp, delimited);
+                    self.stack.push(mem::replace(&mut self.frame, frame));
+                }
+            }
+        }
+    }
+
+    fn next_desugared(&mut self) -> TokenAndSpan {
+        let (sp, name) = match self.next() {
+            TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
+            tok @ _ => return tok,
+        };
+
+        let stripped = strip_doc_comment_decoration(&name.as_str());
+
+        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
+        // required to wrap the text.
+        let mut num_of_hashes = 0;
+        let mut count = 0;
+        for ch in stripped.chars() {
+            count = match ch {
+                '"' => 1,
+                '#' if count > 0 => count + 1,
+                _ => 0,
+            };
+            num_of_hashes = cmp::max(num_of_hashes, count);
+        }
+
+        let body = TokenTree::Delimited(sp, Delimited {
+            delim: token::Bracket,
+            tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
+                  TokenTree::Token(sp, token::Eq),
+                  TokenTree::Token(sp, token::Literal(
+                      token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
+                .iter().cloned().collect::<TokenStream>().into(),
+        });
+
+        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
+            delim: token::NoDelim,
+            tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
+                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                    .iter().cloned().collect::<TokenStream>().into()
+            } else {
+                [TokenTree::Token(sp, token::Pound), body]
+                    .iter().cloned().collect::<TokenStream>().into()
+            },
+        })));
+
+        self.next()
+    }
+}
+
 #[derive(PartialEq, Eq, Clone)]
 pub enum TokenType {
     Token(token::Token),
@@ -309,14 +409,10 @@ impl From<P<Expr>> for LhsExpr {
 impl<'a> Parser<'a> {
     pub fn new(sess: &'a ParseSess,
-               tokens: Vec<TokenTree>,
+               tokens: TokenStream,
               directory: Option<Directory>,
               desugar_doc_comments: bool)
               -> Self {
-        let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited {
-            delim: token::NoDelim,
-            tts: tokens,
-        }));
         let mut parser = Parser {
             sess: sess,
             token: token::Underscore,
@@ -328,7 +424,13 @@ impl<'a> Parser<'a> {
             directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
             root_module_name: None,
             expected_tokens: Vec::new(),
-            tts: if tt.len() > 0 { vec![(tt, 0)] } else { Vec::new() },
+            token_cursor: TokenCursor {
+                frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
+                    delim: token::NoDelim,
+                    tts: tokens.into(),
+                }),
+                stack: Vec::new(),
+            },
             desugar_doc_comments: desugar_doc_comments,
             cfg_mods: true,
         };
@@ -346,29 +448,14 @@ impl<'a> Parser<'a> {
     }
 
     fn next_tok(&mut self) -> TokenAndSpan {
-        loop {
-            let tok = if let Some((tts, i)) = self.tts.pop() {
-                let tt = tts.get_tt(i);
-                if i + 1 < tts.len() {
-                    self.tts.push((tts, i + 1));
-                }
-                if let TokenTree::Token(sp, tok) = tt {
-                    TokenAndSpan { tok: tok, sp: sp }
-                } else {
-                    self.tts.push((tt, 0));
-                    continue
-                }
-            } else {
-                TokenAndSpan { tok: token::Eof, sp: self.span }
-            };
-
-            match tok.tok {
-                token::DocComment(name) if self.desugar_doc_comments => {
-                    self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
-                }
-                _ => return tok,
-            }
+        let mut next = match self.desugar_doc_comments {
+            true => self.token_cursor.next_desugared(),
+            false => self.token_cursor.next(),
+        };
+        if next.sp == syntax_pos::DUMMY_SP {
+            next.sp = self.prev_span;
         }
+        next
     }
 
     /// Convert a token to a string using self's reader
@@ -972,19 +1059,16 @@ impl<'a> Parser<'a> {
         F: FnOnce(&token::Token) -> R,
     {
         if dist == 0 {
-            return f(&self.token);
-        }
-        let mut tok = token::Eof;
-        if let Some(&(ref tts, mut i)) = self.tts.last() {
-            i += dist - 1;
-            if i < tts.len() {
-                tok = match tts.get_tt(i) {
-                    TokenTree::Token(_, tok) => tok,
-                    TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
-                };
-            }
+            return f(&self.token)
         }
-        f(&tok)
+
+        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+            Some(tree) => match tree {
+                TokenTree::Token(_, tok) => tok,
+                TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
+            },
+            None => token::CloseDelim(self.token_cursor.frame.delim),
+        })
     }
     pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
         self.sess.span_diagnostic.struct_span_fatal(self.span, m)
     }
@@ -2022,10 +2106,10 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, Vec<TokenTree>)> {
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> {
         match self.token {
             token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
-                TokenTree::Delimited(_, delimited) => (delim, delimited.tts.clone()),
+                TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()),
                 _ => unreachable!(),
             }),
             _ => Err(self.fatal("expected open delimiter")),
@@ -2569,10 +2653,14 @@ impl<'a> Parser<'a> {
     pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
         match self.token {
             token::OpenDelim(..) => {
-                let tt = self.tts.pop().unwrap().0;
-                self.span = tt.span();
+                let frame = mem::replace(&mut self.token_cursor.frame,
+                                         self.token_cursor.stack.pop().unwrap());
+                self.span = frame.span;
                 self.bump();
-                return Ok(tt);
+                return Ok(TokenTree::Delimited(frame.span, Delimited {
+                    delim: frame.delim,
+                    tts: frame.tree_cursor.original_stream().into(),
+                }));
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
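Aside: the `#`-counting scan in `next_desugared` above finds how many `#`s are needed to embed the doc text in a raw string literal: a `"` starts a run, each following `#` extends it, and the answer is the longest such run. Standalone version with a worked case (the `required_hashes` name is ours):

    // Minimum number of `#`s so the raw string wrapping `text` cannot be
    // terminated early by a `"##...` sequence inside `text` itself.
    fn required_hashes(text: &str) -> usize {
        let mut max = 0;
        let mut count = 0;
        for ch in text.chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            max = std::cmp::max(max, count);
        }
        max
    }

    // required_hashes("plain") == 0, so `r"..."` suffices;
    // required_hashes("say \"hi\"# now") == 2, because the text contains `"#`,
    // which would close an `r#"..."#` literal early.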
-    pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> {
-        match *tt {
+    pub fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
+        match tt {
             TokenTree::Token(_, ref tk) => {
                 word(&mut self.s, &token_to_string(tk))?;
                 match *tk {
@@ -1470,16 +1470,16 @@ impl<'a> State<'a> {
             TokenTree::Delimited(_, ref delimed) => {
                 word(&mut self.s, &token_to_string(&delimed.open_token()))?;
                 space(&mut self.s)?;
-                self.print_tts(&delimed.tts)?;
+                self.print_tts(delimed.stream())?;
                 space(&mut self.s)?;
                 word(&mut self.s, &token_to_string(&delimed.close_token()))
             },
         }
     }
 
-    pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> {
+    pub fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> {
         self.ibox(0)?;
-        for (i, tt) in tts.iter().enumerate() {
+        for (i, tt) in tts.into_trees().enumerate() {
             if i != 0 {
                 space(&mut self.s)?;
             }
@@ -1550,7 +1550,7 @@ impl<'a> State<'a> {
                 word(&mut self.s, "! ")?;
                 self.cbox(INDENT_UNIT)?;
                 self.popen()?;
-                self.print_tts(&node.tts[..])?;
+                self.print_tts(node.stream())?;
                 self.pclose()?;
                 word(&mut self.s, ";")?;
                 self.end()?
@@ -1586,7 +1586,7 @@ impl<'a> State<'a> {
                 word(&mut self.s, "! ")?;
                 self.cbox(INDENT_UNIT)?;
                 self.popen()?;
-                self.print_tts(&node.tts[..])?;
+                self.print_tts(node.stream())?;
                 self.pclose()?;
                 word(&mut self.s, ";")?;
                 self.end()?
@@ -1779,7 +1779,7 @@ impl<'a> State<'a> {
                 }
                 token::NoDelim => {}
             }
-            self.print_tts(&m.node.tts)?;
+            self.print_tts(m.node.stream())?;
             match delim {
                 token::Paren => self.pclose(),
                 token::Bracket => word(&mut self.s, "]"),
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 6665404672133..2da442a1a53da 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -22,21 +22,17 @@
 //! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
 //! ownership of the original.
 
-use ast::{self, AttrStyle, LitKind};
 use syntax_pos::{BytePos, Span, DUMMY_SP};
-use codemap::Spanned;
 use ext::base;
 use ext::tt::{macro_parser, quoted};
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::{self, Directory};
-use parse::token::{self, Token, Lit};
+use parse::Directory;
+use parse::token::{self, Token};
 use print::pprust;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
-use symbol::Symbol;
 use util::RcSlice;
 
-use std::{fmt, iter};
-use std::rc::Rc;
+use std::{fmt, iter, mem};
+use std::hash::{self, Hash};
 
 /// A delimited sequence of token trees
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -44,7 +40,7 @@ pub struct Delimited {
     /// The type of delimiter
     pub delim: token::DelimToken,
     /// The delimited sequence of token trees
-    pub tts: Vec<TokenTree>,
+    pub tts: ThinTokenStream,
 }
 
 impl Delimited {
@@ -77,8 +73,8 @@ impl Delimited {
     }
 
     /// Returns the token trees inside the delimiters.
-    pub fn subtrees(&self) -> &[TokenTree] {
-        &self.tts
+    pub fn stream(&self) -> TokenStream {
+        self.tts.clone().into()
     }
 }
 
@@ -99,85 +95,19 @@ pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(Span, Rc<Delimited>),
+    Delimited(Span, Delimited),
 }
 
 impl TokenTree {
-    pub fn len(&self) -> usize {
-        match *self {
-            TokenTree::Token(_, token::DocComment(name)) => {
-                match doc_comment_style(&name.as_str()) {
-                    AttrStyle::Outer => 2,
-                    AttrStyle::Inner => 3,
-                }
-            }
-            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-                token::NoDelim => delimed.tts.len(),
-                _ => delimed.tts.len() + 2,
-            },
-            TokenTree::Token(..) => 0,
-        }
-    }
-
-    pub fn get_tt(&self, index: usize) -> TokenTree {
-        match (self, index) {
-            (&TokenTree::Token(sp, token::DocComment(_)), 0) => TokenTree::Token(sp, token::Pound),
-            (&TokenTree::Token(sp, token::DocComment(name)), 1)
-                if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
-                TokenTree::Token(sp, token::Not)
-            }
-            (&TokenTree::Token(sp, token::DocComment(name)), _) => {
-                let stripped = strip_doc_comment_decoration(&name.as_str());
-
-                // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
-                // required to wrap the text.
-                let num_of_hashes = stripped.chars()
-                    .scan(0, |cnt, x| {
-                        *cnt = if x == '"' {
-                            1
-                        } else if *cnt != 0 && x == '#' {
-                            *cnt + 1
-                        } else {
-                            0
-                        };
-                        Some(*cnt)
-                    })
-                    .max()
-                    .unwrap_or(0);
-
-                TokenTree::Delimited(sp, Rc::new(Delimited {
-                    delim: token::Bracket,
-                    tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
-                              TokenTree::Token(sp, token::Eq),
-                              TokenTree::Token(sp, token::Literal(
-                                  token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
-                }))
-            }
-            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
-                delimed.tts[index].clone()
-            }
-            (&TokenTree::Delimited(span, ref delimed), _) => {
-                if index == 0 {
-                    return delimed.open_tt(span);
-                }
-                if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt(span);
-                }
-                delimed.tts[index - 1].clone()
-            }
-            _ => panic!("Cannot expand a token tree"),
-        }
-    }
-
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree])
+    pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: TokenStream)
                  -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let directory = Directory {
             path: cx.current_expansion.module.directory.clone(),
             ownership: cx.current_expansion.directory_ownership,
         };
-        macro_parser::parse(cx.parse_sess(), tts.iter().cloned().collect(), mtch, Some(directory))
+        macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory))
     }
 
     /// Check if this TokenTree is equal to the other, regardless of span information.
@@ -185,15 +115,8 @@ impl TokenTree {
         match (self, other) {
             (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
             (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
-                (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
-                {
-                    for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
-                        if !tt1.eq_unspanned(tt2) {
-                            return false;
-                        }
-                    }
-                    true
-                }
+                dl.delim == dl2.delim &&
+                dl.stream().trees().zip(dl2.stream().trees()).all(|(tt, tt2)| tt.eq_unspanned(&tt2))
             }
             (_, _) => false,
         }
@@ -213,64 +136,6 @@ impl TokenTree {
             _ => false,
         }
     }
-
-    /// Indicates if the token is an identifier.
@@ -299,7 +164,7 @@ impl From<TokenTree> for TokenStream {

 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::concat(iter.into_iter().map(Into::into))
+        TokenStream::concat(iter.into_iter().map(Into::into).collect::<Vec<_>>())
     }
 }

@@ -323,29 +188,30 @@ impl TokenStream {
         }
     }

-    pub fn concat<I: IntoIterator<Item = TokenStream>>(streams: I) -> TokenStream {
-        let mut streams = streams.into_iter().filter(|stream| !stream.is_empty());
-        let first_stream = match streams.next() {
-            Some(stream) => stream,
-            None => return TokenStream::empty(),
-        };
-        let second_stream = match streams.next() {
-            Some(stream) => stream,
-            None => return first_stream,
-        };
-        let mut vec = vec![first_stream, second_stream];
-        vec.extend(streams);
-        TokenStream { kind: TokenStreamKind::Stream(RcSlice::new(vec)) }
+    pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
+        match streams.len() {
+            0 => TokenStream::empty(),
+            1 => TokenStream::from(streams.pop().unwrap()),
+            _ => TokenStream::concat_rc_slice(RcSlice::new(streams)),
+        }
     }

-    pub fn trees<'a>(&'a self) -> Cursor {
+    fn concat_rc_slice(streams: RcSlice<TokenStream>) -> TokenStream {
+        TokenStream { kind: TokenStreamKind::Stream(streams) }
+    }
+
+    pub fn trees(&self) -> Cursor {
+        self.clone().into_trees()
+    }
+
+    pub fn into_trees(self) -> Cursor {
         Cursor::new(self)
     }

     /// Compares two TokenStreams, checking equality without regarding span information.
     pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
         for (t1, t2) in self.trees().zip(other.trees()) {
-            if !t1.eq_unspanned(t2) {
+            if !t1.eq_unspanned(&t2) {
                 return false;
             }
@@ -353,72 +219,154 @@
         }
     }
 }

-pub struct Cursor<'a> {
-    current_frame: CursorFrame<'a>,
-    stack: Vec<CursorFrame<'a>>,
-}
-
-impl<'a> Iterator for Cursor<'a> {
-    type Item = &'a TokenTree;
+pub struct Cursor(CursorKind);

-    fn next(&mut self) -> Option<&'a TokenTree> {
-        let tree = self.peek();
-        self.current_frame = self.stack.pop().unwrap_or(CursorFrame::Empty);
-        tree
-    }
+enum CursorKind {
+    Empty,
+    Tree(TokenTree, bool /* consumed? */),
+    Stream(StreamCursor),
 }

-enum CursorFrame<'a> {
-    Empty,
-    Tree(&'a TokenTree),
-    Stream(&'a RcSlice<TokenStream>, usize),
+struct StreamCursor {
+    stream: RcSlice<TokenStream>,
+    index: usize,
+    stack: Vec<(RcSlice<TokenStream>, usize)>,
 }
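`concat` no longer flattens its inputs into one vector: it wraps the pieces in a single `RcSlice`, and flattening is deferred to `Cursor`, whose `StreamCursor` keeps an explicit stack of `(slice, index)` frames while it walks the nesting. Roughly, in the style of the tests at the bottom of this diff (`string_to_stream` is the renamed test helper):

```rust
#[test]
fn concat_is_shallow_but_iteration_is_flat() {
    use syntax::tokenstream::TokenStream;
    use syntax::util::parser_testing::string_to_stream;

    let fst = string_to_stream("foo::bar".to_owned());
    let snd = string_to_stream("::baz".to_owned());

    // Cheap: only allocates the two-element RcSlice, not a copy of the trees.
    let res = TokenStream::concat(vec![fst, snd]);

    // The cursor still yields the five underlying token trees in order.
    assert_eq!(res.trees().count(), 5);
}
```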

-impl<'a> CursorFrame<'a> {
-    fn new(stream: &'a TokenStream) -> Self {
-        match stream.kind {
-            TokenStreamKind::Empty => CursorFrame::Empty,
-            TokenStreamKind::Tree(ref tree) => CursorFrame::Tree(tree),
-            TokenStreamKind::Stream(ref stream) => CursorFrame::Stream(stream, 0),
+impl Iterator for Cursor {
+    type Item = TokenTree;
+
+    fn next(&mut self) -> Option<TokenTree> {
+        let cursor = match self.0 {
+            CursorKind::Stream(ref mut cursor) => cursor,
+            CursorKind::Tree(ref tree, ref mut consumed @ false) => {
+                *consumed = true;
+                return Some(tree.clone());
+            }
+            _ => return None,
+        };
+
+        loop {
+            if cursor.index < cursor.stream.len() {
+                match cursor.stream[cursor.index].kind.clone() {
+                    TokenStreamKind::Tree(tree) => {
+                        cursor.index += 1;
+                        return Some(tree);
+                    }
+                    TokenStreamKind::Stream(stream) => {
+                        cursor.stack.push((mem::replace(&mut cursor.stream, stream),
+                                           mem::replace(&mut cursor.index, 0) + 1));
+                    }
+                    TokenStreamKind::Empty => {
+                        cursor.index += 1;
+                    }
+                }
+            } else if let Some((stream, index)) = cursor.stack.pop() {
+                cursor.stream = stream;
+                cursor.index = index;
+            } else {
+                return None;
+            }
         }
     }
 }

-impl<'a> Cursor<'a> {
-    fn new(stream: &'a TokenStream) -> Self {
-        Cursor {
-            current_frame: CursorFrame::new(stream),
-            stack: Vec::new(),
-        }
+impl Cursor {
+    fn new(stream: TokenStream) -> Self {
+        Cursor(match stream.kind {
+            TokenStreamKind::Empty => CursorKind::Empty,
+            TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false),
+            TokenStreamKind::Stream(stream) => {
+                CursorKind::Stream(StreamCursor { stream: stream, index: 0, stack: Vec::new() })
+            }
+        })
     }

-    pub fn peek(&mut self) -> Option<&'a TokenTree> {
-        while let CursorFrame::Stream(stream, index) = self.current_frame {
-            self.current_frame = if index == stream.len() {
-                self.stack.pop().unwrap_or(CursorFrame::Empty)
-            } else {
-                self.stack.push(CursorFrame::Stream(stream, index + 1));
-                CursorFrame::new(&stream[index])
-            };
+    pub fn original_stream(self) -> TokenStream {
+        match self.0 {
+            CursorKind::Empty => TokenStream::empty(),
+            CursorKind::Tree(tree, _) => tree.into(),
+            CursorKind::Stream(cursor) => TokenStream::concat_rc_slice({
+                cursor.stack.get(0).cloned().map(|(stream, _)| stream).unwrap_or(cursor.stream)
+            }),
         }
+    }
+
+    pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
+        fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> {
+            for stream in streams {
+                n = match stream.kind {
+                    TokenStreamKind::Tree(ref tree) if n == 0 => return Ok(tree.clone()),
+                    TokenStreamKind::Tree(..) => n - 1,
+                    TokenStreamKind::Stream(ref stream) => match look_ahead(stream, n) {
+                        Ok(tree) => return Ok(tree),
+                        Err(n) => n,
+                    },
+                    _ => n,
+                };
+            }

-        match self.current_frame {
-            CursorFrame::Empty => None,
-            CursorFrame::Tree(tree) => Some(tree),
-            CursorFrame::Stream(..) => unreachable!(),
+            Err(n)
         }
+
+        match self.0 {
+            CursorKind::Empty | CursorKind::Tree(_, true) => Err(n),
+            CursorKind::Tree(ref tree, false) => look_ahead(&[tree.clone().into()], n),
+            CursorKind::Stream(ref cursor) => {
+                look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| {
+                    for &(ref stream, index) in cursor.stack.iter().rev() {
+                        n = match look_ahead(&stream[index..], n) {
+                            Ok(tree) => return Ok(tree),
+                            Err(n) => n,
+                        }
+                    }
+
+                    Err(n)
+                })
+            }
+        }.ok()
     }
 }
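The borrowing `peek` is replaced by `look_ahead(n)`, which clones out the n-th upcoming tree without advancing, and by `original_stream`, which recovers the stream the cursor started from. A small sketch of the intended contract, assuming a non-empty `stream` (`eq_unspanned` is the span-insensitive comparison defined earlier in this file):

```rust
use syntax::tokenstream::TokenStream;

fn peek_then_advance(stream: TokenStream) {
    let mut cursor = stream.into_trees();

    // Peeking returns an owned clone of the upcoming tree...
    let peeked = cursor.look_ahead(0).expect("non-empty stream");

    // ...and does not advance: `next` still yields that same tree.
    let next = cursor.next().expect("non-empty stream");
    assert!(peeked.eq_unspanned(&next));
}
```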

+/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation.
+/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
+/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
+#[derive(Debug, Clone)]
+pub struct ThinTokenStream(Option<RcSlice<TokenStream>>);
+
+impl From<TokenStream> for ThinTokenStream {
+    fn from(stream: TokenStream) -> ThinTokenStream {
+        ThinTokenStream(match stream.kind {
+            TokenStreamKind::Empty => None,
+            TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])),
+            TokenStreamKind::Stream(stream) => Some(stream),
+        })
+    }
+}
+
+impl From<ThinTokenStream> for TokenStream {
+    fn from(stream: ThinTokenStream) -> TokenStream {
+        stream.0.map(TokenStream::concat_rc_slice).unwrap_or_else(TokenStream::empty)
+    }
+}
+
+impl Eq for ThinTokenStream {}
+
+impl PartialEq<ThinTokenStream> for ThinTokenStream {
+    fn eq(&self, other: &ThinTokenStream) -> bool {
+        TokenStream::from(self.clone()) == TokenStream::from(other.clone())
+    }
+}

 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.write_str(&pprust::tts_to_string(&self.trees().cloned().collect::<Vec<_>>()))
+        f.write_str(&pprust::tts_to_string(&self.trees().collect::<Vec<_>>()))
     }
 }

 impl Encodable for TokenStream {
     fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
-        self.trees().cloned().collect::<Vec<_>>().encode(encoder)
+        self.trees().collect::<Vec<_>>().encode(encoder)
     }
 }

@@ -428,6 +376,32 @@
 impl Decodable for TokenStream {
     fn decode<D: Decoder>(decoder: &mut D) -> Result<TokenStream, D::Error> {
     }
 }

+impl Hash for TokenStream {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) {
+        for tree in self.trees() {
+            tree.hash(state);
+        }
+    }
+}
+
+impl Encodable for ThinTokenStream {
+    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
+        TokenStream::from(self.clone()).encode(encoder)
+    }
+}
+
+impl Decodable for ThinTokenStream {
+    fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
+        TokenStream::decode(decoder).map(Into::into)
+    }
+}
+
+impl Hash for ThinTokenStream {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) {
+        TokenStream::from(self.clone()).hash(state);
+    }
+}
+
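The doc comment spells out the trade-off: `TokenTree::Delimited` must hold the smaller `ThinTokenStream` so that `TokenTree` stays finitely sized, at the cost of one allocation when a single tree is thinned. The two `From` impls are meant to round-trip; a sketch, again leaning on the test helper from this diff:

```rust
#[test]
fn thin_token_stream_round_trips() {
    use syntax::tokenstream::{ThinTokenStream, TokenStream};
    use syntax::util::parser_testing::string_to_stream;

    let stream = string_to_stream("foo(bar)".to_owned());

    // Empty thins to None, a lone tree is boxed into a one-element
    // RcSlice, and a Stream reuses its RcSlice as-is.
    let thin: ThinTokenStream = stream.clone().into();
    let fat: TokenStream = thin.into();

    assert!(stream.eq_unspanned(&fat));
}
```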
 #[cfg(test)]
 mod tests {
@@ -435,10 +409,10 @@
     use super::*;
     use syntax::ast::Ident;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
     use parse::token::Token;
-    use util::parser_testing::string_to_tts;
+    use util::parser_testing::string_to_stream;

     fn string_to_ts(string: &str) -> TokenStream {
-        string_to_tts(string.to_owned()).into_iter().collect()
+        string_to_stream(string.to_owned())
     }

     fn sp(a: u32, b: u32) -> Span {
@@ -454,24 +428,16 @@
         let test_res = string_to_ts("foo::bar::baz");
         let test_fst = string_to_ts("foo::bar");
         let test_snd = string_to_ts("::baz");
-        let eq_res = TokenStream::concat([test_fst, test_snd].iter().cloned());
+        let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
         assert_eq!(test_res.trees().count(), 5);
         assert_eq!(eq_res.trees().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
     }

-    #[test]
-    fn test_from_to_bijection() {
-        let test_start = string_to_tts("foo::bar(baz)".to_string());
-        let ts = test_start.iter().cloned().collect::<TokenStream>();
-        let test_end: Vec<TokenTree> = ts.trees().cloned().collect();
-        assert_eq!(test_start, test_end)
-    }
-
     #[test]
     fn test_to_from_bijection() {
         let test_start = string_to_ts("foo::bar(baz)");
-        let test_end = test_start.trees().cloned().collect();
+        let test_end = test_start.trees().collect();
         assert_eq!(test_start, test_end)
     }
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index e703dc6b4191c..51eb295b502a7 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -9,17 +9,17 @@
 // except according to those terms.

 use ast::{self, Ident};
-use parse::{ParseSess,PResult,filemap_to_tts};
+use parse::{ParseSess, PResult, filemap_to_stream};
 use parse::{lexer, new_parser_from_source_str};
 use parse::parser::Parser;
 use ptr::P;
-use tokenstream;
+use tokenstream::TokenStream;
 use std::iter::Peekable;

 /// Map a string to tts, using a made-up filename:
-pub fn string_to_tts(source_str: String) -> Vec<tokenstream::TokenTree> {
+pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new();
-    filemap_to_tts(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
+    filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
 }

 /// Map string to parser (via tts)
diff --git a/src/libsyntax/util/rc_slice.rs b/src/libsyntax/util/rc_slice.rs
index cb3becf83f682..195fb23f9d8c7 100644
--- a/src/libsyntax/util/rc_slice.rs
+++ b/src/libsyntax/util/rc_slice.rs
@@ -8,7 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use std::hash::{self, Hash};
 use std::fmt;
 use std::ops::Deref;
 use std::rc::Rc;
@@ -37,12 +36,6 @@ impl<T> Deref for RcSlice<T> {
     }
 }

-impl<T: Hash> Hash for RcSlice<T> {
-    fn hash<H: hash::Hasher>(&self, state: &mut H) {
-        self.deref().hash(state);
-    }
-}
-
 impl<T: fmt::Debug> fmt::Debug for RcSlice<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Debug::fmt(self.deref(), f)
diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs
index a5e083f926a07..767ec94a0ce61 100644
--- a/src/libsyntax_ext/asm.rs
+++ b/src/libsyntax_ext/asm.rs
@@ -107,7 +107,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
             if p2.token != token::Eof {
                 let mut extra_tts = panictry!(p2.parse_all_token_trees());
                 extra_tts.extend(tts[first_colon..].iter().cloned());
-                p = parse::tts_to_parser(cx.parse_sess, extra_tts);
+                p = parse::stream_to_parser(cx.parse_sess, extra_tts.into_iter().collect());
             }

             asm = s;
diff --git a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs
index a41b34f6a53d0..5139b68bce7fd 100644
--- a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs
+++ b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs
@@ -18,7 +18,7 @@ use syntax::ast::*;
 use syntax::attr::*;
 use syntax::ast;
 use syntax::parse;
-use syntax::parse::{ParseSess,filemap_to_tts, PResult};
+use syntax::parse::{ParseSess, PResult};
 use syntax::parse::new_parser_from_source_str;
 use syntax::parse::parser::Parser;
 use syntax::parse::token;
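As the `asm!` hunk above shows, call sites left holding a `Vec<TokenTree>` bridge to the stream-based entry points via the `FromIterator` impl, since each tree converts `Into<TokenStream>`. A one-line helper to that effect (the function name is illustrative):

```rust
use syntax::tokenstream::{TokenStream, TokenTree};

// Stitches a collection of trees into one stream without an explicit
// TokenStream::concat call; this is what `.into_iter().collect()` does above.
fn trees_to_stream(trees: Vec<TokenTree>) -> TokenStream {
    trees.into_iter().collect()
}
```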
diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
index 730e112c70016..2f94a440e72da 100644
--- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
@@ -32,13 +32,13 @@ pub fn plugin_registrar(reg: &mut Registry) {

 fn cond(input: TokenStream) -> TokenStream {
     let mut conds = Vec::new();
-    let mut input = input.trees();
+    let mut input = input.trees().peekable();
     while let Some(tree) = input.next() {
-        let cond: TokenStream = match *tree {
-            TokenTree::Delimited(_, ref delimited) => delimited.tts.iter().cloned().collect(),
+        let mut cond = match tree {
+            TokenTree::Delimited(_, ref delimited) => delimited.stream(),
             _ => panic!("Invalid input"),
         };
-        let mut trees = cond.trees().cloned();
+        let mut trees = cond.trees();
         let test = trees.next();
         let rhs = trees.collect::<TokenStream>();
         if rhs.is_empty() {
diff --git a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs
index 3c8868f1664e8..134e36c587bed 100644
--- a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs
@@ -26,7 +26,7 @@ use syntax::print::pprust;
 use syntax::ptr::P;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 use rustc_plugin::Registry;

 struct Expander {
@@ -37,7 +37,7 @@ impl TTMacroExpander for Expander {
     fn expand<'cx>(&self,
                    ecx: &'cx mut ExtCtxt,
                    sp: Span,
-                   _: &[tokenstream::TokenTree]) -> Box<MacResult+'cx> {
+                   _: TokenStream) -> Box<MacResult+'cx> {
         let args = self.args.iter().map(|i| pprust::meta_list_item_to_string(i))
             .collect::<Vec<_>>().join(", ");
         MacEager::expr(ecx.expr_str(sp, Symbol::intern(&args)))
diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
index 3db69f2167cc6..c9fa96b83c280 100644
--- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
@@ -35,8 +35,8 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
                       -> Box<MacResult + 'static> {
     let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+);
-    let mbe_matcher = quoted::parse(&mbe_matcher, true, cx.parse_sess);
-    let map = match TokenTree::parse(cx, &mbe_matcher, args) {
+    let mbe_matcher = quoted::parse(mbe_matcher.into_iter().collect(), true, cx.parse_sess);
+    let map = match TokenTree::parse(cx, &mbe_matcher, args.iter().cloned().collect()) {
         Success(map) => map,
         Failure(_, tok) => {
             panic!("expected Success, but got Failure: {}", parse_failure_msg(tok));