From 0bcb05cf50624fc54162823f998df40658e158b1 Mon Sep 17 00:00:00 2001
From: Stefan Schindler
Date: Sun, 27 Nov 2016 15:34:32 +0100
Subject: [PATCH 1/3] Finishing d2f8fb0a0a9dd from @jseyfried

---
 src/grammar/verify.rs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/grammar/verify.rs b/src/grammar/verify.rs
index 48be58f731cd..482f3f7d839b 100644
--- a/src/grammar/verify.rs
+++ b/src/grammar/verify.rs
@@ -37,6 +37,8 @@ use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
 use syntax::parse::lexer::TokenAndSpan;
 use syntax_pos::Pos;
 
+use syntax::symbol::Symbol;
+
 fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
     fn id() -> token::Token {
         Token::Ident(ast::Ident::with_empty_ctxt(Name(0)))
@@ -158,7 +160,7 @@ fn fix(mut lit: &str) -> ast::Name {
     let leading_hashes = count(lit);
 
     // +1/-1 to adjust for single quotes
-    parse::token::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1])
+    Symbol::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1])
 }
 
 /// Assuming a char/byte literal, strip the 'b' prefix and the single quotes.
@@ -168,7 +170,7 @@ fn fixchar(mut lit: &str) -> ast::Name {
         lit = &lit[1..];
     }
 
-    parse::token::intern(&lit[1..lit.len() - 1])
+    Symbol::intern(&lit[1..lit.len() - 1])
 }
 
 fn count(lit: &str) -> usize {
@@ -196,7 +198,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
     let not_found = format!("didn't find token {:?} in the map", toknum);
     let proto_tok = tokens.get(toknum).expect(&not_found[..]);
 
-    let nm = parse::token::intern(content);
+    let nm = Symbol::intern(content);
 
     debug!("What we got: content (`{}`), proto: {:?}",
            content, proto_tok);

From 2dc3fdf2bd6153ad640544776abb62ad17d6b6be Mon Sep 17 00:00:00 2001
From: Stefan Schindler
Date: Sun, 27 Nov 2016 16:45:09 +0100
Subject: [PATCH 2/3] Resolve visibility issues

syntax::ast::Name is a re-export of syntax::symbol::Symbol(u32), so
Symbol can be used directly instead.
---
 src/grammar/verify.rs   | 26 ++++++++++++--------------
 src/libsyntax/symbol.rs |  4 ++++
 2 files changed, 16 insertions(+), 14 deletions(-)

diff --git a/src/grammar/verify.rs b/src/grammar/verify.rs
index 482f3f7d839b..1903c9125e41 100644
--- a/src/grammar/verify.rs
+++ b/src/grammar/verify.rs
@@ -23,7 +23,6 @@ use std::fs::File;
 use std::io::{BufRead, Read};
 use std::path::Path;
 
-use syntax::parse;
 use syntax::parse::lexer;
 use rustc::dep_graph::DepGraph;
 use rustc::session::{self, config};
@@ -31,7 +30,6 @@ use rustc::middle::cstore::DummyCrateStore;
 use std::rc::Rc;
 
 use syntax::ast;
-use syntax::ast::Name;
 use syntax::codemap;
 use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
 use syntax::parse::lexer::TokenAndSpan;
@@ -41,7 +39,7 @@ use syntax::symbol::Symbol;
 
 fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
     fn id() -> token::Token {
-        Token::Ident(ast::Ident::with_empty_ctxt(Name(0)))
+        Token::Ident(ast::Ident::with_empty_ctxt(Symbol::invalid()))
     }
 
     let mut res = HashMap::new();
@@ -67,7 +65,7 @@
             "SHL" => Token::BinOp(BinOpToken::Shl),
             "LBRACE" => Token::OpenDelim(DelimToken::Brace),
             "RARROW" => Token::RArrow,
-            "LIT_STR" => Token::Literal(Lit::Str_(Name(0)), None),
+            "LIT_STR" => Token::Literal(Lit::Str_(Symbol::invalid()), None),
             "DOTDOT" => Token::DotDot,
             "MOD_SEP" => Token::ModSep,
             "DOTDOTDOT" => Token::DotDotDot,
@@ -77,21 +75,21 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
             "ANDAND" => Token::AndAnd,
             "AT" => Token::At,
             "LBRACKET" => Token::OpenDelim(DelimToken::Bracket),
-            "LIT_STR_RAW" => Token::Literal(Lit::StrRaw(Name(0), 0), None),
+            "LIT_STR_RAW" => Token::Literal(Lit::StrRaw(Symbol::invalid(), 0), None),
             "RPAREN" => Token::CloseDelim(DelimToken::Paren),
             "SLASH" => Token::BinOp(BinOpToken::Slash),
             "COMMA" => Token::Comma,
-            "LIFETIME" => Token::Lifetime(ast::Ident::with_empty_ctxt(Name(0))),
+            "LIFETIME" => Token::Lifetime(ast::Ident::with_empty_ctxt(Symbol::invalid())),
             "CARET" => Token::BinOp(BinOpToken::Caret),
             "TILDE" => Token::Tilde,
             "IDENT" => id(),
             "PLUS" => Token::BinOp(BinOpToken::Plus),
-            "LIT_CHAR" => Token::Literal(Lit::Char(Name(0)), None),
-            "LIT_BYTE" => Token::Literal(Lit::Byte(Name(0)), None),
+            "LIT_CHAR" => Token::Literal(Lit::Char(Symbol::invalid()), None),
+            "LIT_BYTE" => Token::Literal(Lit::Byte(Symbol::invalid()), None),
             "EQ" => Token::Eq,
             "RBRACKET" => Token::CloseDelim(DelimToken::Bracket),
             "COMMENT" => Token::Comment,
-            "DOC_COMMENT" => Token::DocComment(Name(0)),
+            "DOC_COMMENT" => Token::DocComment(Symbol::invalid()),
             "DOT" => Token::Dot,
             "EQEQ" => Token::EqEq,
             "NE" => Token::Ne,
@@ -101,9 +99,9 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
             "BINOP" => Token::BinOp(BinOpToken::Plus),
             "POUND" => Token::Pound,
             "OROR" => Token::OrOr,
-            "LIT_INTEGER" => Token::Literal(Lit::Integer(Name(0)), None),
+            "LIT_INTEGER" => Token::Literal(Lit::Integer(Symbol::invalid()), None),
             "BINOPEQ" => Token::BinOpEq(BinOpToken::Plus),
-            "LIT_FLOAT" => Token::Literal(Lit::Float(Name(0)), None),
+            "LIT_FLOAT" => Token::Literal(Lit::Float(Symbol::invalid()), None),
             "WHITESPACE" => Token::Whitespace,
             "UNDERSCORE" => Token::Underscore,
             "MINUS" => Token::BinOp(BinOpToken::Minus),
@@ -113,10 +111,10 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
             "OR" => Token::BinOp(BinOpToken::Or),
             "GT" => Token::Gt,
             "LE" => Token::Le,
-            "LIT_BINARY" => Token::Literal(Lit::ByteStr(Name(0)), None),
-            "LIT_BINARY_RAW" => Token::Literal(Lit::ByteStrRaw(Name(0), 0), None),
+            "LIT_BINARY" => Token::Literal(Lit::ByteStr(Symbol::invalid()), None),
+            "LIT_BINARY_RAW" => Token::Literal(Lit::ByteStrRaw(Symbol::invalid(), 0), None),
             "QUESTION" => Token::Question,
-            "SHEBANG" => Token::Shebang(Name(0)),
+            "SHEBANG" => Token::Shebang(Symbol::invalid()),
             _ => continue,
         };
 
diff --git a/src/libsyntax/symbol.rs b/src/libsyntax/symbol.rs
index fe9a176179ce..e2dcc2409328 100644
--- a/src/libsyntax/symbol.rs
+++ b/src/libsyntax/symbol.rs
@@ -35,6 +35,10 @@ impl Symbol {
         with_interner(|interner| interner.gensym(string))
     }
 
+    pub fn invalid() -> Self {
+        Symbol(0u32)
+    }
+
     pub fn as_str(self) -> InternedString {
         with_interner(|interner| unsafe {
             InternedString {

From 28b64dc1b955afd6e89b21499dfa1fd1d452e31c Mon Sep 17 00:00:00 2001
From: Stefan Schindler
Date: Sun, 27 Nov 2016 17:59:36 +0100
Subject: [PATCH 3/3] Use keywords::Invalid

---
 src/grammar/verify.rs   | 28 +++++++++++++++-------------
 src/libsyntax/symbol.rs |  4 ----
 2 files changed, 15 insertions(+), 17 deletions(-)

diff --git a/src/grammar/verify.rs b/src/grammar/verify.rs
index 1903c9125e41..919fc98e438c 100644
--- a/src/grammar/verify.rs
+++ b/src/grammar/verify.rs
@@ -35,11 +35,11 @@ use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
 use syntax::parse::lexer::TokenAndSpan;
 use syntax_pos::Pos;
 
-use syntax::symbol::Symbol;
+use syntax::symbol::{Symbol, keywords};
 
 fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
     fn id() -> token::Token {
-        Token::Ident(ast::Ident::with_empty_ctxt(Symbol::invalid()))
+        Token::Ident(ast::Ident::with_empty_ctxt(keywords::Invalid.name()))
     }
 
     let mut res = HashMap::new();
@@ -65,7 +65,7 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
             "SHL" => Token::BinOp(BinOpToken::Shl),
             "LBRACE" => Token::OpenDelim(DelimToken::Brace),
             "RARROW" => Token::RArrow,
-            "LIT_STR" => Token::Literal(Lit::Str_(Symbol::invalid()), None),
+            "LIT_STR" => Token::Literal(Lit::Str_(keywords::Invalid.name()), None),
             "DOTDOT" => Token::DotDot,
             "MOD_SEP" => Token::ModSep,
             "DOTDOTDOT" => Token::DotDotDot,
@@ -75,21 +75,22 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
             "ANDAND" => Token::AndAnd,
             "AT" => Token::At,
             "LBRACKET" => Token::OpenDelim(DelimToken::Bracket),
-            "LIT_STR_RAW" => Token::Literal(Lit::StrRaw(Symbol::invalid(), 0), None),
+            "LIT_STR_RAW" => Token::Literal(Lit::StrRaw(keywords::Invalid.name(), 0), None),
             "RPAREN" => Token::CloseDelim(DelimToken::Paren),
             "SLASH" => Token::BinOp(BinOpToken::Slash),
             "COMMA" => Token::Comma,
-            "LIFETIME" => Token::Lifetime(ast::Ident::with_empty_ctxt(Symbol::invalid())),
+            "LIFETIME" => Token::Lifetime(
+                ast::Ident::with_empty_ctxt(keywords::Invalid.name())),
             "CARET" => Token::BinOp(BinOpToken::Caret),
             "TILDE" => Token::Tilde,
             "IDENT" => id(),
             "PLUS" => Token::BinOp(BinOpToken::Plus),
-            "LIT_CHAR" => Token::Literal(Lit::Char(Symbol::invalid()), None),
-            "LIT_BYTE" => Token::Literal(Lit::Byte(Symbol::invalid()), None),
+            "LIT_CHAR" => Token::Literal(Lit::Char(keywords::Invalid.name()), None),
+            "LIT_BYTE" => Token::Literal(Lit::Byte(keywords::Invalid.name()), None),
             "EQ" => Token::Eq,
             "RBRACKET" => Token::CloseDelim(DelimToken::Bracket),
             "COMMENT" => Token::Comment,
-            "DOC_COMMENT" => Token::DocComment(Symbol::invalid()),
+            "DOC_COMMENT" => Token::DocComment(keywords::Invalid.name()),
             "DOT" => Token::Dot,
             "EQEQ" => Token::EqEq,
             "NE" => Token::Ne,
@@ -99,9 +100,9 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
             "BINOP" => Token::BinOp(BinOpToken::Plus),
             "POUND" => Token::Pound,
             "OROR" => Token::OrOr,
-            "LIT_INTEGER" => Token::Literal(Lit::Integer(Symbol::invalid()), None),
+            "LIT_INTEGER" => Token::Literal(Lit::Integer(keywords::Invalid.name()), None),
             "BINOPEQ" => Token::BinOpEq(BinOpToken::Plus),
-            "LIT_FLOAT" => Token::Literal(Lit::Float(Symbol::invalid()), None),
+            "LIT_FLOAT" => Token::Literal(Lit::Float(keywords::Invalid.name()), None),
             "WHITESPACE" => Token::Whitespace,
             "UNDERSCORE" => Token::Underscore,
             "MINUS" => Token::BinOp(BinOpToken::Minus),
@@ -111,10 +112,11 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
             "OR" => Token::BinOp(BinOpToken::Or),
             "GT" => Token::Gt,
             "LE" => Token::Le,
-            "LIT_BINARY" => Token::Literal(Lit::ByteStr(Symbol::invalid()), None),
-            "LIT_BINARY_RAW" => Token::Literal(Lit::ByteStrRaw(Symbol::invalid(), 0), None),
+            "LIT_BINARY" => Token::Literal(Lit::ByteStr(keywords::Invalid.name()), None),
+            "LIT_BINARY_RAW" => Token::Literal(
+                Lit::ByteStrRaw(keywords::Invalid.name(), 0), None),
             "QUESTION" => Token::Question,
-            "SHEBANG" => Token::Shebang(Symbol::invalid()),
+            "SHEBANG" => Token::Shebang(keywords::Invalid.name()),
             _ => continue,
         };
 
diff --git a/src/libsyntax/symbol.rs b/src/libsyntax/symbol.rs
index e2dcc2409328..fe9a176179ce 100644
--- a/src/libsyntax/symbol.rs
+++ b/src/libsyntax/symbol.rs
@@ -35,10 +35,6 @@ impl Symbol {
         with_interner(|interner| interner.gensym(string))
     }
 
-    pub fn invalid() -> Self {
-        Symbol(0u32)
-    }
-
     pub fn as_str(self) -> InternedString {
         with_interner(|interner| unsafe {
             InternedString {
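
Note for reviewers (not part of the patches): a minimal Rust sketch of the API
shape after this series, assuming the 2016-era rustc-internal libsyntax crate
that src/grammar/verify.rs builds against; the helper function names below are
illustrative only, not code added by the series.

    use syntax::ast;
    use syntax::symbol::{Symbol, keywords};

    // Placeholder identifier for the ANTLR token map: keywords::Invalid.name()
    // replaces the earlier Name(0) / Symbol::invalid() stand-ins.
    fn placeholder_ident() -> ast::Ident {
        ast::Ident::with_empty_ctxt(keywords::Invalid.name())
    }

    // Literal contents are interned through Symbol::intern instead of the
    // removed parse::token::intern; ast::Name is a re-export of Symbol.
    fn intern_content(content: &str) -> ast::Name {
        Symbol::intern(content)
    }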