mirror of
https://github.com/rust-lang/rust.git
synced 2026-05-17 05:25:37 +03:00
auto merge of #18365 : bjz/rust/token, r=alexcrichton
[breaking-change]
(for syntax-extensions)
- Token variant identifiers have been converted to PascalCase for consistency with Rust coding standards
- Some free-functions in `syntax::token` have been converted to methods on `syntax::token::Token`:
- `can_begin_expr` -> `Token::can_begin_expr`
- `close_delimiter_for` -> `Token::get_close_delimiter`
- `is_lit` -> `Token::is_lit`
- `is_ident` -> `Token::is_ident`
- `is_path` -> `Token::is_path`
- `is_plain_ident` -> `Token::is_plain_ident`
- `is_lifetime` -> `Token::is_lifetime`
- `is_mutability` -> `Token::is_mutability`
- `to_binop` -> `Token::to_binop`
- `is_keyword` -> `Token::is_keyword`
- `is_any_keyword` -> `Token::is_any_keyword`
- `is_strict_keyword` -> `Token::is_strict_keyword`
- `is_reserved_keyword` -> `Token::is_reserved_keyword`
- `mtwt_token_eq` -> `Token::mtwt_eq`
- `token::Ident` now takes an enum instead of a boolean for clarity
- `token::{to_string, binop_to_string}` were moved to `pprust::{token_to_string, binop_to_string}`
This commit is contained in:
@@ -55,7 +55,7 @@ extern crate syntax;
|
||||
extern crate rustc;
|
||||
|
||||
use syntax::codemap::Span;
|
||||
use syntax::parse::token::{IDENT, get_ident};
|
||||
use syntax::parse::token;
|
||||
use syntax::ast::{TokenTree, TtToken};
|
||||
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
|
||||
use syntax::ext::build::AstBuilder; // trait for expr_uint
|
||||
@@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
|
||||
("I", 1)];
|
||||
|
||||
let text = match args {
|
||||
[TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
|
||||
[TtToken(_, token::Ident(s, _))] => token::get_ident(s).to_string(),
|
||||
_ => {
|
||||
cx.span_err(sp, "argument should be a single identifier");
|
||||
return DummyResult::any(sp);
|
||||
|
||||
+103
-100
@@ -30,12 +30,12 @@
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::ast::Name;
|
||||
use syntax::parse::token::*;
|
||||
use syntax::parse::token;
|
||||
use syntax::parse::lexer::TokenAndSpan;
|
||||
|
||||
fn parse_token_list(file: &str) -> HashMap<String, Token> {
|
||||
fn id() -> Token {
|
||||
IDENT(ast::Ident { name: Name(0), ctxt: 0, }, false)
|
||||
token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, token::Plain)
|
||||
}
|
||||
|
||||
let mut res = HashMap::new();
|
||||
@@ -52,64 +52,64 @@ fn id() -> Token {
|
||||
let num = line.slice_from(eq + 1);
|
||||
|
||||
let tok = match val {
|
||||
"SHR" => BINOP(SHR),
|
||||
"DOLLAR" => DOLLAR,
|
||||
"LT" => LT,
|
||||
"STAR" => BINOP(STAR),
|
||||
"FLOAT_SUFFIX" => id(),
|
||||
"INT_SUFFIX" => id(),
|
||||
"SHL" => BINOP(SHL),
|
||||
"LBRACE" => LBRACE,
|
||||
"RARROW" => RARROW,
|
||||
"LIT_STR" => LIT_STR(Name(0)),
|
||||
"DOTDOT" => DOTDOT,
|
||||
"MOD_SEP" => MOD_SEP,
|
||||
"DOTDOTDOT" => DOTDOTDOT,
|
||||
"NOT" => NOT,
|
||||
"AND" => BINOP(AND),
|
||||
"LPAREN" => LPAREN,
|
||||
"ANDAND" => ANDAND,
|
||||
"AT" => AT,
|
||||
"LBRACKET" => LBRACKET,
|
||||
"LIT_STR_RAW" => LIT_STR_RAW(Name(0), 0),
|
||||
"RPAREN" => RPAREN,
|
||||
"SLASH" => BINOP(SLASH),
|
||||
"COMMA" => COMMA,
|
||||
"LIFETIME" => LIFETIME(ast::Ident { name: Name(0), ctxt: 0 }),
|
||||
"CARET" => BINOP(CARET),
|
||||
"TILDE" => TILDE,
|
||||
"IDENT" => id(),
|
||||
"PLUS" => BINOP(PLUS),
|
||||
"LIT_CHAR" => LIT_CHAR(Name(0)),
|
||||
"LIT_BYTE" => LIT_BYTE(Name(0)),
|
||||
"EQ" => EQ,
|
||||
"RBRACKET" => RBRACKET,
|
||||
"COMMENT" => COMMENT,
|
||||
"DOC_COMMENT" => DOC_COMMENT(Name(0)),
|
||||
"DOT" => DOT,
|
||||
"EQEQ" => EQEQ,
|
||||
"NE" => NE,
|
||||
"GE" => GE,
|
||||
"PERCENT" => BINOP(PERCENT),
|
||||
"RBRACE" => RBRACE,
|
||||
"BINOP" => BINOP(PLUS),
|
||||
"POUND" => POUND,
|
||||
"OROR" => OROR,
|
||||
"LIT_INTEGER" => LIT_INTEGER(Name(0)),
|
||||
"BINOPEQ" => BINOPEQ(PLUS),
|
||||
"LIT_FLOAT" => LIT_FLOAT(Name(0)),
|
||||
"WHITESPACE" => WS,
|
||||
"UNDERSCORE" => UNDERSCORE,
|
||||
"MINUS" => BINOP(MINUS),
|
||||
"SEMI" => SEMI,
|
||||
"COLON" => COLON,
|
||||
"FAT_ARROW" => FAT_ARROW,
|
||||
"OR" => BINOP(OR),
|
||||
"GT" => GT,
|
||||
"LE" => LE,
|
||||
"LIT_BINARY" => LIT_BINARY(Name(0)),
|
||||
"LIT_BINARY_RAW" => LIT_BINARY_RAW(Name(0), 0),
|
||||
_ => continue
|
||||
"SHR" => token::BinOp(token::Shr),
|
||||
"DOLLAR" => token::Dollar,
|
||||
"LT" => token::Lt,
|
||||
"STAR" => token::BinOp(token::Star),
|
||||
"FLOAT_SUFFIX" => id(),
|
||||
"INT_SUFFIX" => id(),
|
||||
"SHL" => token::BinOp(token::Shl),
|
||||
"LBRACE" => token::LBrace,
|
||||
"RARROW" => token::Rarrow,
|
||||
"LIT_STR" => token::LitStr(Name(0)),
|
||||
"DOTDOT" => token::DotDot,
|
||||
"MOD_SEP" => token::ModSep,
|
||||
"DOTDOTDOT" => token::DotDotDot,
|
||||
"NOT" => token::Not,
|
||||
"AND" => token::BinOp(token::And),
|
||||
"LPAREN" => token::LParen,
|
||||
"ANDAND" => token::AndAnd,
|
||||
"AT" => token::At,
|
||||
"LBRACKET" => token::LBracket,
|
||||
"LIT_STR_RAW" => token::LitStrRaw(Name(0), 0),
|
||||
"RPAREN" => token::RParen,
|
||||
"SLASH" => token::BinOp(token::Slash),
|
||||
"COMMA" => token::Comma,
|
||||
"LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }),
|
||||
"CARET" => token::BinOp(token::Caret),
|
||||
"TILDE" => token::Tilde,
|
||||
"IDENT" => token::Id(),
|
||||
"PLUS" => token::BinOp(token::Plus),
|
||||
"LIT_CHAR" => token::LitChar(Name(0)),
|
||||
"LIT_BYTE" => token::LitByte(Name(0)),
|
||||
"EQ" => token::Eq,
|
||||
"RBRACKET" => token::RBracket,
|
||||
"COMMENT" => token::Comment,
|
||||
"DOC_COMMENT" => token::DocComment(Name(0)),
|
||||
"DOT" => token::Dot,
|
||||
"EQEQ" => token::EqEq,
|
||||
"NE" => token::Ne,
|
||||
"GE" => token::Ge,
|
||||
"PERCENT" => token::BinOp(token::Percent),
|
||||
"RBRACE" => token::RBrace,
|
||||
"BINOP" => token::BinOp(token::Plus),
|
||||
"POUND" => token::Pound,
|
||||
"OROR" => token::OrOr,
|
||||
"LIT_INTEGER" => token::LitInteger(Name(0)),
|
||||
"BINOPEQ" => token::BinOpEq(token::Plus),
|
||||
"LIT_FLOAT" => token::LitFloat(Name(0)),
|
||||
"WHITESPACE" => token::Whitespace,
|
||||
"UNDERSCORE" => token::Underscore,
|
||||
"MINUS" => token::BinOp(token::Minus),
|
||||
"SEMI" => token::Semi,
|
||||
"COLON" => token::Colon,
|
||||
"FAT_ARROW" => token::FatArrow,
|
||||
"OR" => token::BinOp(token::Or),
|
||||
"GT" => token::Gt,
|
||||
"LE" => token::Le,
|
||||
"LIT_BINARY" => token::LitBinary(Name(0)),
|
||||
"LIT_BINARY_RAW" => token::LitBinaryRaw(Name(0), 0),
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
res.insert(num.to_string(), tok);
|
||||
@@ -119,19 +119,19 @@ fn id() -> Token {
|
||||
res
|
||||
}
|
||||
|
||||
fn str_to_binop(s: &str) -> BinOp {
|
||||
fn str_to_binop(s: &str) -> BinOpToken {
|
||||
match s {
|
||||
"+" => PLUS,
|
||||
"/" => SLASH,
|
||||
"-" => MINUS,
|
||||
"*" => STAR,
|
||||
"%" => PERCENT,
|
||||
"^" => CARET,
|
||||
"&" => AND,
|
||||
"|" => OR,
|
||||
"<<" => SHL,
|
||||
">>" => SHR,
|
||||
_ => fail!("Bad binop str `{}`", s)
|
||||
"+" => token::Plus,
|
||||
"/" => token::Slash,
|
||||
"-" => token::Minus,
|
||||
"*" => token::Star,
|
||||
"%" => token::Percent,
|
||||
"^" => token::Caret,
|
||||
"&" => token::And,
|
||||
"|" => token::Or,
|
||||
"<<" => token::Shl,
|
||||
">>" => token::Shr,
|
||||
_ => fail!("Bad binop str `{}`", s),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -186,19 +186,21 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
|
||||
debug!("What we got: content (`{}`), proto: {}", content, proto_tok);
|
||||
|
||||
let real_tok = match *proto_tok {
|
||||
BINOP(..) => BINOP(str_to_binop(content)),
|
||||
BINOPEQ(..) => BINOPEQ(str_to_binop(content.slice_to(content.len() - 1))),
|
||||
LIT_STR(..) => LIT_STR(fix(content)),
|
||||
LIT_STR_RAW(..) => LIT_STR_RAW(fix(content), count(content)),
|
||||
LIT_CHAR(..) => LIT_CHAR(fixchar(content)),
|
||||
LIT_BYTE(..) => LIT_BYTE(fixchar(content)),
|
||||
DOC_COMMENT(..) => DOC_COMMENT(nm),
|
||||
LIT_INTEGER(..) => LIT_INTEGER(nm),
|
||||
LIT_FLOAT(..) => LIT_FLOAT(nm),
|
||||
LIT_BINARY(..) => LIT_BINARY(nm),
|
||||
LIT_BINARY_RAW(..) => LIT_BINARY_RAW(fix(content), count(content)),
|
||||
IDENT(..) => IDENT(ast::Ident { name: nm, ctxt: 0 }, true),
|
||||
LIFETIME(..) => LIFETIME(ast::Ident { name: nm, ctxt: 0 }),
|
||||
token::BinOp(..) => token::BinOp(str_to_binop(content)),
|
||||
token::BinOpEq(..) => token::BinOpEq(str_to_binop(content.slice_to(
|
||||
content.len() - 1))),
|
||||
token::LitStr(..) => token::LitStr(fix(content)),
|
||||
token::LitStrRaw(..) => token::LitStrRaw(fix(content), count(content)),
|
||||
token::LitChar(..) => token::LitChar(fixchar(content)),
|
||||
token::LitByte(..) => token::LitByte(fixchar(content)),
|
||||
token::DocComment(..) => token::DocComment(nm),
|
||||
token::LitInteger(..) => token::LitInteger(nm),
|
||||
token::LitFloat(..) => token::LitFloat(nm),
|
||||
token::LitBinary(..) => token::LitBinary(nm),
|
||||
token::LitBinaryRaw(..) => token::LitBinaryRaw(fix(content), count(content)),
|
||||
token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 },
|
||||
token::ModName),
|
||||
token::Lifetime(..) => token::Lifetime(ast::Ident { name: nm, ctxt: 0 }),
|
||||
ref t => t.clone()
|
||||
};
|
||||
|
||||
@@ -222,8 +224,8 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
|
||||
|
||||
fn tok_cmp(a: &Token, b: &Token) -> bool {
|
||||
match a {
|
||||
&IDENT(id, _) => match b {
|
||||
&IDENT(id2, _) => id == id2,
|
||||
&token::Ident(id, _) => match b {
|
||||
&token::Ident(id2, _) => id == id2,
|
||||
_ => false
|
||||
},
|
||||
_ => a == b
|
||||
@@ -281,19 +283,20 @@ macro_rules! matches (
|
||||
)
|
||||
)
|
||||
|
||||
matches!(LIT_BYTE(..),
|
||||
LIT_CHAR(..),
|
||||
LIT_INTEGER(..),
|
||||
LIT_FLOAT(..),
|
||||
LIT_STR(..),
|
||||
LIT_STR_RAW(..),
|
||||
LIT_BINARY(..),
|
||||
LIT_BINARY_RAW(..),
|
||||
IDENT(..),
|
||||
LIFETIME(..),
|
||||
INTERPOLATED(..),
|
||||
DOC_COMMENT(..),
|
||||
SHEBANG(..)
|
||||
matches!(
|
||||
LitByte(..),
|
||||
LitChar(..),
|
||||
LitInteger(..),
|
||||
LitFloat(..),
|
||||
LitStr(..),
|
||||
LitStrRaw(..),
|
||||
LitBinary(..),
|
||||
LitBinaryRaw(..),
|
||||
Ident(..),
|
||||
Lifetime(..),
|
||||
Interpolated(..),
|
||||
DocComment(..),
|
||||
Shebang(..)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -634,7 +634,7 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<String> {
|
||||
return None
|
||||
}
|
||||
};
|
||||
if !parser.eat(&token::EOF) {
|
||||
if !parser.eat(&token::Eof) {
|
||||
cx.span_err(parser.span, "only one string literal allowed");
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -428,7 +428,7 @@ fn process_struct_field_def(&mut self,
|
||||
let qualname = format!("{}::{}", qualname, name);
|
||||
let typ = ppaux::ty_to_string(&self.analysis.ty_cx,
|
||||
(*self.analysis.ty_cx.node_types.borrow())[field.node.id as uint]);
|
||||
match self.span.sub_span_before_token(field.span, token::COLON) {
|
||||
match self.span.sub_span_before_token(field.span, token::Colon) {
|
||||
Some(sub_span) => self.fmt.field_str(field.span,
|
||||
Some(sub_span),
|
||||
field.node.id,
|
||||
@@ -1175,7 +1175,7 @@ fn visit_view_item(&mut self, i: &ast::ViewItem) {
|
||||
// 'use' always introduces an alias, if there is not an explicit
|
||||
// one, there is an implicit one.
|
||||
let sub_span =
|
||||
match self.span.sub_span_before_token(path.span, token::EQ) {
|
||||
match self.span.sub_span_before_token(path.span, token::Eq) {
|
||||
Some(sub_span) => Some(sub_span),
|
||||
None => sub_span,
|
||||
};
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
use syntax::parse::lexer;
|
||||
use syntax::parse::lexer::{Reader,StringReader};
|
||||
use syntax::parse::token;
|
||||
use syntax::parse::token::{is_keyword,keywords,is_ident,Token};
|
||||
use syntax::parse::token::{keywords, Token};
|
||||
|
||||
pub struct SpanUtils<'a> {
|
||||
pub sess: &'a Session,
|
||||
@@ -93,18 +93,18 @@ pub fn span_for_last_ident(&self, span: Span) -> Option<Span> {
|
||||
let mut bracket_count = 0u;
|
||||
loop {
|
||||
let ts = toks.next_token();
|
||||
if ts.tok == token::EOF {
|
||||
if ts.tok == token::Eof {
|
||||
return self.make_sub_span(span, result)
|
||||
}
|
||||
if bracket_count == 0 &&
|
||||
(is_ident(&ts.tok) || is_keyword(keywords::Self, &ts.tok)) {
|
||||
(ts.tok.is_ident() || ts.tok.is_keyword(keywords::Self)) {
|
||||
result = Some(ts.sp);
|
||||
}
|
||||
|
||||
bracket_count += match ts.tok {
|
||||
token::LT => 1,
|
||||
token::GT => -1,
|
||||
token::BINOP(token::SHR) => -2,
|
||||
token::Lt => 1,
|
||||
token::Gt => -1,
|
||||
token::BinOp(token::Shr) => -2,
|
||||
_ => 0
|
||||
}
|
||||
}
|
||||
@@ -116,18 +116,18 @@ pub fn span_for_first_ident(&self, span: Span) -> Option<Span> {
|
||||
let mut bracket_count = 0u;
|
||||
loop {
|
||||
let ts = toks.next_token();
|
||||
if ts.tok == token::EOF {
|
||||
if ts.tok == token::Eof {
|
||||
return None;
|
||||
}
|
||||
if bracket_count == 0 &&
|
||||
(is_ident(&ts.tok) || is_keyword(keywords::Self, &ts.tok)) {
|
||||
(ts.tok.is_ident() || ts.tok.is_keyword(keywords::Self)) {
|
||||
return self.make_sub_span(span, Some(ts.sp));
|
||||
}
|
||||
|
||||
bracket_count += match ts.tok {
|
||||
token::LT => 1,
|
||||
token::GT => -1,
|
||||
token::BINOP(token::SHR) => -2,
|
||||
token::Lt => 1,
|
||||
token::Gt => -1,
|
||||
token::BinOp(token::Shr) => -2,
|
||||
_ => 0
|
||||
}
|
||||
}
|
||||
@@ -141,36 +141,36 @@ pub fn sub_span_for_meth_name(&self, span: Span) -> Option<Span> {
|
||||
let mut result = None;
|
||||
let mut bracket_count = 0u;
|
||||
let mut last_span = None;
|
||||
while prev.tok != token::EOF {
|
||||
while prev.tok != token::Eof {
|
||||
last_span = None;
|
||||
let mut next = toks.next_token();
|
||||
|
||||
if (next.tok == token::LPAREN ||
|
||||
next.tok == token::LT) &&
|
||||
if (next.tok == token::LParen ||
|
||||
next.tok == token::Lt) &&
|
||||
bracket_count == 0 &&
|
||||
is_ident(&prev.tok) {
|
||||
prev.tok.is_ident() {
|
||||
result = Some(prev.sp);
|
||||
}
|
||||
|
||||
if bracket_count == 0 &&
|
||||
next.tok == token::MOD_SEP {
|
||||
next.tok == token::ModSep {
|
||||
let old = prev;
|
||||
prev = next;
|
||||
next = toks.next_token();
|
||||
if next.tok == token::LT &&
|
||||
is_ident(&old.tok) {
|
||||
if next.tok == token::Lt &&
|
||||
old.tok.is_ident() {
|
||||
result = Some(old.sp);
|
||||
}
|
||||
}
|
||||
|
||||
bracket_count += match prev.tok {
|
||||
token::LPAREN | token::LT => 1,
|
||||
token::RPAREN | token::GT => -1,
|
||||
token::BINOP(token::SHR) => -2,
|
||||
token::LParen | token::Lt => 1,
|
||||
token::RParen | token::Gt => -1,
|
||||
token::BinOp(token::Shr) => -2,
|
||||
_ => 0
|
||||
};
|
||||
|
||||
if is_ident(&prev.tok) && bracket_count == 0 {
|
||||
if prev.tok.is_ident() && bracket_count == 0 {
|
||||
last_span = Some(prev.sp);
|
||||
}
|
||||
prev = next;
|
||||
@@ -191,21 +191,21 @@ pub fn sub_span_for_type_name(&self, span: Span) -> Option<Span> {
|
||||
loop {
|
||||
let next = toks.next_token();
|
||||
|
||||
if (next.tok == token::LT ||
|
||||
next.tok == token::COLON) &&
|
||||
if (next.tok == token::Lt ||
|
||||
next.tok == token::Colon) &&
|
||||
bracket_count == 0 &&
|
||||
is_ident(&prev.tok) {
|
||||
prev.tok.is_ident() {
|
||||
result = Some(prev.sp);
|
||||
}
|
||||
|
||||
bracket_count += match prev.tok {
|
||||
token::LT => 1,
|
||||
token::GT => -1,
|
||||
token::BINOP(token::SHR) => -2,
|
||||
token::Lt => 1,
|
||||
token::Gt => -1,
|
||||
token::BinOp(token::Shr) => -2,
|
||||
_ => 0
|
||||
};
|
||||
|
||||
if next.tok == token::EOF {
|
||||
if next.tok == token::Eof {
|
||||
break;
|
||||
}
|
||||
prev = next;
|
||||
@@ -216,7 +216,7 @@ pub fn sub_span_for_type_name(&self, span: Span) -> Option<Span> {
|
||||
format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
|
||||
self.snippet(span), loc.file.name, loc.line).as_slice());
|
||||
}
|
||||
if result.is_none() && is_ident(&prev.tok) && bracket_count == 0 {
|
||||
if result.is_none() && prev.tok.is_ident() && bracket_count == 0 {
|
||||
return self.make_sub_span(span, Some(prev.sp));
|
||||
}
|
||||
self.make_sub_span(span, result)
|
||||
@@ -235,7 +235,7 @@ pub fn spans_with_brackets(&self, span: Span, nesting: int, limit: int) -> Vec<S
|
||||
let mut bracket_count = 0i;
|
||||
loop {
|
||||
let ts = toks.next_token();
|
||||
if ts.tok == token::EOF {
|
||||
if ts.tok == token::Eof {
|
||||
if bracket_count != 0 {
|
||||
let loc = self.sess.codemap().lookup_char_pos(span.lo);
|
||||
self.sess.span_bug(span, format!(
|
||||
@@ -248,13 +248,13 @@ pub fn spans_with_brackets(&self, span: Span, nesting: int, limit: int) -> Vec<S
|
||||
return result;
|
||||
}
|
||||
bracket_count += match ts.tok {
|
||||
token::LT => 1,
|
||||
token::GT => -1,
|
||||
token::BINOP(token::SHL) => 2,
|
||||
token::BINOP(token::SHR) => -2,
|
||||
token::Lt => 1,
|
||||
token::Gt => -1,
|
||||
token::BinOp(token::Shl) => 2,
|
||||
token::BinOp(token::Shr) => -2,
|
||||
_ => 0
|
||||
};
|
||||
if is_ident(&ts.tok) &&
|
||||
if ts.tok.is_ident() &&
|
||||
bracket_count == nesting {
|
||||
result.push(self.make_sub_span(span, Some(ts.sp)).unwrap());
|
||||
}
|
||||
@@ -265,7 +265,7 @@ pub fn sub_span_before_token(&self, span: Span, tok: Token) -> Option<Span> {
|
||||
let mut toks = self.retokenise_span(span);
|
||||
let mut prev = toks.next_token();
|
||||
loop {
|
||||
if prev.tok == token::EOF {
|
||||
if prev.tok == token::Eof {
|
||||
return None;
|
||||
}
|
||||
let next = toks.next_token();
|
||||
@@ -282,12 +282,12 @@ pub fn sub_span_after_keyword(&self,
|
||||
let mut toks = self.retokenise_span(span);
|
||||
loop {
|
||||
let ts = toks.next_token();
|
||||
if ts.tok == token::EOF {
|
||||
if ts.tok == token::Eof {
|
||||
return None;
|
||||
}
|
||||
if is_keyword(keyword, &ts.tok) {
|
||||
if ts.tok.is_keyword(keyword) {
|
||||
let ts = toks.next_token();
|
||||
if ts.tok == token::EOF {
|
||||
if ts.tok == token::Eof {
|
||||
return None
|
||||
} else {
|
||||
return self.make_sub_span(span, Some(ts.sp));
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
use std::io;
|
||||
use syntax::parse::lexer;
|
||||
use syntax::parse::token as t;
|
||||
use syntax::parse::token;
|
||||
use syntax::parse;
|
||||
|
||||
/// Highlights some source code, returning the HTML output.
|
||||
@@ -63,19 +63,19 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
|
||||
|
||||
let snip = |sp| sess.span_diagnostic.cm.span_to_snippet(sp).unwrap();
|
||||
|
||||
if next.tok == t::EOF { break }
|
||||
if next.tok == token::Eof { break }
|
||||
|
||||
let klass = match next.tok {
|
||||
t::WS => {
|
||||
token::Whitespace => {
|
||||
try!(write!(out, "{}", Escape(snip(next.sp).as_slice())));
|
||||
continue
|
||||
},
|
||||
t::COMMENT => {
|
||||
token::Comment => {
|
||||
try!(write!(out, "<span class='comment'>{}</span>",
|
||||
Escape(snip(next.sp).as_slice())));
|
||||
continue
|
||||
},
|
||||
t::SHEBANG(s) => {
|
||||
token::Shebang(s) => {
|
||||
try!(write!(out, "{}", Escape(s.as_str())));
|
||||
continue
|
||||
},
|
||||
@@ -83,24 +83,25 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
|
||||
// that it's the address-of operator instead of the and-operator.
|
||||
// This allows us to give all pointers their own class (`Box` and
|
||||
// `@` are below).
|
||||
t::BINOP(t::AND) if lexer.peek().sp.lo == next.sp.hi => "kw-2",
|
||||
t::AT | t::TILDE => "kw-2",
|
||||
token::BinOp(token::And) if lexer.peek().sp.lo == next.sp.hi => "kw-2",
|
||||
token::At | token::Tilde => "kw-2",
|
||||
|
||||
// consider this as part of a macro invocation if there was a
|
||||
// leading identifier
|
||||
t::NOT if is_macro => { is_macro = false; "macro" }
|
||||
token::Not if is_macro => { is_macro = false; "macro" }
|
||||
|
||||
// operators
|
||||
t::EQ | t::LT | t::LE | t::EQEQ | t::NE | t::GE | t::GT |
|
||||
t::ANDAND | t::OROR | t::NOT | t::BINOP(..) | t::RARROW |
|
||||
t::BINOPEQ(..) | t::FAT_ARROW => "op",
|
||||
token::Eq | token::Lt | token::Le | token::EqEq | token::Ne | token::Ge | token::Gt |
|
||||
token::AndAnd | token::OrOr | token::Not | token::BinOp(..) | token::RArrow |
|
||||
token::BinOpEq(..) | token::FatArrow => "op",
|
||||
|
||||
// miscellaneous, no highlighting
|
||||
t::DOT | t::DOTDOT | t::DOTDOTDOT | t::COMMA | t::SEMI |
|
||||
t::COLON | t::MOD_SEP | t::LARROW | t::LPAREN |
|
||||
t::RPAREN | t::LBRACKET | t::LBRACE | t::RBRACE | t::QUESTION => "",
|
||||
t::DOLLAR => {
|
||||
if t::is_ident(&lexer.peek().tok) {
|
||||
token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi |
|
||||
token::Colon | token::ModSep | token::LArrow | token::LParen |
|
||||
token::RParen | token::LBracket | token::LBrace | token::RBrace |
|
||||
token::Question => "",
|
||||
token::Dollar => {
|
||||
if lexer.peek().tok.is_ident() {
|
||||
is_macro_nonterminal = true;
|
||||
"macro-nonterminal"
|
||||
} else {
|
||||
@@ -112,12 +113,12 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
|
||||
// continue highlighting it as an attribute until the ending ']' is
|
||||
// seen, so skip out early. Down below we terminate the attribute
|
||||
// span when we see the ']'.
|
||||
t::POUND => {
|
||||
token::Pound => {
|
||||
is_attribute = true;
|
||||
try!(write!(out, r"<span class='attribute'>#"));
|
||||
continue
|
||||
}
|
||||
t::RBRACKET => {
|
||||
token::RBracket => {
|
||||
if is_attribute {
|
||||
is_attribute = false;
|
||||
try!(write!(out, "]</span>"));
|
||||
@@ -128,15 +129,15 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
|
||||
}
|
||||
|
||||
// text literals
|
||||
t::LIT_BYTE(..) | t::LIT_BINARY(..) | t::LIT_BINARY_RAW(..) |
|
||||
t::LIT_CHAR(..) | t::LIT_STR(..) | t::LIT_STR_RAW(..) => "string",
|
||||
token::LitByte(..) | token::LitBinary(..) | token::LitBinaryRaw(..) |
|
||||
token::LitChar(..) | token::LitStr(..) | token::LitStrRaw(..) => "string",
|
||||
|
||||
// number literals
|
||||
t::LIT_INTEGER(..) | t::LIT_FLOAT(..) => "number",
|
||||
token::LitInteger(..) | token::LitFloat(..) => "number",
|
||||
|
||||
// keywords are also included in the identifier set
|
||||
t::IDENT(ident, _is_mod_sep) => {
|
||||
match t::get_ident(ident).get() {
|
||||
token::Ident(ident, _is_mod_sep) => {
|
||||
match token::get_ident(ident).get() {
|
||||
"ref" | "mut" => "kw-2",
|
||||
|
||||
"self" => "self",
|
||||
@@ -145,12 +146,12 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
|
||||
"Option" | "Result" => "prelude-ty",
|
||||
"Some" | "None" | "Ok" | "Err" => "prelude-val",
|
||||
|
||||
_ if t::is_any_keyword(&next.tok) => "kw",
|
||||
_ if next.tok.is_any_keyword() => "kw",
|
||||
_ => {
|
||||
if is_macro_nonterminal {
|
||||
is_macro_nonterminal = false;
|
||||
"macro-nonterminal"
|
||||
} else if lexer.peek().tok == t::NOT {
|
||||
} else if lexer.peek().tok == token::Not {
|
||||
is_macro = true;
|
||||
"macro"
|
||||
} else {
|
||||
@@ -160,9 +161,9 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
|
||||
}
|
||||
}
|
||||
|
||||
t::LIFETIME(..) => "lifetime",
|
||||
t::DOC_COMMENT(..) => "doccomment",
|
||||
t::UNDERSCORE | t::EOF | t::INTERPOLATED(..) => "",
|
||||
token::Lifetime(..) => "lifetime",
|
||||
token::DocComment(..) => "doccomment",
|
||||
token::Underscore | token::Eof | token::Interpolated(..) => "",
|
||||
};
|
||||
|
||||
// as mentioned above, use the original source code instead of
|
||||
|
||||
@@ -82,7 +82,7 @@ fn eq(&self, other: &Ident) -> bool {
|
||||
//
|
||||
// On the other hand, if the comparison does need to be hygienic,
|
||||
// one example and its non-hygienic counterpart would be:
|
||||
// syntax::parse::token::mtwt_token_eq
|
||||
// syntax::parse::token::Token::mtwt_eq
|
||||
// syntax::ext::tt::macro_parser::token_name_eq
|
||||
fail!("not allowed to compare these idents: {}, {}. \
|
||||
Probably related to issue \\#6993", self, other);
|
||||
|
||||
@@ -50,7 +50,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
|
||||
token_tree: &[TokenTree])
|
||||
-> Box<MacResult+'cx> {
|
||||
let code = match token_tree {
|
||||
[ast::TtToken(_, token::IDENT(code, _))] => code,
|
||||
[ast::TtToken(_, token::Ident(code, _))] => code,
|
||||
_ => unreachable!()
|
||||
};
|
||||
with_registered_diagnostics(|diagnostics| {
|
||||
@@ -82,12 +82,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
|
||||
token_tree: &[TokenTree])
|
||||
-> Box<MacResult+'cx> {
|
||||
let (code, description) = match token_tree {
|
||||
[ast::TtToken(_, token::IDENT(ref code, _))] => {
|
||||
[ast::TtToken(_, token::Ident(ref code, _))] => {
|
||||
(code, None)
|
||||
},
|
||||
[ast::TtToken(_, token::IDENT(ref code, _)),
|
||||
ast::TtToken(_, token::COMMA),
|
||||
ast::TtToken(_, token::LIT_STR_RAW(description, _))] => {
|
||||
[ast::TtToken(_, token::Ident(ref code, _)),
|
||||
ast::TtToken(_, token::Comma),
|
||||
ast::TtToken(_, token::LitStrRaw(description, _))] => {
|
||||
(code, Some(description))
|
||||
}
|
||||
_ => unreachable!()
|
||||
@@ -110,7 +110,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
|
||||
token_tree: &[TokenTree])
|
||||
-> Box<MacResult+'cx> {
|
||||
let name = match token_tree {
|
||||
[ast::TtToken(_, token::IDENT(ref name, _))] => name,
|
||||
[ast::TtToken(_, token::Ident(ref name, _))] => name,
|
||||
_ => unreachable!()
|
||||
};
|
||||
|
||||
|
||||
+23
-23
@@ -72,21 +72,21 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
asm_str_style = Some(style);
|
||||
}
|
||||
Outputs => {
|
||||
while p.token != token::EOF &&
|
||||
p.token != token::COLON &&
|
||||
p.token != token::MOD_SEP {
|
||||
while p.token != token::Eof &&
|
||||
p.token != token::Colon &&
|
||||
p.token != token::ModSep {
|
||||
|
||||
if outputs.len() != 0 {
|
||||
p.eat(&token::COMMA);
|
||||
p.eat(&token::Comma);
|
||||
}
|
||||
|
||||
let (constraint, _str_style) = p.parse_str();
|
||||
|
||||
let span = p.last_span;
|
||||
|
||||
p.expect(&token::LPAREN);
|
||||
p.expect(&token::LParen);
|
||||
let out = p.parse_expr();
|
||||
p.expect(&token::RPAREN);
|
||||
p.expect(&token::RParen);
|
||||
|
||||
// Expands a read+write operand into two operands.
|
||||
//
|
||||
@@ -113,12 +113,12 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
}
|
||||
}
|
||||
Inputs => {
|
||||
while p.token != token::EOF &&
|
||||
p.token != token::COLON &&
|
||||
p.token != token::MOD_SEP {
|
||||
while p.token != token::Eof &&
|
||||
p.token != token::Colon &&
|
||||
p.token != token::ModSep {
|
||||
|
||||
if inputs.len() != 0 {
|
||||
p.eat(&token::COMMA);
|
||||
p.eat(&token::Comma);
|
||||
}
|
||||
|
||||
let (constraint, _str_style) = p.parse_str();
|
||||
@@ -129,21 +129,21 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
cx.span_err(p.last_span, "input operand constraint contains '+'");
|
||||
}
|
||||
|
||||
p.expect(&token::LPAREN);
|
||||
p.expect(&token::LParen);
|
||||
let input = p.parse_expr();
|
||||
p.expect(&token::RPAREN);
|
||||
p.expect(&token::RParen);
|
||||
|
||||
inputs.push((constraint, input));
|
||||
}
|
||||
}
|
||||
Clobbers => {
|
||||
let mut clobs = Vec::new();
|
||||
while p.token != token::EOF &&
|
||||
p.token != token::COLON &&
|
||||
p.token != token::MOD_SEP {
|
||||
while p.token != token::Eof &&
|
||||
p.token != token::Colon &&
|
||||
p.token != token::ModSep {
|
||||
|
||||
if clobs.len() != 0 {
|
||||
p.eat(&token::COMMA);
|
||||
p.eat(&token::Comma);
|
||||
}
|
||||
|
||||
let (s, _str_style) = p.parse_str();
|
||||
@@ -172,8 +172,8 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
cx.span_warn(p.last_span, "unrecognized option");
|
||||
}
|
||||
|
||||
if p.token == token::COMMA {
|
||||
p.eat(&token::COMMA);
|
||||
if p.token == token::Comma {
|
||||
p.eat(&token::Comma);
|
||||
}
|
||||
}
|
||||
StateNone => ()
|
||||
@@ -183,17 +183,17 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
// MOD_SEP is a double colon '::' without space in between.
|
||||
// When encountered, the state must be advanced twice.
|
||||
match (&p.token, state.next(), state.next().next()) {
|
||||
(&token::COLON, StateNone, _) |
|
||||
(&token::MOD_SEP, _, StateNone) => {
|
||||
(&token::Colon, StateNone, _) |
|
||||
(&token::ModSep, _, StateNone) => {
|
||||
p.bump();
|
||||
break 'statement;
|
||||
}
|
||||
(&token::COLON, st, _) |
|
||||
(&token::MOD_SEP, _, st) => {
|
||||
(&token::Colon, st, _) |
|
||||
(&token::ModSep, _, st) => {
|
||||
p.bump();
|
||||
state = st;
|
||||
}
|
||||
(&token::EOF, _, _) => break 'statement,
|
||||
(&token::Eof, _, _) => break 'statement,
|
||||
_ => break
|
||||
}
|
||||
}
|
||||
|
||||
@@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
|
||||
cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
|
||||
} else {
|
||||
match tts[0] {
|
||||
ast::TtToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
|
||||
ast::TtToken(_, token::LIT_STR_RAW(ident, _)) => {
|
||||
ast::TtToken(_, token::LitStr(ident)) => return Some(parse::str_lit(ident.as_str())),
|
||||
ast::TtToken(_, token::LitStrRaw(ident, _)) => {
|
||||
return Some(parse::raw_str_lit(ident.as_str()))
|
||||
}
|
||||
_ => {
|
||||
@@ -704,12 +704,12 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
|
||||
tts: &[ast::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
|
||||
let mut p = cx.new_parser_from_tts(tts);
|
||||
let mut es = Vec::new();
|
||||
while p.token != token::EOF {
|
||||
while p.token != token::Eof {
|
||||
es.push(cx.expander().fold_expr(p.parse_expr()));
|
||||
if p.eat(&token::COMMA) {
|
||||
if p.eat(&token::Comma) {
|
||||
continue;
|
||||
}
|
||||
if p.token != token::EOF {
|
||||
if p.token != token::Eof {
|
||||
cx.span_err(sp, "expected token: `,`");
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
|
||||
let mut p = cx.new_parser_from_tts(tts);
|
||||
let cfg = p.parse_meta_item();
|
||||
|
||||
if !p.eat(&token::EOF) {
|
||||
if !p.eat(&token::Eof) {
|
||||
cx.span_err(sp, "expected 1 cfg-pattern");
|
||||
return DummyResult::expr(sp);
|
||||
}
|
||||
|
||||
@@ -23,21 +23,21 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
|
||||
for (i, e) in tts.iter().enumerate() {
|
||||
if i & 1 == 1 {
|
||||
match *e {
|
||||
ast::TtToken(_, token::COMMA) => (),
|
||||
ast::TtToken(_, token::Comma) => {},
|
||||
_ => {
|
||||
cx.span_err(sp, "concat_idents! expecting comma.");
|
||||
return DummyResult::expr(sp);
|
||||
}
|
||||
},
|
||||
}
|
||||
} else {
|
||||
match *e {
|
||||
ast::TtToken(_, token::IDENT(ident,_)) => {
|
||||
ast::TtToken(_, token::Ident(ident, _)) => {
|
||||
res_str.push_str(token::get_ident(ident).get())
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
cx.span_err(sp, "concat_idents! requires ident args.");
|
||||
return DummyResult::expr(sp);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -91,7 +91,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool,
|
||||
// Parse the leading function expression (maybe a block, maybe a path)
|
||||
let invocation = if allow_method {
|
||||
let e = p.parse_expr();
|
||||
if !p.eat(&token::COMMA) {
|
||||
if !p.eat(&token::Comma) {
|
||||
ecx.span_err(sp, "expected token: `,`");
|
||||
return (Call(e), None);
|
||||
}
|
||||
@@ -99,28 +99,27 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool,
|
||||
} else {
|
||||
Call(p.parse_expr())
|
||||
};
|
||||
if !p.eat(&token::COMMA) {
|
||||
if !p.eat(&token::Comma) {
|
||||
ecx.span_err(sp, "expected token: `,`");
|
||||
return (invocation, None);
|
||||
}
|
||||
|
||||
if p.token == token::EOF {
|
||||
if p.token == token::Eof {
|
||||
ecx.span_err(sp, "requires at least a format string argument");
|
||||
return (invocation, None);
|
||||
}
|
||||
let fmtstr = p.parse_expr();
|
||||
let mut named = false;
|
||||
while p.token != token::EOF {
|
||||
if !p.eat(&token::COMMA) {
|
||||
while p.token != token::Eof {
|
||||
if !p.eat(&token::Comma) {
|
||||
ecx.span_err(sp, "expected token: `,`");
|
||||
return (invocation, None);
|
||||
}
|
||||
if p.token == token::EOF { break } // accept trailing commas
|
||||
if named || (token::is_ident(&p.token) &&
|
||||
p.look_ahead(1, |t| *t == token::EQ)) {
|
||||
if p.token == token::Eof { break } // accept trailing commas
|
||||
if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
|
||||
named = true;
|
||||
let ident = match p.token {
|
||||
token::IDENT(i, _) => {
|
||||
token::Ident(i, _) => {
|
||||
p.bump();
|
||||
i
|
||||
}
|
||||
@@ -139,7 +138,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool,
|
||||
};
|
||||
let interned_name = token::get_ident(ident);
|
||||
let name = interned_name.get();
|
||||
p.expect(&token::EQ);
|
||||
p.expect(&token::Eq);
|
||||
let e = p.parse_expr();
|
||||
match names.find_equiv(&name) {
|
||||
None => {}
|
||||
|
||||
+73
-69
@@ -515,123 +515,127 @@ fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
|
||||
cx.expr_path(cx.path_global(sp, idents))
|
||||
}
|
||||
|
||||
fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> P<ast::Expr> {
|
||||
fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
|
||||
let name = match bop {
|
||||
PLUS => "PLUS",
|
||||
MINUS => "MINUS",
|
||||
STAR => "STAR",
|
||||
SLASH => "SLASH",
|
||||
PERCENT => "PERCENT",
|
||||
CARET => "CARET",
|
||||
AND => "AND",
|
||||
OR => "OR",
|
||||
SHL => "SHL",
|
||||
SHR => "SHR"
|
||||
token::Plus => "Plus",
|
||||
token::Minus => "Minus",
|
||||
token::Star => "Star",
|
||||
token::Slash => "Slash",
|
||||
token::Percent => "Percent",
|
||||
token::Caret => "Caret",
|
||||
token::And => "And",
|
||||
token::Or => "Or",
|
||||
token::Shl => "Shl",
|
||||
token::Shr => "Shr"
|
||||
};
|
||||
mk_token_path(cx, sp, name)
|
||||
}
|
||||
|
||||
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
|
||||
fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
|
||||
|
||||
match *tok {
|
||||
BINOP(binop) => {
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "BINOP"), vec!(mk_binop(cx, sp, binop)));
|
||||
token::BinOp(binop) => {
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec!(mk_binop(cx, sp, binop)));
|
||||
}
|
||||
BINOPEQ(binop) => {
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "BINOPEQ"),
|
||||
token::BinOpEq(binop) => {
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "BinOpEq"),
|
||||
vec!(mk_binop(cx, sp, binop)));
|
||||
}
|
||||
|
||||
LIT_BYTE(i) => {
|
||||
token::LitByte(i) => {
|
||||
let e_byte = mk_name(cx, sp, i.ident());
|
||||
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_BYTE"), vec!(e_byte));
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "LitByte"), vec!(e_byte));
|
||||
}
|
||||
|
||||
LIT_CHAR(i) => {
|
||||
token::LitChar(i) => {
|
||||
let e_char = mk_name(cx, sp, i.ident());
|
||||
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_CHAR"), vec!(e_char));
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "LitChar"), vec!(e_char));
|
||||
}
|
||||
|
||||
LIT_INTEGER(i) => {
|
||||
token::LitInteger(i) => {
|
||||
let e_int = mk_name(cx, sp, i.ident());
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_INTEGER"), vec!(e_int));
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "LitInteger"), vec!(e_int));
|
||||
}
|
||||
|
||||
LIT_FLOAT(fident) => {
|
||||
token::LitFloat(fident) => {
|
||||
let e_fident = mk_name(cx, sp, fident.ident());
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_FLOAT"), vec!(e_fident));
|
||||
return cx.expr_call(sp, mk_token_path(cx, sp, "LitFloat"), vec!(e_fident));
|
||||
}
|
||||
|
||||
LIT_STR(ident) => {
|
||||
token::LitStr(ident) => {
|
||||
return cx.expr_call(sp,
|
||||
mk_token_path(cx, sp, "LIT_STR"),
|
||||
mk_token_path(cx, sp, "LitStr"),
|
||||
vec!(mk_name(cx, sp, ident.ident())));
|
||||
}
|
||||
|
||||
LIT_STR_RAW(ident, n) => {
|
||||
token::LitStrRaw(ident, n) => {
|
||||
return cx.expr_call(sp,
|
||||
mk_token_path(cx, sp, "LIT_STR_RAW"),
|
||||
mk_token_path(cx, sp, "LitStrRaw"),
|
||||
vec!(mk_name(cx, sp, ident.ident()), cx.expr_uint(sp, n)));
|
||||
}
|
||||
|
||||
IDENT(ident, b) => {
|
||||
token::Ident(ident, style) => {
|
||||
return cx.expr_call(sp,
|
||||
mk_token_path(cx, sp, "IDENT"),
|
||||
vec!(mk_ident(cx, sp, ident), cx.expr_bool(sp, b)));
|
||||
mk_token_path(cx, sp, "Ident"),
|
||||
vec![mk_ident(cx, sp, ident),
|
||||
match style {
|
||||
ModName => mk_token_path(cx, sp, "ModName"),
|
||||
Plain => mk_token_path(cx, sp, "Plain"),
|
||||
}]);
|
||||
}
|
||||
|
||||
LIFETIME(ident) => {
|
||||
token::Lifetime(ident) => {
|
||||
return cx.expr_call(sp,
|
||||
mk_token_path(cx, sp, "LIFETIME"),
|
||||
mk_token_path(cx, sp, "Lifetime"),
|
||||
vec!(mk_ident(cx, sp, ident)));
|
||||
}
|
||||
|
||||
DOC_COMMENT(ident) => {
|
||||
token::DocComment(ident) => {
|
||||
return cx.expr_call(sp,
|
||||
mk_token_path(cx, sp, "DOC_COMMENT"),
|
||||
mk_token_path(cx, sp, "DocComment"),
|
||||
vec!(mk_name(cx, sp, ident.ident())));
|
||||
}
|
||||
|
||||
INTERPOLATED(_) => fail!("quote! with interpolated token"),
|
||||
token::Interpolated(_) => fail!("quote! with interpolated token"),
|
||||
|
||||
_ => ()
|
||||
}
|
||||
|
||||
let name = match *tok {
|
||||
EQ => "EQ",
|
||||
LT => "LT",
|
||||
LE => "LE",
|
||||
EQEQ => "EQEQ",
|
||||
NE => "NE",
|
||||
GE => "GE",
|
||||
GT => "GT",
|
||||
ANDAND => "ANDAND",
|
||||
OROR => "OROR",
|
||||
NOT => "NOT",
|
||||
TILDE => "TILDE",
|
||||
AT => "AT",
|
||||
DOT => "DOT",
|
||||
DOTDOT => "DOTDOT",
|
||||
COMMA => "COMMA",
|
||||
SEMI => "SEMI",
|
||||
COLON => "COLON",
|
||||
MOD_SEP => "MOD_SEP",
|
||||
RARROW => "RARROW",
|
||||
LARROW => "LARROW",
|
||||
FAT_ARROW => "FAT_ARROW",
|
||||
LPAREN => "LPAREN",
|
||||
RPAREN => "RPAREN",
|
||||
LBRACKET => "LBRACKET",
|
||||
RBRACKET => "RBRACKET",
|
||||
LBRACE => "LBRACE",
|
||||
RBRACE => "RBRACE",
|
||||
POUND => "POUND",
|
||||
DOLLAR => "DOLLAR",
|
||||
UNDERSCORE => "UNDERSCORE",
|
||||
EOF => "EOF",
|
||||
_ => fail!()
|
||||
token::Eq => "Eq",
|
||||
token::Lt => "Lt",
|
||||
token::Le => "Le",
|
||||
token::EqEq => "EqEq",
|
||||
token::Ne => "Ne",
|
||||
token::Ge => "Ge",
|
||||
token::Gt => "Gt",
|
||||
token::AndAnd => "AndAnd",
|
||||
token::OrOr => "OrOr",
|
||||
token::Not => "Not",
|
||||
token::Tilde => "Tilde",
|
||||
token::At => "At",
|
||||
token::Dot => "Dot",
|
||||
token::DotDot => "DotDot",
|
||||
token::Comma => "Comma",
|
||||
token::Semi => "Semi",
|
||||
token::Colon => "Colon",
|
||||
token::ModSep => "ModSep",
|
||||
token::RArrow => "RArrow",
|
||||
token::LArrow => "LArrow",
|
||||
token::FatArrow => "FatArrow",
|
||||
token::LParen => "LParen",
|
||||
token::RParen => "RParen",
|
||||
token::LBracket => "LBracket",
|
||||
token::RBracket => "RBracket",
|
||||
token::LBrace => "LBrace",
|
||||
token::RBrace => "RBrace",
|
||||
token::Pound => "Pound",
|
||||
token::Dollar => "Dollar",
|
||||
token::Underscore => "Underscore",
|
||||
token::Eof => "Eof",
|
||||
_ => fail!(),
|
||||
};
|
||||
mk_token_path(cx, sp, name)
|
||||
}
|
||||
@@ -702,7 +706,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
p.quote_depth += 1u;
|
||||
|
||||
let cx_expr = p.parse_expr();
|
||||
if !p.eat(&token::COMMA) {
|
||||
if !p.eat(&token::Comma) {
|
||||
p.fatal("expected token `,`");
|
||||
}
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
use codemap::Span;
|
||||
use ext::base::ExtCtxt;
|
||||
use ext::base;
|
||||
use parse::token::{keywords, is_keyword};
|
||||
use parse::token::keywords;
|
||||
|
||||
|
||||
pub fn expand_trace_macros(cx: &mut ExtCtxt,
|
||||
@@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
|
||||
tt: &[ast::TokenTree])
|
||||
-> Box<base::MacResult+'static> {
|
||||
match tt {
|
||||
[ast::TtToken(_, ref tok)] if is_keyword(keywords::True, tok) => {
|
||||
[ast::TtToken(_, ref tok)] if tok.is_keyword(keywords::True) => {
|
||||
cx.set_trace_macros(true);
|
||||
}
|
||||
[ast::TtToken(_, ref tok)] if is_keyword(keywords::False, tok) => {
|
||||
[ast::TtToken(_, ref tok)] if tok.is_keyword(keywords::False) => {
|
||||
cx.set_trace_macros(false);
|
||||
}
|
||||
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
|
||||
|
||||
@@ -85,8 +85,9 @@
|
||||
use parse::ParseSess;
|
||||
use parse::attr::ParserAttr;
|
||||
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
|
||||
use parse::token::{Token, EOF, Nonterminal};
|
||||
use parse::token::{Token, Nonterminal};
|
||||
use parse::token;
|
||||
use print::pprust;
|
||||
use ptr::P;
|
||||
|
||||
use std::rc::Rc;
|
||||
@@ -226,8 +227,8 @@ pub fn parse_or_else(sess: &ParseSess,
|
||||
/// unhygienic comparison)
|
||||
pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
|
||||
match (t1,t2) {
|
||||
(&token::IDENT(id1,_),&token::IDENT(id2,_))
|
||||
| (&token::LIFETIME(id1),&token::LIFETIME(id2)) =>
|
||||
(&token::Ident(id1,_),&token::Ident(id2,_))
|
||||
| (&token::Lifetime(id1),&token::Lifetime(id2)) =>
|
||||
id1.name == id2.name,
|
||||
_ => *t1 == *t2
|
||||
}
|
||||
@@ -354,9 +355,9 @@ pub fn parse(sess: &ParseSess,
|
||||
// Built-in nonterminals never start with these tokens,
|
||||
// so we can eliminate them from consideration.
|
||||
match tok {
|
||||
token::RPAREN |
|
||||
token::RBRACE |
|
||||
token::RBRACKET => {},
|
||||
token::RParen |
|
||||
token::RBrace |
|
||||
token::RBracket => {},
|
||||
_ => bb_eis.push(ei)
|
||||
}
|
||||
}
|
||||
@@ -372,7 +373,7 @@ pub fn parse(sess: &ParseSess,
|
||||
}
|
||||
|
||||
/* error messages here could be improved with links to orig. rules */
|
||||
if token_name_eq(&tok, &EOF) {
|
||||
if token_name_eq(&tok, &token::Eof) {
|
||||
if eof_eis.len() == 1u {
|
||||
let mut v = Vec::new();
|
||||
for dv in eof_eis.get_mut(0).matches.iter_mut() {
|
||||
@@ -402,7 +403,7 @@ pub fn parse(sess: &ParseSess,
|
||||
nts, next_eis.len()).to_string());
|
||||
} else if bb_eis.len() == 0u && next_eis.len() == 0u {
|
||||
return Failure(sp, format!("no rules expected the token `{}`",
|
||||
token::to_string(&tok)).to_string());
|
||||
pprust::token_to_string(&tok)).to_string());
|
||||
} else if next_eis.len() > 0u {
|
||||
/* Now process the next token */
|
||||
while next_eis.len() > 0u {
|
||||
@@ -447,9 +448,9 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
|
||||
"ty" => token::NtTy(p.parse_ty(false /* no need to disambiguate*/)),
|
||||
// this could be handled like a token, since it is one
|
||||
"ident" => match p.token {
|
||||
token::IDENT(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
|
||||
token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
|
||||
_ => {
|
||||
let token_str = token::to_string(&p.token);
|
||||
let token_str = pprust::token_to_string(&p.token);
|
||||
p.fatal((format!("expected ident, found {}",
|
||||
token_str.as_slice())).as_slice())
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
use parse::parser::Parser;
|
||||
use parse::attr::ParserAttr;
|
||||
use parse::token::{special_idents, gensym_ident};
|
||||
use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF};
|
||||
use parse::token::{NtMatchers, NtTT};
|
||||
use parse::token;
|
||||
use print;
|
||||
use ptr::P;
|
||||
@@ -43,10 +43,10 @@ impl<'a> ParserAnyMacro<'a> {
|
||||
/// allowed to be there.
|
||||
fn ensure_complete_parse(&self, allow_semi: bool) {
|
||||
let mut parser = self.parser.borrow_mut();
|
||||
if allow_semi && parser.token == SEMI {
|
||||
if allow_semi && parser.token == token::Semi {
|
||||
parser.bump()
|
||||
}
|
||||
if parser.token != EOF {
|
||||
if parser.token != token::Eof {
|
||||
let token_str = parser.this_token_to_string();
|
||||
let msg = format!("macro expansion ignores token `{}` and any \
|
||||
following",
|
||||
@@ -89,7 +89,7 @@ fn make_methods(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Meth
|
||||
loop {
|
||||
let mut parser = self.parser.borrow_mut();
|
||||
match parser.token {
|
||||
EOF => break,
|
||||
token::Eof => break,
|
||||
_ => {
|
||||
let attrs = parser.parse_outer_attributes();
|
||||
ret.push(parser.parse_method(attrs, ast::Inherited))
|
||||
@@ -231,12 +231,13 @@ fn ms(m: Matcher_) -> Matcher {
|
||||
let argument_gram = vec!(
|
||||
ms(MatchSeq(vec!(
|
||||
ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)),
|
||||
ms(MatchTok(FAT_ARROW)),
|
||||
ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI),
|
||||
ast::OneOrMore, 0u, 2u)),
|
||||
ms(MatchTok(token::FatArrow)),
|
||||
ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))),
|
||||
Some(token::Semi), ast::OneOrMore, 0u, 2u)),
|
||||
//to phase into semicolon-termination instead of
|
||||
//semicolon-separation
|
||||
ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, ast::ZeroOrMore, 2u, 2u)));
|
||||
ms(MatchSeq(vec!(ms(MatchTok(token::Semi))), None,
|
||||
ast::ZeroOrMore, 2u, 2u)));
|
||||
|
||||
|
||||
// Parse the macro_rules! invocation (`none` is for no interpolations):
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
use codemap::{Span, DUMMY_SP};
|
||||
use diagnostic::SpanHandler;
|
||||
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
|
||||
use parse::token::{EOF, INTERPOLATED, IDENT, Token, NtIdent};
|
||||
use parse::token::{Token, NtIdent};
|
||||
use parse::token;
|
||||
use parse::lexer::TokenAndSpan;
|
||||
|
||||
@@ -66,7 +66,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
|
||||
repeat_idx: Vec::new(),
|
||||
repeat_len: Vec::new(),
|
||||
/* dummy values, never read: */
|
||||
cur_tok: EOF,
|
||||
cur_tok: token::Eof,
|
||||
cur_span: DUMMY_SP,
|
||||
};
|
||||
tt_next_token(&mut r); /* get cur_tok and cur_span set up */
|
||||
@@ -158,7 +158,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
||||
loop {
|
||||
let should_pop = match r.stack.last() {
|
||||
None => {
|
||||
assert_eq!(ret_val.tok, EOF);
|
||||
assert_eq!(ret_val.tok, token::Eof);
|
||||
return ret_val;
|
||||
}
|
||||
Some(frame) => {
|
||||
@@ -175,7 +175,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
||||
let prev = r.stack.pop().unwrap();
|
||||
match r.stack.last_mut() {
|
||||
None => {
|
||||
r.cur_tok = EOF;
|
||||
r.cur_tok = token::Eof;
|
||||
return ret_val;
|
||||
}
|
||||
Some(frame) => {
|
||||
@@ -272,13 +272,13 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
||||
(b) we actually can, since it's a token. */
|
||||
MatchedNonterminal(NtIdent(box sn, b)) => {
|
||||
r.cur_span = sp;
|
||||
r.cur_tok = IDENT(sn,b);
|
||||
r.cur_tok = token::Ident(sn,b);
|
||||
return ret_val;
|
||||
}
|
||||
MatchedNonterminal(ref other_whole_nt) => {
|
||||
// FIXME(pcwalton): Bad copy.
|
||||
r.cur_span = sp;
|
||||
r.cur_tok = INTERPOLATED((*other_whole_nt).clone());
|
||||
r.cur_tok = token::Interpolated((*other_whole_nt).clone());
|
||||
return ret_val;
|
||||
}
|
||||
MatchedSeq(..) => {
|
||||
|
||||
@@ -602,11 +602,11 @@ pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree
|
||||
// apply ident folder if it's an ident, apply other folds to interpolated nodes
|
||||
pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
|
||||
match t {
|
||||
token::IDENT(id, followed_by_colons) => {
|
||||
token::IDENT(fld.fold_ident(id), followed_by_colons)
|
||||
token::Ident(id, followed_by_colons) => {
|
||||
token::Ident(fld.fold_ident(id), followed_by_colons)
|
||||
}
|
||||
token::LIFETIME(id) => token::LIFETIME(fld.fold_ident(id)),
|
||||
token::INTERPOLATED(nt) => token::INTERPOLATED(fld.fold_interpolated(nt)),
|
||||
token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
|
||||
token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
|
||||
_ => t
|
||||
}
|
||||
}
|
||||
|
||||
+16
-17
@@ -14,7 +14,6 @@
|
||||
use parse::common::*; //resolve bug?
|
||||
use parse::token;
|
||||
use parse::parser::Parser;
|
||||
use parse::token::INTERPOLATED;
|
||||
use ptr::P;
|
||||
|
||||
/// A parser that can parse attributes.
|
||||
@@ -36,10 +35,10 @@ fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> {
|
||||
debug!("parse_outer_attributes: self.token={}",
|
||||
self.token);
|
||||
match self.token {
|
||||
token::POUND => {
|
||||
token::Pound => {
|
||||
attrs.push(self.parse_attribute(false));
|
||||
}
|
||||
token::DOC_COMMENT(s) => {
|
||||
token::DocComment(s) => {
|
||||
let attr = ::attr::mk_sugared_doc_attr(
|
||||
attr::mk_attr_id(),
|
||||
self.id_to_interned_str(s.ident()),
|
||||
@@ -66,11 +65,11 @@ fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute {
|
||||
debug!("parse_attributes: permit_inner={} self.token={}",
|
||||
permit_inner, self.token);
|
||||
let (span, value, mut style) = match self.token {
|
||||
token::POUND => {
|
||||
token::Pound => {
|
||||
let lo = self.span.lo;
|
||||
self.bump();
|
||||
|
||||
let style = if self.eat(&token::NOT) {
|
||||
let style = if self.eat(&token::Not) {
|
||||
if !permit_inner {
|
||||
let span = self.span;
|
||||
self.span_err(span,
|
||||
@@ -82,10 +81,10 @@ fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute {
|
||||
ast::AttrOuter
|
||||
};
|
||||
|
||||
self.expect(&token::LBRACKET);
|
||||
self.expect(&token::LBracket);
|
||||
let meta_item = self.parse_meta_item();
|
||||
let hi = self.span.hi;
|
||||
self.expect(&token::RBRACKET);
|
||||
self.expect(&token::RBracket);
|
||||
|
||||
(mk_sp(lo, hi), meta_item, style)
|
||||
}
|
||||
@@ -96,7 +95,7 @@ fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute {
|
||||
}
|
||||
};
|
||||
|
||||
if permit_inner && self.eat(&token::SEMI) {
|
||||
if permit_inner && self.eat(&token::Semi) {
|
||||
self.span_warn(span, "this inner attribute syntax is deprecated. \
|
||||
The new syntax is `#![foo]`, with a bang and no semicolon.");
|
||||
style = ast::AttrInner;
|
||||
@@ -130,10 +129,10 @@ fn parse_inner_attrs_and_next(&mut self)
|
||||
let mut next_outer_attrs: Vec<ast::Attribute> = Vec::new();
|
||||
loop {
|
||||
let attr = match self.token {
|
||||
token::POUND => {
|
||||
token::Pound => {
|
||||
self.parse_attribute(true)
|
||||
}
|
||||
token::DOC_COMMENT(s) => {
|
||||
token::DocComment(s) => {
|
||||
// we need to get the position of this token before we bump.
|
||||
let Span { lo, hi, .. } = self.span;
|
||||
self.bump();
|
||||
@@ -161,7 +160,7 @@ fn parse_inner_attrs_and_next(&mut self)
|
||||
/// | IDENT meta_seq
|
||||
fn parse_meta_item(&mut self) -> P<ast::MetaItem> {
|
||||
let nt_meta = match self.token {
|
||||
token::INTERPOLATED(token::NtMeta(ref e)) => {
|
||||
token::Interpolated(token::NtMeta(ref e)) => {
|
||||
Some(e.clone())
|
||||
}
|
||||
_ => None
|
||||
@@ -179,7 +178,7 @@ fn parse_meta_item(&mut self) -> P<ast::MetaItem> {
|
||||
let ident = self.parse_ident();
|
||||
let name = self.id_to_interned_str(ident);
|
||||
match self.token {
|
||||
token::EQ => {
|
||||
token::Eq => {
|
||||
self.bump();
|
||||
let lit = self.parse_lit();
|
||||
// FIXME #623 Non-string meta items are not serialized correctly;
|
||||
@@ -195,7 +194,7 @@ fn parse_meta_item(&mut self) -> P<ast::MetaItem> {
|
||||
let hi = self.span.hi;
|
||||
P(spanned(lo, hi, ast::MetaNameValue(name, lit)))
|
||||
}
|
||||
token::LPAREN => {
|
||||
token::LParen => {
|
||||
let inner_items = self.parse_meta_seq();
|
||||
let hi = self.span.hi;
|
||||
P(spanned(lo, hi, ast::MetaList(name, inner_items)))
|
||||
@@ -209,15 +208,15 @@ fn parse_meta_item(&mut self) -> P<ast::MetaItem> {
|
||||
|
||||
/// matches meta_seq = ( COMMASEP(meta_item) )
|
||||
fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
|
||||
self.parse_seq(&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_disallowed(token::COMMA),
|
||||
self.parse_seq(&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_trailing_disallowed(token::Comma),
|
||||
|p| p.parse_meta_item()).node
|
||||
}
|
||||
|
||||
fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {
|
||||
match self.token {
|
||||
token::LPAREN => self.parse_meta_seq(),
|
||||
token::LParen => self.parse_meta_seq(),
|
||||
_ => Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
use parse::lexer::{StringReader, TokenAndSpan};
|
||||
use parse::lexer::is_block_doc_comment;
|
||||
use parse::lexer;
|
||||
use parse::token;
|
||||
use print::pprust;
|
||||
|
||||
use std::io;
|
||||
use std::str;
|
||||
@@ -367,13 +367,13 @@ pub fn gather_comments_and_literals(span_diagnostic: &diagnostic::SpanHandler,
|
||||
rdr.next_token();
|
||||
//discard, and look ahead; we're working with internal state
|
||||
let TokenAndSpan { tok, sp } = rdr.peek();
|
||||
if token::is_lit(&tok) {
|
||||
if tok.is_lit() {
|
||||
rdr.with_str_from(bstart, |s| {
|
||||
debug!("tok lit: {}", s);
|
||||
literals.push(Literal {lit: s.to_string(), pos: sp.lo});
|
||||
})
|
||||
} else {
|
||||
debug!("tok: {}", token::to_string(&tok));
|
||||
debug!("tok: {}", pprust::token_to_string(&tok));
|
||||
}
|
||||
first_read = false;
|
||||
}
|
||||
|
||||
+122
-112
@@ -69,7 +69,7 @@ fn is_eof(&self) -> bool { self.curr.is_none() }
|
||||
/// Return the next token. EFFECT: advances the string_reader.
|
||||
fn next_token(&mut self) -> TokenAndSpan {
|
||||
let ret_val = TokenAndSpan {
|
||||
tok: replace(&mut self.peek_tok, token::UNDERSCORE),
|
||||
tok: replace(&mut self.peek_tok, token::Underscore),
|
||||
sp: self.peek_span,
|
||||
};
|
||||
self.advance_token();
|
||||
@@ -92,7 +92,7 @@ fn peek(&self) -> TokenAndSpan {
|
||||
|
||||
impl<'a> Reader for TtReader<'a> {
|
||||
fn is_eof(&self) -> bool {
|
||||
self.cur_tok == token::EOF
|
||||
self.cur_tok == token::Eof
|
||||
}
|
||||
fn next_token(&mut self) -> TokenAndSpan {
|
||||
let r = tt_next_token(self);
|
||||
@@ -136,7 +136,7 @@ pub fn new_raw<'b>(span_diagnostic: &'b SpanHandler,
|
||||
curr: Some('\n'),
|
||||
filemap: filemap,
|
||||
/* dummy values; not read */
|
||||
peek_tok: token::EOF,
|
||||
peek_tok: token::Eof,
|
||||
peek_span: codemap::DUMMY_SP,
|
||||
read_embedded_ident: false,
|
||||
};
|
||||
@@ -213,7 +213,7 @@ fn advance_token(&mut self) {
|
||||
},
|
||||
None => {
|
||||
if self.is_eof() {
|
||||
self.peek_tok = token::EOF;
|
||||
self.peek_tok = token::Eof;
|
||||
} else {
|
||||
let start_bytepos = self.last_pos;
|
||||
self.peek_tok = self.next_token_inner();
|
||||
@@ -396,9 +396,9 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
|
||||
return self.with_str_from(start_bpos, |string| {
|
||||
// but comments with only more "/"s are not
|
||||
let tok = if is_doc_comment(string) {
|
||||
token::DOC_COMMENT(token::intern(string))
|
||||
token::DocComment(token::intern(string))
|
||||
} else {
|
||||
token::COMMENT
|
||||
token::Comment
|
||||
};
|
||||
|
||||
return Some(TokenAndSpan{
|
||||
@@ -410,7 +410,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
|
||||
let start_bpos = self.last_pos - BytePos(2);
|
||||
while !self.curr_is('\n') && !self.is_eof() { self.bump(); }
|
||||
return Some(TokenAndSpan {
|
||||
tok: token::COMMENT,
|
||||
tok: token::Comment,
|
||||
sp: codemap::mk_sp(start_bpos, self.last_pos)
|
||||
});
|
||||
}
|
||||
@@ -440,7 +440,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
|
||||
let start = self.last_pos;
|
||||
while !self.curr_is('\n') && !self.is_eof() { self.bump(); }
|
||||
return Some(TokenAndSpan {
|
||||
tok: token::SHEBANG(self.name_from(start)),
|
||||
tok: token::Shebang(self.name_from(start)),
|
||||
sp: codemap::mk_sp(start, self.last_pos)
|
||||
});
|
||||
}
|
||||
@@ -466,7 +466,7 @@ fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
|
||||
let start_bpos = self.last_pos;
|
||||
while is_whitespace(self.curr) { self.bump(); }
|
||||
let c = Some(TokenAndSpan {
|
||||
tok: token::WS,
|
||||
tok: token::Whitespace,
|
||||
sp: codemap::mk_sp(start_bpos, self.last_pos)
|
||||
});
|
||||
debug!("scanning whitespace: {}", c);
|
||||
@@ -519,9 +519,9 @@ fn scan_block_comment(&mut self) -> Option<TokenAndSpan> {
|
||||
self.translate_crlf(start_bpos, string,
|
||||
"bare CR not allowed in block doc-comment")
|
||||
} else { string.into_maybe_owned() };
|
||||
token::DOC_COMMENT(token::intern(string.as_slice()))
|
||||
token::DocComment(token::intern(string.as_slice()))
|
||||
} else {
|
||||
token::COMMENT
|
||||
token::Comment
|
||||
};
|
||||
|
||||
Some(TokenAndSpan{
|
||||
@@ -642,17 +642,17 @@ fn scan_number(&mut self, c: char) -> token::Token {
|
||||
}
|
||||
'u' | 'i' => {
|
||||
self.scan_int_suffix();
|
||||
return token::LIT_INTEGER(self.name_from(start_bpos));
|
||||
return token::LitInteger(self.name_from(start_bpos));
|
||||
},
|
||||
'f' => {
|
||||
let last_pos = self.last_pos;
|
||||
self.scan_float_suffix();
|
||||
self.check_float_base(start_bpos, last_pos, base);
|
||||
return token::LIT_FLOAT(self.name_from(start_bpos));
|
||||
return token::LitFloat(self.name_from(start_bpos));
|
||||
}
|
||||
_ => {
|
||||
// just a 0
|
||||
return token::LIT_INTEGER(self.name_from(start_bpos));
|
||||
return token::LitInteger(self.name_from(start_bpos));
|
||||
}
|
||||
}
|
||||
} else if c.is_digit_radix(10) {
|
||||
@@ -665,7 +665,7 @@ fn scan_number(&mut self, c: char) -> token::Token {
|
||||
self.err_span_(start_bpos, self.last_pos, "no valid digits found for number");
|
||||
// eat any suffix
|
||||
self.scan_int_suffix();
|
||||
return token::LIT_INTEGER(token::intern("0"));
|
||||
return token::LitInteger(token::intern("0"));
|
||||
}
|
||||
|
||||
// might be a float, but don't be greedy if this is actually an
|
||||
@@ -683,13 +683,13 @@ fn scan_number(&mut self, c: char) -> token::Token {
|
||||
}
|
||||
let last_pos = self.last_pos;
|
||||
self.check_float_base(start_bpos, last_pos, base);
|
||||
return token::LIT_FLOAT(self.name_from(start_bpos));
|
||||
return token::LitFloat(self.name_from(start_bpos));
|
||||
} else if self.curr_is('f') {
|
||||
// or it might be an integer literal suffixed as a float
|
||||
self.scan_float_suffix();
|
||||
let last_pos = self.last_pos;
|
||||
self.check_float_base(start_bpos, last_pos, base);
|
||||
return token::LIT_FLOAT(self.name_from(start_bpos));
|
||||
return token::LitFloat(self.name_from(start_bpos));
|
||||
} else {
|
||||
// it might be a float if it has an exponent
|
||||
if self.curr_is('e') || self.curr_is('E') {
|
||||
@@ -697,11 +697,11 @@ fn scan_number(&mut self, c: char) -> token::Token {
|
||||
self.scan_float_suffix();
|
||||
let last_pos = self.last_pos;
|
||||
self.check_float_base(start_bpos, last_pos, base);
|
||||
return token::LIT_FLOAT(self.name_from(start_bpos));
|
||||
return token::LitFloat(self.name_from(start_bpos));
|
||||
}
|
||||
// but we certainly have an integer!
|
||||
self.scan_int_suffix();
|
||||
return token::LIT_INTEGER(self.name_from(start_bpos));
|
||||
return token::LitInteger(self.name_from(start_bpos));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -889,13 +889,13 @@ fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: ui
|
||||
}
|
||||
}
|
||||
|
||||
fn binop(&mut self, op: token::BinOp) -> token::Token {
|
||||
fn binop(&mut self, op: token::BinOpToken) -> token::Token {
|
||||
self.bump();
|
||||
if self.curr_is('=') {
|
||||
self.bump();
|
||||
return token::BINOPEQ(op);
|
||||
return token::BinOpEq(op);
|
||||
} else {
|
||||
return token::BINOP(op);
|
||||
return token::BinOp(op);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -919,14 +919,16 @@ fn next_token_inner(&mut self) -> token::Token {
|
||||
|
||||
return self.with_str_from(start, |string| {
|
||||
if string == "_" {
|
||||
token::UNDERSCORE
|
||||
token::Underscore
|
||||
} else {
|
||||
let is_mod_name = self.curr_is(':') && self.nextch_is(':');
|
||||
|
||||
// FIXME: perform NFKC normalization here. (Issue #2253)
|
||||
token::IDENT(str_to_ident(string), is_mod_name)
|
||||
if self.curr_is(':') && self.nextch_is(':') {
|
||||
token::Ident(str_to_ident(string), token::ModName)
|
||||
} else {
|
||||
token::Ident(str_to_ident(string), token::Plain)
|
||||
}
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
if is_dec_digit(c) {
|
||||
@@ -937,8 +939,11 @@ fn next_token_inner(&mut self) -> token::Token {
|
||||
match (c.unwrap(), self.nextch(), self.nextnextch()) {
|
||||
('\x00', Some('n'), Some('a')) => {
|
||||
let ast_ident = self.scan_embedded_hygienic_ident();
|
||||
let is_mod_name = self.curr_is(':') && self.nextch_is(':');
|
||||
return token::IDENT(ast_ident, is_mod_name);
|
||||
return if self.curr_is(':') && self.nextch_is(':') {
|
||||
token::Ident(ast_ident, token::ModName)
|
||||
} else {
|
||||
token::Ident(ast_ident, token::Plain)
|
||||
};
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@@ -946,84 +951,84 @@ fn next_token_inner(&mut self) -> token::Token {
|
||||
|
||||
match c.expect("next_token_inner called at EOF") {
|
||||
// One-byte tokens.
|
||||
';' => { self.bump(); return token::SEMI; }
|
||||
',' => { self.bump(); return token::COMMA; }
|
||||
';' => { self.bump(); return token::Semi; }
|
||||
',' => { self.bump(); return token::Comma; }
|
||||
'.' => {
|
||||
self.bump();
|
||||
return if self.curr_is('.') {
|
||||
self.bump();
|
||||
if self.curr_is('.') {
|
||||
self.bump();
|
||||
token::DOTDOTDOT
|
||||
token::DotDotDot
|
||||
} else {
|
||||
token::DOTDOT
|
||||
token::DotDot
|
||||
}
|
||||
} else {
|
||||
token::DOT
|
||||
token::Dot
|
||||
};
|
||||
}
|
||||
'(' => { self.bump(); return token::LPAREN; }
|
||||
')' => { self.bump(); return token::RPAREN; }
|
||||
'{' => { self.bump(); return token::LBRACE; }
|
||||
'}' => { self.bump(); return token::RBRACE; }
|
||||
'[' => { self.bump(); return token::LBRACKET; }
|
||||
']' => { self.bump(); return token::RBRACKET; }
|
||||
'@' => { self.bump(); return token::AT; }
|
||||
'#' => { self.bump(); return token::POUND; }
|
||||
'~' => { self.bump(); return token::TILDE; }
|
||||
'?' => { self.bump(); return token::QUESTION; }
|
||||
'(' => { self.bump(); return token::LParen; }
|
||||
')' => { self.bump(); return token::RParen; }
|
||||
'{' => { self.bump(); return token::LBrace; }
|
||||
'}' => { self.bump(); return token::RBrace; }
|
||||
'[' => { self.bump(); return token::LBracket; }
|
||||
']' => { self.bump(); return token::RBracket; }
|
||||
'@' => { self.bump(); return token::At; }
|
||||
'#' => { self.bump(); return token::Pound; }
|
||||
'~' => { self.bump(); return token::Tilde; }
|
||||
'?' => { self.bump(); return token::Question; }
|
||||
':' => {
|
||||
self.bump();
|
||||
if self.curr_is(':') {
|
||||
self.bump();
|
||||
return token::MOD_SEP;
|
||||
return token::ModSep;
|
||||
} else {
|
||||
return token::COLON;
|
||||
return token::Colon;
|
||||
}
|
||||
}
|
||||
|
||||
'$' => { self.bump(); return token::DOLLAR; }
|
||||
'$' => { self.bump(); return token::Dollar; }
|
||||
|
||||
// Multi-byte tokens.
|
||||
'=' => {
|
||||
self.bump();
|
||||
if self.curr_is('=') {
|
||||
self.bump();
|
||||
return token::EQEQ;
|
||||
return token::EqEq;
|
||||
} else if self.curr_is('>') {
|
||||
self.bump();
|
||||
return token::FAT_ARROW;
|
||||
return token::FatArrow;
|
||||
} else {
|
||||
return token::EQ;
|
||||
return token::Eq;
|
||||
}
|
||||
}
|
||||
'!' => {
|
||||
self.bump();
|
||||
if self.curr_is('=') {
|
||||
self.bump();
|
||||
return token::NE;
|
||||
} else { return token::NOT; }
|
||||
return token::Ne;
|
||||
} else { return token::Not; }
|
||||
}
|
||||
'<' => {
|
||||
self.bump();
|
||||
match self.curr.unwrap_or('\x00') {
|
||||
'=' => { self.bump(); return token::LE; }
|
||||
'<' => { return self.binop(token::SHL); }
|
||||
'=' => { self.bump(); return token::Le; }
|
||||
'<' => { return self.binop(token::Shl); }
|
||||
'-' => {
|
||||
self.bump();
|
||||
match self.curr.unwrap_or('\x00') {
|
||||
_ => { return token::LARROW; }
|
||||
_ => { return token::LArrow; }
|
||||
}
|
||||
}
|
||||
_ => { return token::LT; }
|
||||
_ => { return token::Lt; }
|
||||
}
|
||||
}
|
||||
'>' => {
|
||||
self.bump();
|
||||
match self.curr.unwrap_or('\x00') {
|
||||
'=' => { self.bump(); return token::GE; }
|
||||
'>' => { return self.binop(token::SHR); }
|
||||
_ => { return token::GT; }
|
||||
'=' => { self.bump(); return token::Ge; }
|
||||
'>' => { return self.binop(token::Shr); }
|
||||
_ => { return token::Gt; }
|
||||
}
|
||||
}
|
||||
'\'' => {
|
||||
@@ -1056,22 +1061,21 @@ fn next_token_inner(&mut self) -> token::Token {
|
||||
str_to_ident(lifetime_name)
|
||||
});
|
||||
let keyword_checking_token =
|
||||
&token::IDENT(keyword_checking_ident, false);
|
||||
&token::Ident(keyword_checking_ident, token::Plain);
|
||||
let last_bpos = self.last_pos;
|
||||
if token::is_keyword(token::keywords::Self,
|
||||
keyword_checking_token) {
|
||||
if keyword_checking_token.is_keyword(token::keywords::Self) {
|
||||
self.err_span_(start,
|
||||
last_bpos,
|
||||
"invalid lifetime name: 'self \
|
||||
is no longer a special lifetime");
|
||||
} else if token::is_any_keyword(keyword_checking_token) &&
|
||||
!token::is_keyword(token::keywords::Static,
|
||||
keyword_checking_token) {
|
||||
} else if keyword_checking_token.is_any_keyword() &&
|
||||
!keyword_checking_token.is_keyword(token::keywords::Static)
|
||||
{
|
||||
self.err_span_(start,
|
||||
last_bpos,
|
||||
"invalid lifetime name");
|
||||
}
|
||||
return token::LIFETIME(ident);
|
||||
return token::Lifetime(ident);
|
||||
}
|
||||
|
||||
// Otherwise it is a character constant:
|
||||
@@ -1087,7 +1091,7 @@ fn next_token_inner(&mut self) -> token::Token {
|
||||
}
|
||||
let id = if valid { self.name_from(start) } else { token::intern("0") };
|
||||
self.bump(); // advance curr past token
|
||||
return token::LIT_CHAR(id);
|
||||
return token::LitChar(id);
|
||||
}
|
||||
'b' => {
|
||||
self.bump();
|
||||
@@ -1095,7 +1099,7 @@ fn next_token_inner(&mut self) -> token::Token {
|
||||
Some('\'') => self.scan_byte(),
|
||||
Some('"') => self.scan_byte_string(),
|
||||
Some('r') => self.scan_raw_byte_string(),
|
||||
_ => unreachable!() // Should have been a token::IDENT above.
|
||||
_ => unreachable!() // Should have been a token::Ident above.
|
||||
};
|
||||
|
||||
}
|
||||
@@ -1118,7 +1122,7 @@ fn next_token_inner(&mut self) -> token::Token {
|
||||
let id = if valid { self.name_from(start_bpos + BytePos(1)) }
|
||||
else { token::intern("??") };
|
||||
self.bump();
|
||||
return token::LIT_STR(id);
|
||||
return token::LitStr(id);
|
||||
}
|
||||
'r' => {
|
||||
let start_bpos = self.last_pos;
|
||||
@@ -1185,33 +1189,33 @@ fn next_token_inner(&mut self) -> token::Token {
|
||||
} else {
|
||||
token::intern("??")
|
||||
};
|
||||
return token::LIT_STR_RAW(id, hash_count);
|
||||
return token::LitStrRaw(id, hash_count);
|
||||
}
|
||||
'-' => {
|
||||
if self.nextch_is('>') {
|
||||
self.bump();
|
||||
self.bump();
|
||||
return token::RARROW;
|
||||
} else { return self.binop(token::MINUS); }
|
||||
return token::RArrow;
|
||||
} else { return self.binop(token::Minus); }
|
||||
}
|
||||
'&' => {
|
||||
if self.nextch_is('&') {
|
||||
self.bump();
|
||||
self.bump();
|
||||
return token::ANDAND;
|
||||
} else { return self.binop(token::AND); }
|
||||
return token::AndAnd;
|
||||
} else { return self.binop(token::And); }
|
||||
}
|
||||
'|' => {
|
||||
match self.nextch() {
|
||||
Some('|') => { self.bump(); self.bump(); return token::OROR; }
|
||||
_ => { return self.binop(token::OR); }
|
||||
Some('|') => { self.bump(); self.bump(); return token::OrOr; }
|
||||
_ => { return self.binop(token::Or); }
|
||||
}
|
||||
}
|
||||
'+' => { return self.binop(token::PLUS); }
|
||||
'*' => { return self.binop(token::STAR); }
|
||||
'/' => { return self.binop(token::SLASH); }
|
||||
'^' => { return self.binop(token::CARET); }
|
||||
'%' => { return self.binop(token::PERCENT); }
|
||||
'+' => { return self.binop(token::Plus); }
|
||||
'*' => { return self.binop(token::Star); }
|
||||
'/' => { return self.binop(token::Slash); }
|
||||
'^' => { return self.binop(token::Caret); }
|
||||
'%' => { return self.binop(token::Percent); }
|
||||
c => {
|
||||
let last_bpos = self.last_pos;
|
||||
let bpos = self.pos;
|
||||
@@ -1275,7 +1279,7 @@ fn scan_byte(&mut self) -> token::Token {
|
||||
|
||||
let id = if valid { self.name_from(start) } else { token::intern("??") };
|
||||
self.bump(); // advance curr past token
|
||||
return token::LIT_BYTE(id);
|
||||
return token::LitByte(id);
|
||||
}
|
||||
|
||||
fn scan_byte_string(&mut self) -> token::Token {
|
||||
@@ -1297,7 +1301,7 @@ fn scan_byte_string(&mut self) -> token::Token {
|
||||
}
|
||||
let id = if valid { self.name_from(start) } else { token::intern("??") };
|
||||
self.bump();
|
||||
return token::LIT_BINARY(id);
|
||||
return token::LitBinary(id);
|
||||
}
|
||||
|
||||
fn scan_raw_byte_string(&mut self) -> token::Token {
|
||||
@@ -1348,7 +1352,7 @@ fn scan_raw_byte_string(&mut self) -> token::Token {
|
||||
self.bump();
|
||||
}
|
||||
self.bump();
|
||||
return token::LIT_BINARY_RAW(self.name_from_to(content_start_bpos, content_end_bpos),
|
||||
return token::LitBinaryRaw(self.name_from_to(content_start_bpos, content_end_bpos),
|
||||
hash_count);
|
||||
}
|
||||
}
|
||||
@@ -1431,20 +1435,20 @@ fn setup<'a>(span_handler: &'a diagnostic::SpanHandler,
|
||||
"/* my source file */ \
|
||||
fn main() { println!(\"zebra\"); }\n".to_string());
|
||||
let id = str_to_ident("fn");
|
||||
assert_eq!(string_reader.next_token().tok, token::COMMENT);
|
||||
assert_eq!(string_reader.next_token().tok, token::WS);
|
||||
assert_eq!(string_reader.next_token().tok, token::Comment);
|
||||
assert_eq!(string_reader.next_token().tok, token::Whitespace);
|
||||
let tok1 = string_reader.next_token();
|
||||
let tok2 = TokenAndSpan{
|
||||
tok:token::IDENT(id, false),
|
||||
tok:token::Ident(id, token::Plain),
|
||||
sp:Span {lo:BytePos(21),hi:BytePos(23),expn_id: NO_EXPANSION}};
|
||||
assert_eq!(tok1,tok2);
|
||||
assert_eq!(string_reader.next_token().tok, token::WS);
|
||||
assert_eq!(string_reader.next_token().tok, token::Whitespace);
|
||||
// the 'main' id is already read:
|
||||
assert_eq!(string_reader.last_pos.clone(), BytePos(28));
|
||||
// read another token:
|
||||
let tok3 = string_reader.next_token();
|
||||
let tok4 = TokenAndSpan{
|
||||
tok:token::IDENT(str_to_ident("main"), false),
|
||||
tok:token::Ident(str_to_ident("main"), token::Plain),
|
||||
sp:Span {lo:BytePos(24),hi:BytePos(28),expn_id: NO_EXPANSION}};
|
||||
assert_eq!(tok3,tok4);
|
||||
// the lparen is already read:
|
||||
@@ -1459,66 +1463,72 @@ fn check_tokenization (mut string_reader: StringReader, expected: Vec<token::Tok
|
||||
}
|
||||
}
|
||||
|
||||
// make the identifier by looking up the string in the interner
|
||||
#[cfg(stage0)]
|
||||
fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
|
||||
token::IDENT (str_to_ident(id),is_mod_name)
|
||||
token::Ident(str_to_ident(id), is_mod_name)
|
||||
}
|
||||
|
||||
// make the identifier by looking up the string in the interner
|
||||
#[cfg(not(stage0))]
|
||||
fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token {
|
||||
token::Ident(str_to_ident(id), style)
|
||||
}
|
||||
|
||||
#[test] fn doublecolonparsing () {
|
||||
check_tokenization(setup(&mk_sh(), "a b".to_string()),
|
||||
vec!(mk_ident("a",false),
|
||||
token::WS,
|
||||
mk_ident("b",false)));
|
||||
vec![mk_ident("a", token::Plain),
|
||||
token::Whitespace,
|
||||
mk_ident("b", token::Plain)]);
|
||||
}
|
||||
|
||||
#[test] fn dcparsing_2 () {
|
||||
check_tokenization(setup(&mk_sh(), "a::b".to_string()),
|
||||
vec!(mk_ident("a",true),
|
||||
token::MOD_SEP,
|
||||
mk_ident("b",false)));
|
||||
vec![mk_ident("a",token::ModName),
|
||||
token::ModSep,
|
||||
mk_ident("b", token::Plain)]);
|
||||
}
|
||||
|
||||
#[test] fn dcparsing_3 () {
|
||||
check_tokenization(setup(&mk_sh(), "a ::b".to_string()),
|
||||
vec!(mk_ident("a",false),
|
||||
token::WS,
|
||||
token::MOD_SEP,
|
||||
mk_ident("b",false)));
|
||||
vec![mk_ident("a", token::Plain),
|
||||
token::Whitespace,
|
||||
token::ModSep,
|
||||
mk_ident("b", token::Plain)]);
|
||||
}
|
||||
|
||||
#[test] fn dcparsing_4 () {
|
||||
check_tokenization(setup(&mk_sh(), "a:: b".to_string()),
|
||||
vec!(mk_ident("a",true),
|
||||
token::MOD_SEP,
|
||||
token::WS,
|
||||
mk_ident("b",false)));
|
||||
vec![mk_ident("a",token::ModName),
|
||||
token::ModSep,
|
||||
token::Whitespace,
|
||||
mk_ident("b", token::Plain)]);
|
||||
}
|
||||
|
||||
#[test] fn character_a() {
|
||||
assert_eq!(setup(&mk_sh(), "'a'".to_string()).next_token().tok,
|
||||
token::LIT_CHAR(token::intern("a")));
|
||||
token::LitChar(token::intern("a")));
|
||||
}
|
||||
|
||||
#[test] fn character_space() {
|
||||
assert_eq!(setup(&mk_sh(), "' '".to_string()).next_token().tok,
|
||||
token::LIT_CHAR(token::intern(" ")));
|
||||
token::LitChar(token::intern(" ")));
|
||||
}
|
||||
|
||||
#[test] fn character_escaped() {
|
||||
assert_eq!(setup(&mk_sh(), "'\\n'".to_string()).next_token().tok,
|
||||
token::LIT_CHAR(token::intern("\\n")));
|
||||
token::LitChar(token::intern("\\n")));
|
||||
}
|
||||
|
||||
#[test] fn lifetime_name() {
|
||||
assert_eq!(setup(&mk_sh(), "'abc".to_string()).next_token().tok,
|
||||
token::LIFETIME(token::str_to_ident("'abc")));
|
||||
token::Lifetime(token::str_to_ident("'abc")));
|
||||
}
|
||||
|
||||
#[test] fn raw_string() {
|
||||
assert_eq!(setup(&mk_sh(),
|
||||
"r###\"\"#a\\b\x00c\"\"###".to_string()).next_token()
|
||||
.tok,
|
||||
token::LIT_STR_RAW(token::intern("\"#a\\b\x00c\""), 3));
|
||||
token::LitStrRaw(token::intern("\"#a\\b\x00c\""), 3));
|
||||
}
|
||||
|
||||
#[test] fn line_doc_comments() {
|
||||
@@ -1531,10 +1541,10 @@ fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
|
||||
let sh = mk_sh();
|
||||
let mut lexer = setup(&sh, "/* /* */ */'a'".to_string());
|
||||
match lexer.next_token().tok {
|
||||
token::COMMENT => { },
|
||||
token::Comment => { },
|
||||
_ => fail!("expected a comment!")
|
||||
}
|
||||
assert_eq!(lexer.next_token().tok, token::LIT_CHAR(token::intern("a")));
|
||||
assert_eq!(lexer.next_token().tok, token::LitChar(token::intern("a")));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
+31
-31
@@ -793,34 +793,34 @@ fn string_to_tts_macro () {
|
||||
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
|
||||
let tts: &[ast::TokenTree] = tts.as_slice();
|
||||
match tts {
|
||||
[ast::TtToken(_, token::IDENT(name_macro_rules, false)),
|
||||
ast::TtToken(_, token::NOT),
|
||||
ast::TtToken(_, token::IDENT(name_zip, false)),
|
||||
[ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
|
||||
ast::TtToken(_, token::Not),
|
||||
ast::TtToken(_, token::Ident(name_zip, token::Plain)),
|
||||
ast::TtDelimited(_, ref macro_delimed)]
|
||||
if name_macro_rules.as_str() == "macro_rules"
|
||||
&& name_zip.as_str() == "zip" => {
|
||||
let (ref macro_open, ref macro_tts, ref macro_close) = **macro_delimed;
|
||||
match (macro_open, macro_tts.as_slice(), macro_close) {
|
||||
(&ast::Delimiter { token: token::LPAREN, .. },
|
||||
(&ast::Delimiter { token: token::LParen, .. },
|
||||
[ast::TtDelimited(_, ref first_delimed),
|
||||
ast::TtToken(_, token::FAT_ARROW),
|
||||
ast::TtToken(_, token::FatArrow),
|
||||
ast::TtDelimited(_, ref second_delimed)],
|
||||
&ast::Delimiter { token: token::RPAREN, .. }) => {
|
||||
&ast::Delimiter { token: token::RParen, .. }) => {
|
||||
let (ref first_open, ref first_tts, ref first_close) = **first_delimed;
|
||||
match (first_open, first_tts.as_slice(), first_close) {
|
||||
(&ast::Delimiter { token: token::LPAREN, .. },
|
||||
[ast::TtToken(_, token::DOLLAR),
|
||||
ast::TtToken(_, token::IDENT(name, false))],
|
||||
&ast::Delimiter { token: token::RPAREN, .. })
|
||||
(&ast::Delimiter { token: token::LParen, .. },
|
||||
[ast::TtToken(_, token::Dollar),
|
||||
ast::TtToken(_, token::Ident(name, token::Plain))],
|
||||
&ast::Delimiter { token: token::RParen, .. })
|
||||
if name.as_str() == "a" => {},
|
||||
_ => fail!("value 3: {}", **first_delimed),
|
||||
}
|
||||
let (ref second_open, ref second_tts, ref second_close) = **second_delimed;
|
||||
match (second_open, second_tts.as_slice(), second_close) {
|
||||
(&ast::Delimiter { token: token::LPAREN, .. },
|
||||
[ast::TtToken(_, token::DOLLAR),
|
||||
ast::TtToken(_, token::IDENT(name, false))],
|
||||
&ast::Delimiter { token: token::RPAREN, .. })
|
||||
(&ast::Delimiter { token: token::LParen, .. },
|
||||
[ast::TtToken(_, token::Dollar),
|
||||
ast::TtToken(_, token::Ident(name, token::Plain))],
|
||||
&ast::Delimiter { token: token::RParen, .. })
|
||||
if name.as_str() == "a" => {},
|
||||
_ => fail!("value 4: {}", **second_delimed),
|
||||
}
|
||||
@@ -842,10 +842,10 @@ fn string_to_tts_1 () {
|
||||
\"fields\":[\
|
||||
null,\
|
||||
{\
|
||||
\"variant\":\"IDENT\",\
|
||||
\"variant\":\"Ident\",\
|
||||
\"fields\":[\
|
||||
\"fn\",\
|
||||
false\
|
||||
\"Plain\"\
|
||||
]\
|
||||
}\
|
||||
]\
|
||||
@@ -855,10 +855,10 @@ fn string_to_tts_1 () {
|
||||
\"fields\":[\
|
||||
null,\
|
||||
{\
|
||||
\"variant\":\"IDENT\",\
|
||||
\"variant\":\"Ident\",\
|
||||
\"fields\":[\
|
||||
\"a\",\
|
||||
false\
|
||||
\"Plain\"\
|
||||
]\
|
||||
}\
|
||||
]\
|
||||
@@ -870,7 +870,7 @@ fn string_to_tts_1 () {
|
||||
[\
|
||||
{\
|
||||
\"span\":null,\
|
||||
\"token\":\"LPAREN\"\
|
||||
\"token\":\"LParen\"\
|
||||
},\
|
||||
[\
|
||||
{\
|
||||
@@ -878,10 +878,10 @@ fn string_to_tts_1 () {
|
||||
\"fields\":[\
|
||||
null,\
|
||||
{\
|
||||
\"variant\":\"IDENT\",\
|
||||
\"variant\":\"Ident\",\
|
||||
\"fields\":[\
|
||||
\"b\",\
|
||||
false\
|
||||
\"Plain\"\
|
||||
]\
|
||||
}\
|
||||
]\
|
||||
@@ -890,7 +890,7 @@ fn string_to_tts_1 () {
|
||||
\"variant\":\"TtToken\",\
|
||||
\"fields\":[\
|
||||
null,\
|
||||
\"COLON\"\
|
||||
\"Colon\"\
|
||||
]\
|
||||
},\
|
||||
{\
|
||||
@@ -898,10 +898,10 @@ fn string_to_tts_1 () {
|
||||
\"fields\":[\
|
||||
null,\
|
||||
{\
|
||||
\"variant\":\"IDENT\",\
|
||||
\"variant\":\"Ident\",\
|
||||
\"fields\":[\
|
||||
\"int\",\
|
||||
false\
|
||||
\"Plain\"\
|
||||
]\
|
||||
}\
|
||||
]\
|
||||
@@ -909,7 +909,7 @@ fn string_to_tts_1 () {
|
||||
],\
|
||||
{\
|
||||
\"span\":null,\
|
||||
\"token\":\"RPAREN\"\
|
||||
\"token\":\"RParen\"\
|
||||
}\
|
||||
]\
|
||||
]\
|
||||
@@ -921,7 +921,7 @@ fn string_to_tts_1 () {
|
||||
[\
|
||||
{\
|
||||
\"span\":null,\
|
||||
\"token\":\"LBRACE\"\
|
||||
\"token\":\"LBrace\"\
|
||||
},\
|
||||
[\
|
||||
{\
|
||||
@@ -929,10 +929,10 @@ fn string_to_tts_1 () {
|
||||
\"fields\":[\
|
||||
null,\
|
||||
{\
|
||||
\"variant\":\"IDENT\",\
|
||||
\"variant\":\"Ident\",\
|
||||
\"fields\":[\
|
||||
\"b\",\
|
||||
false\
|
||||
\"Plain\"\
|
||||
]\
|
||||
}\
|
||||
]\
|
||||
@@ -941,13 +941,13 @@ fn string_to_tts_1 () {
|
||||
\"variant\":\"TtToken\",\
|
||||
\"fields\":[\
|
||||
null,\
|
||||
\"SEMI\"\
|
||||
\"Semi\"\
|
||||
]\
|
||||
}\
|
||||
],\
|
||||
{\
|
||||
\"span\":null,\
|
||||
\"token\":\"RBRACE\"\
|
||||
\"token\":\"RBrace\"\
|
||||
}\
|
||||
]\
|
||||
]\
|
||||
@@ -1002,7 +1002,7 @@ fn string_to_tts_1 () {
|
||||
}
|
||||
|
||||
fn parser_done(p: Parser){
|
||||
assert_eq!(p.token.clone(), token::EOF);
|
||||
assert_eq!(p.token.clone(), token::Eof);
|
||||
}
|
||||
|
||||
#[test] fn parse_ident_pat () {
|
||||
|
||||
@@ -118,7 +118,7 @@ fn report(&mut self,
|
||||
|
||||
fn is_obsolete_ident(&mut self, ident: &str) -> bool {
|
||||
match self.token {
|
||||
token::IDENT(sid, _) => {
|
||||
token::Ident(sid, _) => {
|
||||
token::get_ident(sid).equiv(&ident)
|
||||
}
|
||||
_ => false
|
||||
|
||||
+506
-534
@@ -74,11 +74,11 @@
|
||||
use parse::lexer::Reader;
|
||||
use parse::lexer::TokenAndSpan;
|
||||
use parse::obsolete::*;
|
||||
use parse::token::{INTERPOLATED, InternedString, can_begin_expr};
|
||||
use parse::token::{is_ident, is_ident_or_path, is_plain_ident};
|
||||
use parse::token::{keywords, special_idents, token_to_binop};
|
||||
use parse::token::InternedString;
|
||||
use parse::token::{keywords, special_idents};
|
||||
use parse::token;
|
||||
use parse::{new_sub_parser_from_file, ParseSess};
|
||||
use print::pprust;
|
||||
use ptr::P;
|
||||
use owned_slice::OwnedSlice;
|
||||
|
||||
@@ -134,34 +134,33 @@ enum ItemOrViewItem {
|
||||
}
|
||||
|
||||
|
||||
/// Possibly accept an `INTERPOLATED` expression (a pre-parsed expression
|
||||
/// dropped into the token stream, which happens while parsing the
|
||||
/// result of macro expansion)
|
||||
/// Placement of these is not as complex as I feared it would be.
|
||||
/// The important thing is to make sure that lookahead doesn't balk
|
||||
/// at INTERPOLATED tokens
|
||||
/// Possibly accept an `token::Interpolated` expression (a pre-parsed expression
|
||||
/// dropped into the token stream, which happens while parsing the result of
|
||||
/// macro expansion). Placement of these is not as complex as I feared it would
|
||||
/// be. The important thing is to make sure that lookahead doesn't balk at
|
||||
/// `token::Interpolated` tokens.
|
||||
macro_rules! maybe_whole_expr (
|
||||
($p:expr) => (
|
||||
{
|
||||
let found = match $p.token {
|
||||
INTERPOLATED(token::NtExpr(ref e)) => {
|
||||
token::Interpolated(token::NtExpr(ref e)) => {
|
||||
Some((*e).clone())
|
||||
}
|
||||
INTERPOLATED(token::NtPath(_)) => {
|
||||
token::Interpolated(token::NtPath(_)) => {
|
||||
// FIXME: The following avoids an issue with lexical borrowck scopes,
|
||||
// but the clone is unfortunate.
|
||||
let pt = match $p.token {
|
||||
INTERPOLATED(token::NtPath(ref pt)) => (**pt).clone(),
|
||||
token::Interpolated(token::NtPath(ref pt)) => (**pt).clone(),
|
||||
_ => unreachable!()
|
||||
};
|
||||
let span = $p.span;
|
||||
Some($p.mk_expr(span.lo, span.hi, ExprPath(pt)))
|
||||
}
|
||||
INTERPOLATED(token::NtBlock(_)) => {
|
||||
token::Interpolated(token::NtBlock(_)) => {
|
||||
// FIXME: The following avoids an issue with lexical borrowck scopes,
|
||||
// but the clone is unfortunate.
|
||||
let b = match $p.token {
|
||||
INTERPOLATED(token::NtBlock(ref b)) => (*b).clone(),
|
||||
token::Interpolated(token::NtBlock(ref b)) => (*b).clone(),
|
||||
_ => unreachable!()
|
||||
};
|
||||
let span = $p.span;
|
||||
@@ -185,13 +184,13 @@ macro_rules! maybe_whole (
|
||||
($p:expr, $constructor:ident) => (
|
||||
{
|
||||
let found = match ($p).token {
|
||||
INTERPOLATED(token::$constructor(_)) => {
|
||||
token::Interpolated(token::$constructor(_)) => {
|
||||
Some(($p).bump_and_get())
|
||||
}
|
||||
_ => None
|
||||
};
|
||||
match found {
|
||||
Some(INTERPOLATED(token::$constructor(x))) => {
|
||||
Some(token::Interpolated(token::$constructor(x))) => {
|
||||
return x.clone()
|
||||
}
|
||||
_ => {}
|
||||
@@ -201,13 +200,13 @@ macro_rules! maybe_whole (
|
||||
(no_clone $p:expr, $constructor:ident) => (
|
||||
{
|
||||
let found = match ($p).token {
|
||||
INTERPOLATED(token::$constructor(_)) => {
|
||||
token::Interpolated(token::$constructor(_)) => {
|
||||
Some(($p).bump_and_get())
|
||||
}
|
||||
_ => None
|
||||
};
|
||||
match found {
|
||||
Some(INTERPOLATED(token::$constructor(x))) => {
|
||||
Some(token::Interpolated(token::$constructor(x))) => {
|
||||
return x
|
||||
}
|
||||
_ => {}
|
||||
@@ -217,13 +216,13 @@ macro_rules! maybe_whole (
|
||||
(deref $p:expr, $constructor:ident) => (
|
||||
{
|
||||
let found = match ($p).token {
|
||||
INTERPOLATED(token::$constructor(_)) => {
|
||||
token::Interpolated(token::$constructor(_)) => {
|
||||
Some(($p).bump_and_get())
|
||||
}
|
||||
_ => None
|
||||
};
|
||||
match found {
|
||||
Some(INTERPOLATED(token::$constructor(x))) => {
|
||||
Some(token::Interpolated(token::$constructor(x))) => {
|
||||
return (*x).clone()
|
||||
}
|
||||
_ => {}
|
||||
@@ -233,13 +232,13 @@ macro_rules! maybe_whole (
|
||||
(Some $p:expr, $constructor:ident) => (
|
||||
{
|
||||
let found = match ($p).token {
|
||||
INTERPOLATED(token::$constructor(_)) => {
|
||||
token::Interpolated(token::$constructor(_)) => {
|
||||
Some(($p).bump_and_get())
|
||||
}
|
||||
_ => None
|
||||
};
|
||||
match found {
|
||||
Some(INTERPOLATED(token::$constructor(x))) => {
|
||||
Some(token::Interpolated(token::$constructor(x))) => {
|
||||
return Some(x.clone()),
|
||||
}
|
||||
_ => {}
|
||||
@@ -249,13 +248,13 @@ macro_rules! maybe_whole (
|
||||
(iovi $p:expr, $constructor:ident) => (
|
||||
{
|
||||
let found = match ($p).token {
|
||||
INTERPOLATED(token::$constructor(_)) => {
|
||||
token::Interpolated(token::$constructor(_)) => {
|
||||
Some(($p).bump_and_get())
|
||||
}
|
||||
_ => None
|
||||
};
|
||||
match found {
|
||||
Some(INTERPOLATED(token::$constructor(x))) => {
|
||||
Some(token::Interpolated(token::$constructor(x))) => {
|
||||
return IoviItem(x.clone())
|
||||
}
|
||||
_ => {}
|
||||
@@ -265,13 +264,13 @@ macro_rules! maybe_whole (
|
||||
(pair_empty $p:expr, $constructor:ident) => (
|
||||
{
|
||||
let found = match ($p).token {
|
||||
INTERPOLATED(token::$constructor(_)) => {
|
||||
token::Interpolated(token::$constructor(_)) => {
|
||||
Some(($p).bump_and_get())
|
||||
}
|
||||
_ => None
|
||||
};
|
||||
match found {
|
||||
Some(INTERPOLATED(token::$constructor(x))) => {
|
||||
Some(token::Interpolated(token::$constructor(x))) => {
|
||||
return (Vec::new(), x)
|
||||
}
|
||||
_ => {}
|
||||
@@ -336,7 +335,7 @@ pub struct Parser<'a> {
|
||||
}
|
||||
|
||||
fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
|
||||
is_plain_ident(t) || *t == token::UNDERSCORE
|
||||
t.is_plain_ident() || *t == token::Underscore
|
||||
}
|
||||
|
||||
/// Get a token the parser cares about
|
||||
@@ -344,7 +343,7 @@ fn real_token(rdr: &mut Reader) -> TokenAndSpan {
|
||||
let mut t = rdr.next_token();
|
||||
loop {
|
||||
match t.tok {
|
||||
token::WS | token::COMMENT | token::SHEBANG(_) => {
|
||||
token::Whitespace | token::Comment | token::Shebang(_) => {
|
||||
t = rdr.next_token();
|
||||
},
|
||||
_ => break
|
||||
@@ -362,7 +361,7 @@ pub fn new(sess: &'a ParseSess,
|
||||
let tok0 = real_token(&mut *rdr);
|
||||
let span = tok0.sp;
|
||||
let placeholder = TokenAndSpan {
|
||||
tok: token::UNDERSCORE,
|
||||
tok: token::Underscore,
|
||||
sp: span,
|
||||
};
|
||||
|
||||
@@ -396,7 +395,7 @@ pub fn new(sess: &'a ParseSess,
|
||||
|
||||
/// Convert a token to a string using self's reader
|
||||
pub fn token_to_string(token: &token::Token) -> String {
|
||||
token::to_string(token)
|
||||
pprust::token_to_string(token)
|
||||
}
|
||||
|
||||
/// Convert the current token to a string using self's reader
|
||||
@@ -475,15 +474,15 @@ fn tokens_to_string(tokens: &[token::Token]) -> String {
|
||||
/// recover (without consuming any expected input token). Returns
|
||||
/// true if and only if input was consumed for recovery.
|
||||
pub fn check_for_erroneous_unit_struct_expecting(&mut self, expected: &[token::Token]) -> bool {
|
||||
if self.token == token::LBRACE
|
||||
&& expected.iter().all(|t| *t != token::LBRACE)
|
||||
&& self.look_ahead(1, |t| *t == token::RBRACE) {
|
||||
if self.token == token::LBrace
|
||||
&& expected.iter().all(|t| *t != token::LBrace)
|
||||
&& self.look_ahead(1, |t| *t == token::RBrace) {
|
||||
// matched; signal non-fatal error and recover.
|
||||
let span = self.span;
|
||||
self.span_err(span,
|
||||
"unit-like struct construction is written with no trailing `{ }`");
|
||||
self.eat(&token::LBRACE);
|
||||
self.eat(&token::RBRACE);
|
||||
self.eat(&token::LBrace);
|
||||
self.eat(&token::RBrace);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
@@ -518,7 +517,7 @@ pub fn commit_expr_expecting(&mut self, e: &Expr, edible: token::Token) {
|
||||
pub fn commit_stmt(&mut self, edible: &[token::Token], inedible: &[token::Token]) {
|
||||
if self.last_token
|
||||
.as_ref()
|
||||
.map_or(false, |t| is_ident_or_path(&**t)) {
|
||||
.map_or(false, |t| t.is_ident() || t.is_path()) {
|
||||
let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
|
||||
expected.push_all(inedible.as_slice());
|
||||
self.check_for_erroneous_unit_struct_expecting(
|
||||
@@ -535,11 +534,11 @@ pub fn parse_ident(&mut self) -> ast::Ident {
|
||||
self.check_strict_keywords();
|
||||
self.check_reserved_keywords();
|
||||
match self.token {
|
||||
token::IDENT(i, _) => {
|
||||
token::Ident(i, _) => {
|
||||
self.bump();
|
||||
i
|
||||
}
|
||||
token::INTERPOLATED(token::NtIdent(..)) => {
|
||||
token::Interpolated(token::NtIdent(..)) => {
|
||||
self.bug("ident interpolation not converted to real token");
|
||||
}
|
||||
_ => {
|
||||
@@ -570,14 +569,10 @@ pub fn eat(&mut self, tok: &token::Token) -> bool {
|
||||
is_present
|
||||
}
|
||||
|
||||
pub fn is_keyword(&mut self, kw: keywords::Keyword) -> bool {
|
||||
token::is_keyword(kw, &self.token)
|
||||
}
|
||||
|
||||
/// If the next token is the given keyword, eat it and return
|
||||
/// true. Otherwise, return false.
|
||||
pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
|
||||
if self.is_keyword(kw) {
|
||||
if self.token.is_keyword(kw) {
|
||||
self.bump();
|
||||
true
|
||||
} else {
|
||||
@@ -599,7 +594,7 @@ pub fn expect_keyword(&mut self, kw: keywords::Keyword) {
|
||||
|
||||
/// Signal an error if the given string is a strict keyword
|
||||
pub fn check_strict_keywords(&mut self) {
|
||||
if token::is_strict_keyword(&self.token) {
|
||||
if self.token.is_strict_keyword() {
|
||||
let token_str = self.this_token_to_string();
|
||||
let span = self.span;
|
||||
self.span_err(span,
|
||||
@@ -610,7 +605,7 @@ pub fn check_strict_keywords(&mut self) {
|
||||
|
||||
/// Signal an error if the current token is a reserved keyword
|
||||
pub fn check_reserved_keywords(&mut self) {
|
||||
if token::is_reserved_keyword(&self.token) {
|
||||
if self.token.is_reserved_keyword() {
|
||||
let token_str = self.this_token_to_string();
|
||||
self.fatal(format!("`{}` is a reserved keyword",
|
||||
token_str).as_slice())
|
||||
@@ -621,16 +616,16 @@ pub fn check_reserved_keywords(&mut self) {
|
||||
/// `&` and continue. If an `&` is not seen, signal an error.
|
||||
fn expect_and(&mut self) {
|
||||
match self.token {
|
||||
token::BINOP(token::AND) => self.bump(),
|
||||
token::ANDAND => {
|
||||
token::BinOp(token::And) => self.bump(),
|
||||
token::AndAnd => {
|
||||
let span = self.span;
|
||||
let lo = span.lo + BytePos(1);
|
||||
self.replace_token(token::BINOP(token::AND), lo, span.hi)
|
||||
self.replace_token(token::BinOp(token::And), lo, span.hi)
|
||||
}
|
||||
_ => {
|
||||
let token_str = self.this_token_to_string();
|
||||
let found_token =
|
||||
Parser::token_to_string(&token::BINOP(token::AND));
|
||||
Parser::token_to_string(&token::BinOp(token::And));
|
||||
self.fatal(format!("expected `{}`, found `{}`",
|
||||
found_token,
|
||||
token_str).as_slice())
|
||||
@@ -642,16 +637,16 @@ fn expect_and(&mut self) {
|
||||
/// `|` and continue. If a `|` is not seen, signal an error.
|
||||
fn expect_or(&mut self) {
|
||||
match self.token {
|
||||
token::BINOP(token::OR) => self.bump(),
|
||||
token::OROR => {
|
||||
token::BinOp(token::Or) => self.bump(),
|
||||
token::OrOr => {
|
||||
let span = self.span;
|
||||
let lo = span.lo + BytePos(1);
|
||||
self.replace_token(token::BINOP(token::OR), lo, span.hi)
|
||||
self.replace_token(token::BinOp(token::Or), lo, span.hi)
|
||||
}
|
||||
_ => {
|
||||
let found_token = self.this_token_to_string();
|
||||
let token_str =
|
||||
Parser::token_to_string(&token::BINOP(token::OR));
|
||||
Parser::token_to_string(&token::BinOp(token::Or));
|
||||
self.fatal(format!("expected `{}`, found `{}`",
|
||||
token_str,
|
||||
found_token).as_slice())
|
||||
@@ -681,16 +676,16 @@ fn expect_or(&mut self) {
|
||||
/// impl Foo<<'a> ||>() { ... }
|
||||
fn eat_lt(&mut self, force: bool) -> bool {
|
||||
match self.token {
|
||||
token::LT => { self.bump(); true }
|
||||
token::BINOP(token::SHL) => {
|
||||
token::Lt => { self.bump(); true }
|
||||
token::BinOp(token::Shl) => {
|
||||
let next_lifetime = self.look_ahead(1, |t| match *t {
|
||||
token::LIFETIME(..) => true,
|
||||
token::Lifetime(..) => true,
|
||||
_ => false,
|
||||
});
|
||||
if force || next_lifetime {
|
||||
let span = self.span;
|
||||
let lo = span.lo + BytePos(1);
|
||||
self.replace_token(token::LT, lo, span.hi);
|
||||
self.replace_token(token::Lt, lo, span.hi);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
@@ -703,7 +698,7 @@ fn eat_lt(&mut self, force: bool) -> bool {
|
||||
fn expect_lt(&mut self) {
|
||||
if !self.eat_lt(true) {
|
||||
let found_token = self.this_token_to_string();
|
||||
let token_str = Parser::token_to_string(&token::LT);
|
||||
let token_str = Parser::token_to_string(&token::Lt);
|
||||
self.fatal(format!("expected `{}`, found `{}`",
|
||||
token_str,
|
||||
found_token).as_slice())
|
||||
@@ -718,8 +713,8 @@ fn parse_seq_to_before_or<T>(
|
||||
-> Vec<T> {
|
||||
let mut first = true;
|
||||
let mut vector = Vec::new();
|
||||
while self.token != token::BINOP(token::OR) &&
|
||||
self.token != token::OROR {
|
||||
while self.token != token::BinOp(token::Or) &&
|
||||
self.token != token::OrOr {
|
||||
if first {
|
||||
first = false
|
||||
} else {
|
||||
@@ -736,24 +731,24 @@ fn parse_seq_to_before_or<T>(
|
||||
/// signal an error.
|
||||
pub fn expect_gt(&mut self) {
|
||||
match self.token {
|
||||
token::GT => self.bump(),
|
||||
token::BINOP(token::SHR) => {
|
||||
token::Gt => self.bump(),
|
||||
token::BinOp(token::Shr) => {
|
||||
let span = self.span;
|
||||
let lo = span.lo + BytePos(1);
|
||||
self.replace_token(token::GT, lo, span.hi)
|
||||
self.replace_token(token::Gt, lo, span.hi)
|
||||
}
|
||||
token::BINOPEQ(token::SHR) => {
|
||||
token::BinOpEq(token::Shr) => {
|
||||
let span = self.span;
|
||||
let lo = span.lo + BytePos(1);
|
||||
self.replace_token(token::GE, lo, span.hi)
|
||||
self.replace_token(token::Ge, lo, span.hi)
|
||||
}
|
||||
token::GE => {
|
||||
token::Ge => {
|
||||
let span = self.span;
|
||||
let lo = span.lo + BytePos(1);
|
||||
self.replace_token(token::EQ, lo, span.hi)
|
||||
self.replace_token(token::Eq, lo, span.hi)
|
||||
}
|
||||
_ => {
|
||||
let gt_str = Parser::token_to_string(&token::GT);
|
||||
let gt_str = Parser::token_to_string(&token::Gt);
|
||||
let this_token_str = self.this_token_to_string();
|
||||
self.fatal(format!("expected `{}`, found `{}`",
|
||||
gt_str,
|
||||
@@ -777,10 +772,10 @@ pub fn parse_seq_to_before_gt<T>(
|
||||
// commas in generic parameters, because it can stop either after
|
||||
// parsing a type or after parsing a comma.
|
||||
for i in iter::count(0u, 1) {
|
||||
if self.token == token::GT
|
||||
|| self.token == token::BINOP(token::SHR)
|
||||
|| self.token == token::GE
|
||||
|| self.token == token::BINOPEQ(token::SHR) {
|
||||
if self.token == token::Gt
|
||||
|| self.token == token::BinOp(token::Shr)
|
||||
|| self.token == token::Ge
|
||||
|| self.token == token::BinOpEq(token::Shr) {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -897,7 +892,7 @@ pub fn parse_seq<T>(
|
||||
pub fn bump(&mut self) {
|
||||
self.last_span = self.span;
|
||||
// Stash token for error recovery (sometimes; clone is not necessarily cheap).
|
||||
self.last_token = if is_ident_or_path(&self.token) {
|
||||
self.last_token = if self.token.is_ident() || self.token.is_path() {
|
||||
Some(box self.token.clone())
|
||||
} else {
|
||||
None
|
||||
@@ -911,7 +906,7 @@ pub fn bump(&mut self) {
|
||||
self.buffer_start = next_index as int;
|
||||
|
||||
let placeholder = TokenAndSpan {
|
||||
tok: token::UNDERSCORE,
|
||||
tok: token::Underscore,
|
||||
sp: self.span,
|
||||
};
|
||||
replace(&mut self.buffer[buffer_start], placeholder)
|
||||
@@ -923,7 +918,7 @@ pub fn bump(&mut self) {
|
||||
|
||||
/// Advance the parser by one token and return the bumped token.
|
||||
pub fn bump_and_get(&mut self) -> token::Token {
|
||||
let old_token = replace(&mut self.token, token::UNDERSCORE);
|
||||
let old_token = replace(&mut self.token, token::Underscore);
|
||||
self.bump();
|
||||
old_token
|
||||
}
|
||||
@@ -987,13 +982,13 @@ pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString {
|
||||
/// Is the current token one of the keywords that signals a bare function
|
||||
/// type?
|
||||
pub fn token_is_bare_fn_keyword(&mut self) -> bool {
|
||||
if token::is_keyword(keywords::Fn, &self.token) {
|
||||
if self.token.is_keyword(keywords::Fn) {
|
||||
return true
|
||||
}
|
||||
|
||||
if token::is_keyword(keywords::Unsafe, &self.token) ||
|
||||
token::is_keyword(keywords::Once, &self.token) {
|
||||
return self.look_ahead(1, |t| token::is_keyword(keywords::Fn, t))
|
||||
if self.token.is_keyword(keywords::Unsafe) ||
|
||||
self.token.is_keyword(keywords::Once) {
|
||||
return self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
|
||||
}
|
||||
|
||||
false
|
||||
@@ -1001,28 +996,21 @@ pub fn token_is_bare_fn_keyword(&mut self) -> bool {
|
||||
|
||||
/// Is the current token one of the keywords that signals a closure type?
|
||||
pub fn token_is_closure_keyword(&mut self) -> bool {
|
||||
token::is_keyword(keywords::Unsafe, &self.token) ||
|
||||
token::is_keyword(keywords::Once, &self.token)
|
||||
self.token.is_keyword(keywords::Unsafe) ||
|
||||
self.token.is_keyword(keywords::Once)
|
||||
}
|
||||
|
||||
/// Is the current token one of the keywords that signals an old-style
|
||||
/// closure type (with explicit sigil)?
|
||||
pub fn token_is_old_style_closure_keyword(&mut self) -> bool {
|
||||
token::is_keyword(keywords::Unsafe, &self.token) ||
|
||||
token::is_keyword(keywords::Once, &self.token) ||
|
||||
token::is_keyword(keywords::Fn, &self.token)
|
||||
}
|
||||
|
||||
pub fn token_is_lifetime(tok: &token::Token) -> bool {
|
||||
match *tok {
|
||||
token::LIFETIME(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
self.token.is_keyword(keywords::Unsafe) ||
|
||||
self.token.is_keyword(keywords::Once) ||
|
||||
self.token.is_keyword(keywords::Fn)
|
||||
}
|
||||
|
||||
pub fn get_lifetime(&mut self) -> ast::Ident {
|
||||
match self.token {
|
||||
token::LIFETIME(ref ident) => *ident,
|
||||
token::Lifetime(ref ident) => *ident,
|
||||
_ => self.bug("not a lifetime"),
|
||||
}
|
||||
}
|
||||
@@ -1074,7 +1062,7 @@ pub fn parse_proc_type(&mut self) -> Ty_ {
|
||||
|
||||
*/
|
||||
|
||||
let lifetime_defs = if self.eat(&token::LT) {
|
||||
let lifetime_defs = if self.eat(&token::Lt) {
|
||||
let lifetime_defs = self.parse_lifetime_defs();
|
||||
self.expect_gt();
|
||||
lifetime_defs
|
||||
@@ -1103,25 +1091,23 @@ pub fn parse_proc_type(&mut self) -> Ty_ {
|
||||
/// Parses an optional unboxed closure kind (`&:`, `&mut:`, or `:`).
|
||||
pub fn parse_optional_unboxed_closure_kind(&mut self)
|
||||
-> Option<UnboxedClosureKind> {
|
||||
if self.token == token::BINOP(token::AND) &&
|
||||
self.look_ahead(1, |t| {
|
||||
token::is_keyword(keywords::Mut, t)
|
||||
}) &&
|
||||
self.look_ahead(2, |t| *t == token::COLON) {
|
||||
if self.token == token::BinOp(token::And) &&
|
||||
self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) &&
|
||||
self.look_ahead(2, |t| *t == token::Colon) {
|
||||
self.bump();
|
||||
self.bump();
|
||||
self.bump();
|
||||
return Some(FnMutUnboxedClosureKind)
|
||||
}
|
||||
|
||||
if self.token == token::BINOP(token::AND) &&
|
||||
self.look_ahead(1, |t| *t == token::COLON) {
|
||||
if self.token == token::BinOp(token::And) &&
|
||||
self.look_ahead(1, |t| *t == token::Colon) {
|
||||
self.bump();
|
||||
self.bump();
|
||||
return Some(FnUnboxedClosureKind)
|
||||
}
|
||||
|
||||
if self.eat(&token::COLON) {
|
||||
if self.eat(&token::Colon) {
|
||||
return Some(FnOnceUnboxedClosureKind)
|
||||
}
|
||||
|
||||
@@ -1147,7 +1133,7 @@ pub fn parse_ty_closure(&mut self) -> Ty_ {
|
||||
let fn_style = self.parse_unsafety();
|
||||
let onceness = if self.eat_keyword(keywords::Once) {Once} else {Many};
|
||||
|
||||
let lifetime_defs = if self.eat(&token::LT) {
|
||||
let lifetime_defs = if self.eat(&token::Lt) {
|
||||
let lifetime_defs = self.parse_lifetime_defs();
|
||||
self.expect_gt();
|
||||
|
||||
@@ -1156,7 +1142,7 @@ pub fn parse_ty_closure(&mut self) -> Ty_ {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let (optional_unboxed_closure_kind, inputs) = if self.eat(&token::OROR) {
|
||||
let (optional_unboxed_closure_kind, inputs) = if self.eat(&token::OrOr) {
|
||||
(None, Vec::new())
|
||||
} else {
|
||||
self.expect_or();
|
||||
@@ -1165,7 +1151,7 @@ pub fn parse_ty_closure(&mut self) -> Ty_ {
|
||||
self.parse_optional_unboxed_closure_kind();
|
||||
|
||||
let inputs = self.parse_seq_to_before_or(
|
||||
&token::COMMA,
|
||||
&token::Comma,
|
||||
|p| p.parse_arg_general(false));
|
||||
self.expect_or();
|
||||
(optional_unboxed_closure_kind, inputs)
|
||||
@@ -1221,7 +1207,7 @@ pub fn parse_ty_fn_decl(&mut self, allow_variadic: bool)
|
||||
Lifetime_defs
|
||||
|
||||
*/
|
||||
let lifetime_defs = if self.eat(&token::LT) {
|
||||
let lifetime_defs = if self.eat(&token::Lt) {
|
||||
let lifetime_defs = self.parse_lifetime_defs();
|
||||
self.expect_gt();
|
||||
lifetime_defs
|
||||
@@ -1247,7 +1233,7 @@ fn parse_associated_type(&mut self, attrs: Vec<Attribute>)
|
||||
let lo = self.span.lo;
|
||||
let ident = self.parse_ident();
|
||||
let hi = self.span.hi;
|
||||
self.expect(&token::SEMI);
|
||||
self.expect(&token::Semi);
|
||||
AssociatedType {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: mk_sp(lo, hi),
|
||||
@@ -1262,10 +1248,10 @@ fn parse_typedef(&mut self, attrs: Vec<Attribute>, vis: Visibility)
|
||||
-> Typedef {
|
||||
let lo = self.span.lo;
|
||||
let ident = self.parse_ident();
|
||||
self.expect(&token::EQ);
|
||||
self.expect(&token::Eq);
|
||||
let typ = self.parse_ty(true);
|
||||
let hi = self.span.hi;
|
||||
self.expect(&token::SEMI);
|
||||
self.expect(&token::Semi);
|
||||
Typedef {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: mk_sp(lo, hi),
|
||||
@@ -1279,8 +1265,8 @@ fn parse_typedef(&mut self, attrs: Vec<Attribute>, vis: Visibility)
|
||||
/// Parse the items in a trait declaration
|
||||
pub fn parse_trait_items(&mut self) -> Vec<TraitItem> {
|
||||
self.parse_unspanned_seq(
|
||||
&token::LBRACE,
|
||||
&token::RBRACE,
|
||||
&token::LBrace,
|
||||
&token::RBrace,
|
||||
seq_sep_none(),
|
||||
|p| {
|
||||
let attrs = p.parse_outer_attributes();
|
||||
@@ -1317,7 +1303,7 @@ pub fn parse_trait_items(&mut self) -> Vec<TraitItem> {
|
||||
|
||||
let hi = p.last_span.hi;
|
||||
match p.token {
|
||||
token::SEMI => {
|
||||
token::Semi => {
|
||||
p.bump();
|
||||
debug!("parse_trait_methods(): parsing required method");
|
||||
RequiredMethod(TypeMethod {
|
||||
@@ -1333,7 +1319,7 @@ pub fn parse_trait_items(&mut self) -> Vec<TraitItem> {
|
||||
vis: vis,
|
||||
})
|
||||
}
|
||||
token::LBRACE => {
|
||||
token::LBrace => {
|
||||
debug!("parse_trait_methods(): parsing provided method");
|
||||
let (inner_attrs, body) =
|
||||
p.parse_inner_attrs_and_block();
|
||||
@@ -1377,7 +1363,7 @@ pub fn parse_ty_field(&mut self) -> TypeField {
|
||||
let lo = self.span.lo;
|
||||
let mutbl = self.parse_mutability();
|
||||
let id = self.parse_ident();
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
let ty = self.parse_ty(true);
|
||||
let hi = ty.span.hi;
|
||||
ast::TypeField {
|
||||
@@ -1389,9 +1375,9 @@ pub fn parse_ty_field(&mut self) -> TypeField {
|
||||
|
||||
/// Parse optional return type [ -> TY ] in function decl
|
||||
pub fn parse_ret_ty(&mut self) -> (RetStyle, P<Ty>) {
|
||||
return if self.eat(&token::RARROW) {
|
||||
return if self.eat(&token::RArrow) {
|
||||
let lo = self.span.lo;
|
||||
if self.eat(&token::NOT) {
|
||||
if self.eat(&token::Not) {
|
||||
(
|
||||
NoReturn,
|
||||
P(Ty {
|
||||
@@ -1425,9 +1411,9 @@ pub fn parse_ty(&mut self, plus_allowed: bool) -> P<Ty> {
|
||||
|
||||
let lo = self.span.lo;
|
||||
|
||||
let t = if self.token == token::LPAREN {
|
||||
let t = if self.token == token::LParen {
|
||||
self.bump();
|
||||
if self.token == token::RPAREN {
|
||||
if self.token == token::RParen {
|
||||
self.bump();
|
||||
TyNil
|
||||
} else {
|
||||
@@ -1436,9 +1422,9 @@ pub fn parse_ty(&mut self, plus_allowed: bool) -> P<Ty> {
|
||||
// of type t
|
||||
let mut ts = vec!(self.parse_ty(true));
|
||||
let mut one_tuple = false;
|
||||
while self.token == token::COMMA {
|
||||
while self.token == token::Comma {
|
||||
self.bump();
|
||||
if self.token != token::RPAREN {
|
||||
if self.token != token::RParen {
|
||||
ts.push(self.parse_ty(true));
|
||||
}
|
||||
else {
|
||||
@@ -1447,30 +1433,30 @@ pub fn parse_ty(&mut self, plus_allowed: bool) -> P<Ty> {
|
||||
}
|
||||
|
||||
if ts.len() == 1 && !one_tuple {
|
||||
self.expect(&token::RPAREN);
|
||||
self.expect(&token::RParen);
|
||||
TyParen(ts.into_iter().nth(0).unwrap())
|
||||
} else {
|
||||
let t = TyTup(ts);
|
||||
self.expect(&token::RPAREN);
|
||||
self.expect(&token::RParen);
|
||||
t
|
||||
}
|
||||
}
|
||||
} else if self.token == token::TILDE {
|
||||
} else if self.token == token::Tilde {
|
||||
// OWNED POINTER
|
||||
self.bump();
|
||||
let last_span = self.last_span;
|
||||
match self.token {
|
||||
token::LBRACKET => self.obsolete(last_span, ObsoleteOwnedVector),
|
||||
token::LBracket => self.obsolete(last_span, ObsoleteOwnedVector),
|
||||
_ => self.obsolete(last_span, ObsoleteOwnedType)
|
||||
}
|
||||
TyUniq(self.parse_ty(false))
|
||||
} else if self.token == token::BINOP(token::STAR) {
|
||||
} else if self.token == token::BinOp(token::Star) {
|
||||
// STAR POINTER (bare pointer?)
|
||||
self.bump();
|
||||
TyPtr(self.parse_ptr())
|
||||
} else if self.token == token::LBRACKET {
|
||||
} else if self.token == token::LBracket {
|
||||
// VECTOR
|
||||
self.expect(&token::LBRACKET);
|
||||
self.expect(&token::LBracket);
|
||||
let t = self.parse_ty(true);
|
||||
|
||||
// Parse the `, ..e` in `[ int, ..e ]`
|
||||
@@ -1479,24 +1465,24 @@ pub fn parse_ty(&mut self, plus_allowed: bool) -> P<Ty> {
|
||||
None => TyVec(t),
|
||||
Some(suffix) => TyFixedLengthVec(t, suffix)
|
||||
};
|
||||
self.expect(&token::RBRACKET);
|
||||
self.expect(&token::RBracket);
|
||||
t
|
||||
} else if self.token == token::BINOP(token::AND) ||
|
||||
self.token == token::ANDAND {
|
||||
} else if self.token == token::BinOp(token::And) ||
|
||||
self.token == token::AndAnd {
|
||||
// BORROWED POINTER
|
||||
self.expect_and();
|
||||
self.parse_borrowed_pointee()
|
||||
} else if self.is_keyword(keywords::Extern) ||
|
||||
self.is_keyword(keywords::Unsafe) ||
|
||||
} else if self.token.is_keyword(keywords::Extern) ||
|
||||
self.token.is_keyword(keywords::Unsafe) ||
|
||||
self.token_is_bare_fn_keyword() {
|
||||
// BARE FUNCTION
|
||||
self.parse_ty_bare_fn()
|
||||
} else if self.token_is_closure_keyword() ||
|
||||
self.token == token::BINOP(token::OR) ||
|
||||
self.token == token::OROR ||
|
||||
(self.token == token::LT &&
|
||||
self.token == token::BinOp(token::Or) ||
|
||||
self.token == token::OrOr ||
|
||||
(self.token == token::Lt &&
|
||||
self.look_ahead(1, |t| {
|
||||
*t == token::GT || Parser::token_is_lifetime(t)
|
||||
*t == token::Gt || t.is_lifetime()
|
||||
})) {
|
||||
// CLOSURE
|
||||
|
||||
@@ -1504,28 +1490,29 @@ pub fn parse_ty(&mut self, plus_allowed: bool) -> P<Ty> {
|
||||
} else if self.eat_keyword(keywords::Typeof) {
|
||||
// TYPEOF
|
||||
// In order to not be ambiguous, the type must be surrounded by parens.
|
||||
self.expect(&token::LPAREN);
|
||||
self.expect(&token::LParen);
|
||||
let e = self.parse_expr();
|
||||
self.expect(&token::RPAREN);
|
||||
self.expect(&token::RParen);
|
||||
TyTypeof(e)
|
||||
} else if self.eat_keyword(keywords::Proc) {
|
||||
self.parse_proc_type()
|
||||
} else if self.token == token::LT {
|
||||
} else if self.token == token::Lt {
|
||||
// QUALIFIED PATH
|
||||
self.bump();
|
||||
let for_type = self.parse_ty(true);
|
||||
self.expect_keyword(keywords::As);
|
||||
let trait_name = self.parse_path(LifetimeAndTypesWithoutColons);
|
||||
self.expect(&token::GT);
|
||||
self.expect(&token::MOD_SEP);
|
||||
self.expect(&token::Gt);
|
||||
self.expect(&token::ModSep);
|
||||
let item_name = self.parse_ident();
|
||||
TyQPath(P(QPath {
|
||||
for_type: for_type,
|
||||
trait_name: trait_name.path,
|
||||
item_name: item_name,
|
||||
}))
|
||||
} else if self.token == token::MOD_SEP
|
||||
|| is_ident_or_path(&self.token) {
|
||||
} else if self.token == token::ModSep
|
||||
|| self.token.is_ident()
|
||||
|| self.token.is_path() {
|
||||
// NAMED TYPE
|
||||
let mode = if plus_allowed {
|
||||
LifetimeAndTypesAndBounds
|
||||
@@ -1537,7 +1524,7 @@ pub fn parse_ty(&mut self, plus_allowed: bool) -> P<Ty> {
|
||||
bounds
|
||||
} = self.parse_path(mode);
|
||||
TyPath(path, bounds, ast::DUMMY_NODE_ID)
|
||||
} else if self.eat(&token::UNDERSCORE) {
|
||||
} else if self.eat(&token::Underscore) {
|
||||
// TYPE TO BE INFERRED
|
||||
TyInfer
|
||||
} else {
|
||||
@@ -1576,9 +1563,9 @@ pub fn parse_ptr(&mut self) -> MutTy {
|
||||
|
||||
pub fn is_named_argument(&mut self) -> bool {
|
||||
let offset = match self.token {
|
||||
token::BINOP(token::AND) => 1,
|
||||
token::ANDAND => 1,
|
||||
_ if token::is_keyword(keywords::Mut, &self.token) => 1,
|
||||
token::BinOp(token::And) => 1,
|
||||
token::AndAnd => 1,
|
||||
_ if self.token.is_keyword(keywords::Mut) => 1,
|
||||
_ => 0
|
||||
};
|
||||
|
||||
@@ -1586,10 +1573,10 @@ pub fn is_named_argument(&mut self) -> bool {
|
||||
|
||||
if offset == 0 {
|
||||
is_plain_ident_or_underscore(&self.token)
|
||||
&& self.look_ahead(1, |t| *t == token::COLON)
|
||||
&& self.look_ahead(1, |t| *t == token::Colon)
|
||||
} else {
|
||||
self.look_ahead(offset, |t| is_plain_ident_or_underscore(t))
|
||||
&& self.look_ahead(offset + 1, |t| *t == token::COLON)
|
||||
&& self.look_ahead(offset + 1, |t| *t == token::Colon)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1601,7 +1588,7 @@ pub fn parse_arg_general(&mut self, require_name: bool) -> Arg {
|
||||
require_name);
|
||||
let pat = self.parse_pat();
|
||||
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
pat
|
||||
} else {
|
||||
debug!("parse_arg_general ident_to_pat");
|
||||
@@ -1627,7 +1614,7 @@ pub fn parse_arg(&mut self) -> Arg {
|
||||
/// Parse an argument in a lambda header e.g. |arg, arg|
|
||||
pub fn parse_fn_block_arg(&mut self) -> Arg {
|
||||
let pat = self.parse_pat();
|
||||
let t = if self.eat(&token::COLON) {
|
||||
let t = if self.eat(&token::Colon) {
|
||||
self.parse_ty(true)
|
||||
} else {
|
||||
P(Ty {
|
||||
@@ -1644,8 +1631,8 @@ pub fn parse_fn_block_arg(&mut self) -> Arg {
|
||||
}
|
||||
|
||||
pub fn maybe_parse_fixed_vstore(&mut self) -> Option<P<ast::Expr>> {
|
||||
if self.token == token::COMMA &&
|
||||
self.look_ahead(1, |t| *t == token::DOTDOT) {
|
||||
if self.token == token::Comma &&
|
||||
self.look_ahead(1, |t| *t == token::DotDot) {
|
||||
self.bump();
|
||||
self.bump();
|
||||
Some(self.parse_expr())
|
||||
@@ -1657,24 +1644,24 @@ pub fn maybe_parse_fixed_vstore(&mut self) -> Option<P<ast::Expr>> {
|
||||
/// Matches token_lit = LIT_INTEGER | ...
|
||||
pub fn lit_from_token(&mut self, tok: &token::Token) -> Lit_ {
|
||||
match *tok {
|
||||
token::LIT_BYTE(i) => LitByte(parse::byte_lit(i.as_str()).val0()),
|
||||
token::LIT_CHAR(i) => LitChar(parse::char_lit(i.as_str()).val0()),
|
||||
token::LIT_INTEGER(s) => parse::integer_lit(s.as_str(),
|
||||
token::LitByte(i) => LitByte(parse::byte_lit(i.as_str()).val0()),
|
||||
token::LitChar(i) => LitChar(parse::char_lit(i.as_str()).val0()),
|
||||
token::LitInteger(s) => parse::integer_lit(s.as_str(),
|
||||
&self.sess.span_diagnostic, self.span),
|
||||
token::LIT_FLOAT(s) => parse::float_lit(s.as_str()),
|
||||
token::LIT_STR(s) => {
|
||||
token::LitFloat(s) => parse::float_lit(s.as_str()),
|
||||
token::LitStr(s) => {
|
||||
LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()),
|
||||
ast::CookedStr)
|
||||
}
|
||||
token::LIT_STR_RAW(s, n) => {
|
||||
token::LitStrRaw(s, n) => {
|
||||
LitStr(token::intern_and_get_ident(parse::raw_str_lit(s.as_str()).as_slice()),
|
||||
ast::RawStr(n))
|
||||
}
|
||||
token::LIT_BINARY(i) =>
|
||||
token::LitBinary(i) =>
|
||||
LitBinary(parse::binary_lit(i.as_str())),
|
||||
token::LIT_BINARY_RAW(i, _) =>
|
||||
token::LitBinaryRaw(i, _) =>
|
||||
LitBinary(Rc::new(i.as_str().as_bytes().iter().map(|&x| x).collect())),
|
||||
token::LPAREN => { self.expect(&token::RPAREN); LitNil },
|
||||
token::LParen => { self.expect(&token::RParen); LitNil },
|
||||
_ => { self.unexpected_last(tok); }
|
||||
}
|
||||
}
|
||||
@@ -1697,7 +1684,7 @@ pub fn parse_lit(&mut self) -> Lit {
|
||||
/// matches '-' lit | lit
|
||||
pub fn parse_literal_maybe_minus(&mut self) -> P<Expr> {
|
||||
let minus_lo = self.span.lo;
|
||||
let minus_present = self.eat(&token::BINOP(token::MINUS));
|
||||
let minus_present = self.eat(&token::BinOp(token::Minus));
|
||||
|
||||
let lo = self.span.lo;
|
||||
let literal = P(self.parse_lit());
|
||||
@@ -1720,11 +1707,11 @@ pub fn parse_literal_maybe_minus(&mut self) -> P<Expr> {
|
||||
pub fn parse_path(&mut self, mode: PathParsingMode) -> PathAndBounds {
|
||||
// Check for a whole path...
|
||||
let found = match self.token {
|
||||
INTERPOLATED(token::NtPath(_)) => Some(self.bump_and_get()),
|
||||
token::Interpolated(token::NtPath(_)) => Some(self.bump_and_get()),
|
||||
_ => None,
|
||||
};
|
||||
match found {
|
||||
Some(INTERPOLATED(token::NtPath(box path))) => {
|
||||
Some(token::Interpolated(token::NtPath(box path))) => {
|
||||
return PathAndBounds {
|
||||
path: path,
|
||||
bounds: None
|
||||
@@ -1734,7 +1721,7 @@ pub fn parse_path(&mut self, mode: PathParsingMode) -> PathAndBounds {
|
||||
}
|
||||
|
||||
let lo = self.span.lo;
|
||||
let is_global = self.eat(&token::MOD_SEP);
|
||||
let is_global = self.eat(&token::ModSep);
|
||||
|
||||
// Parse any number of segments and bound sets. A segment is an
|
||||
// identifier followed by an optional lifetime and a set of types.
|
||||
@@ -1747,7 +1734,7 @@ pub fn parse_path(&mut self, mode: PathParsingMode) -> PathAndBounds {
|
||||
// Parse the '::' before type parameters if it's required. If
|
||||
// it is required and wasn't present, then we're done.
|
||||
if mode == LifetimeAndTypesWithColons &&
|
||||
!self.eat(&token::MOD_SEP) {
|
||||
!self.eat(&token::ModSep) {
|
||||
segments.push(ast::PathSegment {
|
||||
identifier: identifier,
|
||||
lifetimes: Vec::new(),
|
||||
@@ -1778,7 +1765,7 @@ pub fn parse_path(&mut self, mode: PathParsingMode) -> PathAndBounds {
|
||||
// a double colon to get here in the first place.
|
||||
if !(mode == LifetimeAndTypesWithColons &&
|
||||
!any_lifetime_or_types) {
|
||||
if !self.eat(&token::MOD_SEP) {
|
||||
if !self.eat(&token::ModSep) {
|
||||
break
|
||||
}
|
||||
}
|
||||
@@ -1790,7 +1777,7 @@ pub fn parse_path(&mut self, mode: PathParsingMode) -> PathAndBounds {
|
||||
// error.
|
||||
let opt_bounds = {
|
||||
if mode == LifetimeAndTypesAndBounds &&
|
||||
self.eat(&token::BINOP(token::PLUS))
|
||||
self.eat(&token::BinOp(token::Plus))
|
||||
{
|
||||
let bounds = self.parse_ty_param_bounds();
|
||||
|
||||
@@ -1828,7 +1815,7 @@ pub fn parse_path(&mut self, mode: PathParsingMode) -> PathAndBounds {
|
||||
/// parses 0 or 1 lifetime
|
||||
pub fn parse_opt_lifetime(&mut self) -> Option<ast::Lifetime> {
|
||||
match self.token {
|
||||
token::LIFETIME(..) => {
|
||||
token::Lifetime(..) => {
|
||||
Some(self.parse_lifetime())
|
||||
}
|
||||
_ => {
|
||||
@@ -1841,7 +1828,7 @@ pub fn parse_opt_lifetime(&mut self) -> Option<ast::Lifetime> {
|
||||
/// Matches lifetime = LIFETIME
|
||||
pub fn parse_lifetime(&mut self) -> ast::Lifetime {
|
||||
match self.token {
|
||||
token::LIFETIME(i) => {
|
||||
token::Lifetime(i) => {
|
||||
let span = self.span;
|
||||
self.bump();
|
||||
return ast::Lifetime {
|
||||
@@ -1865,11 +1852,11 @@ pub fn parse_lifetime_defs(&mut self) -> Vec<ast::LifetimeDef> {
|
||||
let mut res = Vec::new();
|
||||
loop {
|
||||
match self.token {
|
||||
token::LIFETIME(_) => {
|
||||
token::Lifetime(_) => {
|
||||
let lifetime = self.parse_lifetime();
|
||||
let bounds =
|
||||
if self.eat(&token::COLON) {
|
||||
self.parse_lifetimes(token::BINOP(token::PLUS))
|
||||
if self.eat(&token::Colon) {
|
||||
self.parse_lifetimes(token::BinOp(token::Plus))
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
@@ -1883,9 +1870,9 @@ pub fn parse_lifetime_defs(&mut self) -> Vec<ast::LifetimeDef> {
|
||||
}
|
||||
|
||||
match self.token {
|
||||
token::COMMA => { self.bump(); }
|
||||
token::GT => { return res; }
|
||||
token::BINOP(token::SHR) => { return res; }
|
||||
token::Comma => { self.bump(); }
|
||||
token::Gt => { return res; }
|
||||
token::BinOp(token::Shr) => { return res; }
|
||||
_ => {
|
||||
let msg = format!("expected `,` or `>` after lifetime \
|
||||
name, got: {}",
|
||||
@@ -1910,7 +1897,7 @@ pub fn parse_lifetimes(&mut self, sep: token::Token) -> Vec<ast::Lifetime> {
|
||||
let mut res = Vec::new();
|
||||
loop {
|
||||
match self.token {
|
||||
token::LIFETIME(_) => {
|
||||
token::Lifetime(_) => {
|
||||
res.push(self.parse_lifetime());
|
||||
}
|
||||
_ => {
|
||||
@@ -1926,11 +1913,6 @@ pub fn parse_lifetimes(&mut self, sep: token::Token) -> Vec<ast::Lifetime> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn token_is_mutability(tok: &token::Token) -> bool {
|
||||
token::is_keyword(keywords::Mut, tok) ||
|
||||
token::is_keyword(keywords::Const, tok)
|
||||
}
|
||||
|
||||
/// Parse mutability declaration (mut/const/imm)
|
||||
pub fn parse_mutability(&mut self) -> Mutability {
|
||||
if self.eat_keyword(keywords::Mut) {
|
||||
@@ -1945,7 +1927,7 @@ pub fn parse_field(&mut self) -> Field {
|
||||
let lo = self.span.lo;
|
||||
let i = self.parse_ident();
|
||||
let hi = self.last_span.hi;
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
let e = self.parse_expr();
|
||||
ast::Field {
|
||||
ident: spanned(lo, hi, i),
|
||||
@@ -2043,31 +2025,31 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
let ex: Expr_;
|
||||
|
||||
match self.token {
|
||||
token::LPAREN => {
|
||||
token::LParen => {
|
||||
self.bump();
|
||||
// (e) is parenthesized e
|
||||
// (e,) is a tuple with only one field, e
|
||||
let mut trailing_comma = false;
|
||||
if self.token == token::RPAREN {
|
||||
if self.token == token::RParen {
|
||||
hi = self.span.hi;
|
||||
self.bump();
|
||||
let lit = P(spanned(lo, hi, LitNil));
|
||||
return self.mk_expr(lo, hi, ExprLit(lit));
|
||||
}
|
||||
let mut es = vec!(self.parse_expr());
|
||||
self.commit_expr(&**es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]);
|
||||
while self.token == token::COMMA {
|
||||
self.commit_expr(&**es.last().unwrap(), &[], &[token::Comma, token::RParen]);
|
||||
while self.token == token::Comma {
|
||||
self.bump();
|
||||
if self.token != token::RPAREN {
|
||||
if self.token != token::RParen {
|
||||
es.push(self.parse_expr());
|
||||
self.commit_expr(&**es.last().unwrap(), &[],
|
||||
&[token::COMMA, token::RPAREN]);
|
||||
&[token::Comma, token::RParen]);
|
||||
} else {
|
||||
trailing_comma = true;
|
||||
}
|
||||
}
|
||||
hi = self.span.hi;
|
||||
self.commit_expr_expecting(&**es.last().unwrap(), token::RPAREN);
|
||||
self.commit_expr_expecting(&**es.last().unwrap(), token::RParen);
|
||||
|
||||
return if es.len() == 1 && !trailing_comma {
|
||||
self.mk_expr(lo, hi, ExprParen(es.into_iter().nth(0).unwrap()))
|
||||
@@ -2075,50 +2057,50 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
self.mk_expr(lo, hi, ExprTup(es))
|
||||
}
|
||||
},
|
||||
token::LBRACE => {
|
||||
token::LBrace => {
|
||||
self.bump();
|
||||
let blk = self.parse_block_tail(lo, DefaultBlock);
|
||||
return self.mk_expr(blk.span.lo, blk.span.hi,
|
||||
ExprBlock(blk));
|
||||
},
|
||||
token::BINOP(token::OR) | token::OROR => {
|
||||
token::BinOp(token::Or) | token::OrOr => {
|
||||
return self.parse_lambda_expr(CaptureByRef);
|
||||
},
|
||||
// FIXME #13626: Should be able to stick in
|
||||
// token::SELF_KEYWORD_NAME
|
||||
token::IDENT(id @ ast::Ident{
|
||||
name: ast::Name(token::SELF_KEYWORD_NAME_NUM),
|
||||
ctxt: _
|
||||
} ,false) => {
|
||||
token::Ident(id @ ast::Ident {
|
||||
name: ast::Name(token::SELF_KEYWORD_NAME_NUM),
|
||||
ctxt: _
|
||||
}, token::Plain) => {
|
||||
self.bump();
|
||||
let path = ast_util::ident_to_path(mk_sp(lo, hi), id);
|
||||
ex = ExprPath(path);
|
||||
hi = self.last_span.hi;
|
||||
}
|
||||
token::LBRACKET => {
|
||||
token::LBracket => {
|
||||
self.bump();
|
||||
|
||||
if self.token == token::RBRACKET {
|
||||
if self.token == token::RBracket {
|
||||
// Empty vector.
|
||||
self.bump();
|
||||
ex = ExprVec(Vec::new());
|
||||
} else {
|
||||
// Nonempty vector.
|
||||
let first_expr = self.parse_expr();
|
||||
if self.token == token::COMMA &&
|
||||
self.look_ahead(1, |t| *t == token::DOTDOT) {
|
||||
if self.token == token::Comma &&
|
||||
self.look_ahead(1, |t| *t == token::DotDot) {
|
||||
// Repeating vector syntax: [ 0, ..512 ]
|
||||
self.bump();
|
||||
self.bump();
|
||||
let count = self.parse_expr();
|
||||
self.expect(&token::RBRACKET);
|
||||
self.expect(&token::RBracket);
|
||||
ex = ExprRepeat(first_expr, count);
|
||||
} else if self.token == token::COMMA {
|
||||
} else if self.token == token::Comma {
|
||||
// Vector with two or more elements.
|
||||
self.bump();
|
||||
let remaining_exprs = self.parse_seq_to_end(
|
||||
&token::RBRACKET,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::RBracket,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_expr()
|
||||
);
|
||||
let mut exprs = vec!(first_expr);
|
||||
@@ -2126,7 +2108,7 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
ex = ExprVec(exprs);
|
||||
} else {
|
||||
// Vector with one element.
|
||||
self.expect(&token::RBRACKET);
|
||||
self.expect(&token::RBracket);
|
||||
ex = ExprVec(vec!(first_expr));
|
||||
}
|
||||
}
|
||||
@@ -2158,10 +2140,10 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
if self.eat_keyword(keywords::While) {
|
||||
return self.parse_while_expr(None);
|
||||
}
|
||||
if Parser::token_is_lifetime(&self.token) {
|
||||
if self.token.is_lifetime() {
|
||||
let lifetime = self.get_lifetime();
|
||||
self.bump();
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
if self.eat_keyword(keywords::While) {
|
||||
return self.parse_while_expr(Some(lifetime))
|
||||
}
|
||||
@@ -2178,7 +2160,7 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
}
|
||||
if self.eat_keyword(keywords::Continue) {
|
||||
let lo = self.span.lo;
|
||||
let ex = if Parser::token_is_lifetime(&self.token) {
|
||||
let ex = if self.token.is_lifetime() {
|
||||
let lifetime = self.get_lifetime();
|
||||
self.bump();
|
||||
ExprAgain(Some(lifetime))
|
||||
@@ -2198,7 +2180,7 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
}
|
||||
if self.eat_keyword(keywords::Return) {
|
||||
// RETURN expression
|
||||
if can_begin_expr(&self.token) {
|
||||
if self.token.can_begin_expr() {
|
||||
let e = self.parse_expr();
|
||||
hi = e.span.hi;
|
||||
ex = ExprRet(Some(e));
|
||||
@@ -2207,7 +2189,7 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
}
|
||||
} else if self.eat_keyword(keywords::Break) {
|
||||
// BREAK expression
|
||||
if Parser::token_is_lifetime(&self.token) {
|
||||
if self.token.is_lifetime() {
|
||||
let lifetime = self.get_lifetime();
|
||||
self.bump();
|
||||
ex = ExprBreak(Some(lifetime));
|
||||
@@ -2215,19 +2197,19 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
ex = ExprBreak(None);
|
||||
}
|
||||
hi = self.span.hi;
|
||||
} else if self.token == token::MOD_SEP ||
|
||||
is_ident(&self.token) &&
|
||||
!self.is_keyword(keywords::True) &&
|
||||
!self.is_keyword(keywords::False) {
|
||||
} else if self.token == token::ModSep ||
|
||||
self.token.is_ident() &&
|
||||
!self.token.is_keyword(keywords::True) &&
|
||||
!self.token.is_keyword(keywords::False) {
|
||||
let pth =
|
||||
self.parse_path(LifetimeAndTypesWithColons).path;
|
||||
|
||||
// `!`, as an operator, is prefix, so we know this isn't that
|
||||
if self.token == token::NOT {
|
||||
if self.token == token::Not {
|
||||
// MACRO INVOCATION expression
|
||||
self.bump();
|
||||
|
||||
let ket = token::close_delimiter_for(&self.token)
|
||||
let ket = self.token.get_close_delimiter()
|
||||
.unwrap_or_else(|| {
|
||||
self.fatal("expected open delimiter")
|
||||
});
|
||||
@@ -2245,7 +2227,7 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
tts,
|
||||
EMPTY_CTXT));
|
||||
}
|
||||
if self.token == token::LBRACE {
|
||||
if self.token == token::LBrace {
|
||||
// This is a struct literal, unless we're prohibited
|
||||
// from parsing struct literals here.
|
||||
if !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL) {
|
||||
@@ -2254,16 +2236,16 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
let mut fields = Vec::new();
|
||||
let mut base = None;
|
||||
|
||||
while self.token != token::RBRACE {
|
||||
if self.eat(&token::DOTDOT) {
|
||||
while self.token != token::RBrace {
|
||||
if self.eat(&token::DotDot) {
|
||||
base = Some(self.parse_expr());
|
||||
break;
|
||||
}
|
||||
|
||||
fields.push(self.parse_field());
|
||||
self.commit_expr(&*fields.last().unwrap().expr,
|
||||
&[token::COMMA],
|
||||
&[token::RBRACE]);
|
||||
&[token::Comma],
|
||||
&[token::RBrace]);
|
||||
}
|
||||
|
||||
if fields.len() == 0 && base.is_none() {
|
||||
@@ -2276,7 +2258,7 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
}
|
||||
|
||||
hi = self.span.hi;
|
||||
self.expect(&token::RBRACE);
|
||||
self.expect(&token::RBrace);
|
||||
ex = ExprStruct(pth, fields, base);
|
||||
return self.mk_expr(lo, hi, ex);
|
||||
}
|
||||
@@ -2299,7 +2281,7 @@ pub fn parse_bottom_expr(&mut self) -> P<Expr> {
|
||||
/// Parse a block or unsafe block
|
||||
pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode)
|
||||
-> P<Expr> {
|
||||
self.expect(&token::LBRACE);
|
||||
self.expect(&token::LBrace);
|
||||
let blk = self.parse_block_tail(lo, blk_mode);
|
||||
return self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk));
|
||||
}
|
||||
@@ -2316,13 +2298,13 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
let mut hi;
|
||||
loop {
|
||||
// expr.f
|
||||
if self.eat(&token::DOT) {
|
||||
if self.eat(&token::Dot) {
|
||||
match self.token {
|
||||
token::IDENT(i, _) => {
|
||||
token::Ident(i, _) => {
|
||||
let dot = self.last_span.hi;
|
||||
hi = self.span.hi;
|
||||
self.bump();
|
||||
let (_, tys) = if self.eat(&token::MOD_SEP) {
|
||||
let (_, tys) = if self.eat(&token::ModSep) {
|
||||
self.expect_lt();
|
||||
self.parse_generic_values_after_lt()
|
||||
} else {
|
||||
@@ -2331,11 +2313,11 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
|
||||
// expr.f() method call
|
||||
match self.token {
|
||||
token::LPAREN => {
|
||||
token::LParen => {
|
||||
let mut es = self.parse_unspanned_seq(
|
||||
&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_expr()
|
||||
);
|
||||
hi = self.last_span.hi;
|
||||
@@ -2352,12 +2334,12 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
}
|
||||
}
|
||||
}
|
||||
token::LIT_INTEGER(n) => {
|
||||
token::LitInteger(n) => {
|
||||
let index = n.as_str();
|
||||
let dot = self.last_span.hi;
|
||||
hi = self.span.hi;
|
||||
self.bump();
|
||||
let (_, tys) = if self.eat(&token::MOD_SEP) {
|
||||
let (_, tys) = if self.eat(&token::ModSep) {
|
||||
self.expect_lt();
|
||||
self.parse_generic_values_after_lt()
|
||||
} else {
|
||||
@@ -2377,7 +2359,7 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
}
|
||||
}
|
||||
}
|
||||
token::LIT_FLOAT(n) => {
|
||||
token::LitFloat(n) => {
|
||||
self.bump();
|
||||
let last_span = self.last_span;
|
||||
self.span_err(last_span,
|
||||
@@ -2394,11 +2376,11 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
if self.expr_is_complete(&*e) { break; }
|
||||
match self.token {
|
||||
// expr(...)
|
||||
token::LPAREN => {
|
||||
token::LParen => {
|
||||
let es = self.parse_unspanned_seq(
|
||||
&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_expr()
|
||||
);
|
||||
hi = self.last_span.hi;
|
||||
@@ -2411,7 +2393,7 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
// Could be either an index expression or a slicing expression.
|
||||
// Any slicing non-terminal can have a mutable version with `mut`
|
||||
// after the opening square bracket.
|
||||
token::LBRACKET => {
|
||||
token::LBracket => {
|
||||
self.bump();
|
||||
let mutbl = if self.eat_keyword(keywords::Mut) {
|
||||
MutMutable
|
||||
@@ -2420,18 +2402,18 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
};
|
||||
match self.token {
|
||||
// e[]
|
||||
token::RBRACKET => {
|
||||
token::RBracket => {
|
||||
self.bump();
|
||||
hi = self.span.hi;
|
||||
let slice = self.mk_slice(e, None, None, mutbl);
|
||||
e = self.mk_expr(lo, hi, slice)
|
||||
}
|
||||
// e[..e]
|
||||
token::DOTDOT => {
|
||||
token::DotDot => {
|
||||
self.bump();
|
||||
match self.token {
|
||||
// e[..]
|
||||
token::RBRACKET => {
|
||||
token::RBracket => {
|
||||
self.bump();
|
||||
hi = self.span.hi;
|
||||
let slice = self.mk_slice(e, None, None, mutbl);
|
||||
@@ -2445,7 +2427,7 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
_ => {
|
||||
hi = self.span.hi;
|
||||
let e2 = self.parse_expr();
|
||||
self.commit_expr_expecting(&*e2, token::RBRACKET);
|
||||
self.commit_expr_expecting(&*e2, token::RBracket);
|
||||
let slice = self.mk_slice(e, None, Some(e2), mutbl);
|
||||
e = self.mk_expr(lo, hi, slice)
|
||||
}
|
||||
@@ -2456,18 +2438,18 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
let ix = self.parse_expr();
|
||||
match self.token {
|
||||
// e[e..] | e[e..e]
|
||||
token::DOTDOT => {
|
||||
token::DotDot => {
|
||||
self.bump();
|
||||
let e2 = match self.token {
|
||||
// e[e..]
|
||||
token::RBRACKET => {
|
||||
token::RBracket => {
|
||||
self.bump();
|
||||
None
|
||||
}
|
||||
// e[e..e]
|
||||
_ => {
|
||||
let e2 = self.parse_expr();
|
||||
self.commit_expr_expecting(&*e2, token::RBRACKET);
|
||||
self.commit_expr_expecting(&*e2, token::RBracket);
|
||||
Some(e2)
|
||||
}
|
||||
};
|
||||
@@ -2482,7 +2464,7 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
"`mut` keyword is invalid in index expressions");
|
||||
}
|
||||
hi = self.span.hi;
|
||||
self.commit_expr_expecting(&*ix, token::RBRACKET);
|
||||
self.commit_expr_expecting(&*ix, token::RBracket);
|
||||
let index = self.mk_index(e, ix);
|
||||
e = self.mk_expr(lo, hi, index)
|
||||
}
|
||||
@@ -2502,11 +2484,11 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
|
||||
pub fn parse_sep_and_kleene_op(&mut self) -> (Option<token::Token>, ast::KleeneOp) {
|
||||
fn parse_kleene_op(parser: &mut Parser) -> Option<ast::KleeneOp> {
|
||||
match parser.token {
|
||||
token::BINOP(token::STAR) => {
|
||||
token::BinOp(token::Star) => {
|
||||
parser.bump();
|
||||
Some(ast::ZeroOrMore)
|
||||
},
|
||||
token::BINOP(token::PLUS) => {
|
||||
token::BinOp(token::Plus) => {
|
||||
parser.bump();
|
||||
Some(ast::OneOrMore)
|
||||
},
|
||||
@@ -2543,7 +2525,7 @@ pub fn parse_token_tree(&mut self) -> TokenTree {
|
||||
fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
|
||||
maybe_whole!(deref p, NtTT);
|
||||
match p.token {
|
||||
token::RPAREN | token::RBRACE | token::RBRACKET => {
|
||||
token::RParen | token::RBrace | token::RBracket => {
|
||||
// This is a conservative error: only report the last unclosed delimiter. The
|
||||
// previous unclosed delimiters could actually be closed! The parser just hasn't
|
||||
// gotten to them yet.
|
||||
@@ -2556,14 +2538,14 @@ fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
|
||||
token_str).as_slice())
|
||||
},
|
||||
/* we ought to allow different depths of unquotation */
|
||||
token::DOLLAR if p.quote_depth > 0u => {
|
||||
token::Dollar if p.quote_depth > 0u => {
|
||||
p.bump();
|
||||
let sp = p.span;
|
||||
|
||||
if p.token == token::LPAREN {
|
||||
if p.token == token::LParen {
|
||||
let seq = p.parse_seq(
|
||||
&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_none(),
|
||||
|p| p.parse_token_tree()
|
||||
);
|
||||
@@ -2582,8 +2564,8 @@ fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
|
||||
}
|
||||
}
|
||||
|
||||
match (&self.token, token::close_delimiter_for(&self.token)) {
|
||||
(&token::EOF, _) => {
|
||||
match (&self.token, self.token.get_close_delimiter()) {
|
||||
(&token::Eof, _) => {
|
||||
let open_braces = self.open_braces.clone();
|
||||
for sp in open_braces.iter() {
|
||||
self.span_note(*sp, "Did you mean to close this delimiter?");
|
||||
@@ -2628,7 +2610,7 @@ fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
|
||||
// up to EOF.
|
||||
pub fn parse_all_token_trees(&mut self) -> Vec<TokenTree> {
|
||||
let mut tts = Vec::new();
|
||||
while self.token != token::EOF {
|
||||
while self.token != token::Eof {
|
||||
tts.push(self.parse_token_tree());
|
||||
}
|
||||
tts
|
||||
@@ -2639,7 +2621,7 @@ pub fn parse_matchers(&mut self) -> Vec<Matcher> {
|
||||
// the interpolation of Matcher's
|
||||
maybe_whole!(self, NtMatchers);
|
||||
let mut name_idx = 0u;
|
||||
match token::close_delimiter_for(&self.token) {
|
||||
match self.token.get_close_delimiter() {
|
||||
Some(other_delimiter) => {
|
||||
self.bump();
|
||||
self.parse_matcher_subseq_upto(&mut name_idx, &other_delimiter)
|
||||
@@ -2659,8 +2641,8 @@ pub fn parse_matcher_subseq_upto(&mut self,
|
||||
let mut lparens = 0u;
|
||||
|
||||
while self.token != *ket || lparens > 0u {
|
||||
if self.token == token::LPAREN { lparens += 1u; }
|
||||
if self.token == token::RPAREN { lparens -= 1u; }
|
||||
if self.token == token::LParen { lparens += 1u; }
|
||||
if self.token == token::RParen { lparens -= 1u; }
|
||||
ret_val.push(self.parse_matcher(name_idx));
|
||||
}
|
||||
|
||||
@@ -2672,13 +2654,13 @@ pub fn parse_matcher_subseq_upto(&mut self,
|
||||
pub fn parse_matcher(&mut self, name_idx: &mut uint) -> Matcher {
|
||||
let lo = self.span.lo;
|
||||
|
||||
let m = if self.token == token::DOLLAR {
|
||||
let m = if self.token == token::Dollar {
|
||||
self.bump();
|
||||
if self.token == token::LPAREN {
|
||||
if self.token == token::LParen {
|
||||
let name_idx_lo = *name_idx;
|
||||
self.bump();
|
||||
let ms = self.parse_matcher_subseq_upto(name_idx,
|
||||
&token::RPAREN);
|
||||
&token::RParen);
|
||||
if ms.len() == 0u {
|
||||
self.fatal("repetition body must be nonempty");
|
||||
}
|
||||
@@ -2686,7 +2668,7 @@ pub fn parse_matcher(&mut self, name_idx: &mut uint) -> Matcher {
|
||||
MatchSeq(ms, sep, kleene_op, name_idx_lo, *name_idx)
|
||||
} else {
|
||||
let bound_to = self.parse_ident();
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
let nt_name = self.parse_ident();
|
||||
let m = MatchNonterminal(bound_to, nt_name, *name_idx);
|
||||
*name_idx += 1;
|
||||
@@ -2706,36 +2688,36 @@ pub fn parse_prefix_expr(&mut self) -> P<Expr> {
|
||||
|
||||
let ex;
|
||||
match self.token {
|
||||
token::NOT => {
|
||||
token::Not => {
|
||||
self.bump();
|
||||
let e = self.parse_prefix_expr();
|
||||
hi = e.span.hi;
|
||||
ex = self.mk_unary(UnNot, e);
|
||||
}
|
||||
token::BINOP(token::MINUS) => {
|
||||
token::BinOp(token::Minus) => {
|
||||
self.bump();
|
||||
let e = self.parse_prefix_expr();
|
||||
hi = e.span.hi;
|
||||
ex = self.mk_unary(UnNeg, e);
|
||||
}
|
||||
token::BINOP(token::STAR) => {
|
||||
token::BinOp(token::Star) => {
|
||||
self.bump();
|
||||
let e = self.parse_prefix_expr();
|
||||
hi = e.span.hi;
|
||||
ex = self.mk_unary(UnDeref, e);
|
||||
}
|
||||
token::BINOP(token::AND) | token::ANDAND => {
|
||||
token::BinOp(token::And) | token::AndAnd => {
|
||||
self.expect_and();
|
||||
let m = self.parse_mutability();
|
||||
let e = self.parse_prefix_expr();
|
||||
hi = e.span.hi;
|
||||
ex = ExprAddrOf(m, e);
|
||||
}
|
||||
token::TILDE => {
|
||||
token::Tilde => {
|
||||
self.bump();
|
||||
let last_span = self.last_span;
|
||||
match self.token {
|
||||
token::LBRACKET => self.obsolete(last_span, ObsoleteOwnedVector),
|
||||
token::LBracket => self.obsolete(last_span, ObsoleteOwnedVector),
|
||||
_ => self.obsolete(last_span, ObsoleteOwnedExpr)
|
||||
}
|
||||
|
||||
@@ -2743,19 +2725,19 @@ pub fn parse_prefix_expr(&mut self) -> P<Expr> {
|
||||
hi = e.span.hi;
|
||||
ex = self.mk_unary(UnUniq, e);
|
||||
}
|
||||
token::IDENT(_, _) => {
|
||||
if !self.is_keyword(keywords::Box) {
|
||||
token::Ident(_, _) => {
|
||||
if !self.token.is_keyword(keywords::Box) {
|
||||
return self.parse_dot_or_call_expr();
|
||||
}
|
||||
|
||||
self.bump();
|
||||
|
||||
// Check for a place: `box(PLACE) EXPR`.
|
||||
if self.eat(&token::LPAREN) {
|
||||
if self.eat(&token::LParen) {
|
||||
// Support `box() EXPR` as the default.
|
||||
if !self.eat(&token::RPAREN) {
|
||||
if !self.eat(&token::RParen) {
|
||||
let place = self.parse_expr();
|
||||
self.expect(&token::RPAREN);
|
||||
self.expect(&token::RParen);
|
||||
let subexpression = self.parse_prefix_expr();
|
||||
hi = subexpression.span.hi;
|
||||
ex = ExprBox(place, subexpression);
|
||||
@@ -2785,12 +2767,12 @@ pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: uint) -> P<Expr> {
|
||||
|
||||
// Prevent dynamic borrow errors later on by limiting the
|
||||
// scope of the borrows.
|
||||
if self.token == token::BINOP(token::OR) &&
|
||||
if self.token == token::BinOp(token::Or) &&
|
||||
self.restrictions.contains(RESTRICTION_NO_BAR_OP) {
|
||||
return lhs;
|
||||
}
|
||||
|
||||
let cur_opt = token_to_binop(&self.token);
|
||||
let cur_opt = self.token.to_binop();
|
||||
match cur_opt {
|
||||
Some(cur_op) => {
|
||||
let cur_prec = operator_prec(cur_op);
|
||||
@@ -2829,25 +2811,25 @@ pub fn parse_assign_expr(&mut self) -> P<Expr> {
|
||||
let lhs = self.parse_binops();
|
||||
let restrictions = self.restrictions & RESTRICTION_NO_STRUCT_LITERAL;
|
||||
match self.token {
|
||||
token::EQ => {
|
||||
token::Eq => {
|
||||
self.bump();
|
||||
let rhs = self.parse_expr_res(restrictions);
|
||||
self.mk_expr(lo, rhs.span.hi, ExprAssign(lhs, rhs))
|
||||
}
|
||||
token::BINOPEQ(op) => {
|
||||
token::BinOpEq(op) => {
|
||||
self.bump();
|
||||
let rhs = self.parse_expr_res(restrictions);
|
||||
let aop = match op {
|
||||
token::PLUS => BiAdd,
|
||||
token::MINUS => BiSub,
|
||||
token::STAR => BiMul,
|
||||
token::SLASH => BiDiv,
|
||||
token::PERCENT => BiRem,
|
||||
token::CARET => BiBitXor,
|
||||
token::AND => BiBitAnd,
|
||||
token::OR => BiBitOr,
|
||||
token::SHL => BiShl,
|
||||
token::SHR => BiShr
|
||||
token::Plus => BiAdd,
|
||||
token::Minus => BiSub,
|
||||
token::Star => BiMul,
|
||||
token::Slash => BiDiv,
|
||||
token::Percent => BiRem,
|
||||
token::Caret => BiBitXor,
|
||||
token::And => BiBitAnd,
|
||||
token::Or => BiBitOr,
|
||||
token::Shl => BiShl,
|
||||
token::Shr => BiShr
|
||||
};
|
||||
let rhs_span = rhs.span;
|
||||
let assign_op = self.mk_assign_op(aop, lhs, rhs);
|
||||
@@ -2861,7 +2843,7 @@ pub fn parse_assign_expr(&mut self) -> P<Expr> {
|
||||
|
||||
/// Parse an 'if' or 'if let' expression ('if' token already eaten)
|
||||
pub fn parse_if_expr(&mut self) -> P<Expr> {
|
||||
if self.is_keyword(keywords::Let) {
|
||||
if self.token.is_keyword(keywords::Let) {
|
||||
return self.parse_if_let_expr();
|
||||
}
|
||||
let lo = self.last_span.lo;
|
||||
@@ -2882,7 +2864,7 @@ pub fn parse_if_let_expr(&mut self) -> P<Expr> {
|
||||
let lo = self.last_span.lo;
|
||||
self.expect_keyword(keywords::Let);
|
||||
let pat = self.parse_pat();
|
||||
self.expect(&token::EQ);
|
||||
self.expect(&token::Eq);
|
||||
let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
|
||||
let thn = self.parse_block();
|
||||
let (hi, els) = if self.eat_keyword(keywords::Else) {
|
||||
@@ -2952,7 +2934,7 @@ pub fn parse_for_expr(&mut self, opt_ident: Option<ast::Ident>) -> P<Expr> {
|
||||
|
||||
/// Parse a 'while' or 'while let' expression ('while' token already eaten)
|
||||
pub fn parse_while_expr(&mut self, opt_ident: Option<ast::Ident>) -> P<Expr> {
|
||||
if self.is_keyword(keywords::Let) {
|
||||
if self.token.is_keyword(keywords::Let) {
|
||||
return self.parse_while_let_expr(opt_ident);
|
||||
}
|
||||
let lo = self.last_span.lo;
|
||||
@@ -2967,7 +2949,7 @@ pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::Ident>) -> P<Expr>
|
||||
let lo = self.last_span.lo;
|
||||
self.expect_keyword(keywords::Let);
|
||||
let pat = self.parse_pat();
|
||||
self.expect(&token::EQ);
|
||||
self.expect(&token::Eq);
|
||||
let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
|
||||
let body = self.parse_block();
|
||||
let hi = body.span.hi;
|
||||
@@ -2984,9 +2966,9 @@ pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::Ident>) -> P<Expr> {
|
||||
fn parse_match_expr(&mut self) -> P<Expr> {
|
||||
let lo = self.last_span.lo;
|
||||
let discriminant = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
|
||||
self.commit_expr_expecting(&*discriminant, token::LBRACE);
|
||||
self.commit_expr_expecting(&*discriminant, token::LBrace);
|
||||
let mut arms: Vec<Arm> = Vec::new();
|
||||
while self.token != token::RBRACE {
|
||||
while self.token != token::RBrace {
|
||||
arms.push(self.parse_arm());
|
||||
}
|
||||
let hi = self.span.hi;
|
||||
@@ -3001,17 +2983,17 @@ pub fn parse_arm(&mut self) -> Arm {
|
||||
if self.eat_keyword(keywords::If) {
|
||||
guard = Some(self.parse_expr());
|
||||
}
|
||||
self.expect(&token::FAT_ARROW);
|
||||
self.expect(&token::FatArrow);
|
||||
let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR);
|
||||
|
||||
let require_comma =
|
||||
!classify::expr_is_simple_block(&*expr)
|
||||
&& self.token != token::RBRACE;
|
||||
&& self.token != token::RBrace;
|
||||
|
||||
if require_comma {
|
||||
self.commit_expr(&*expr, &[token::COMMA], &[token::RBRACE]);
|
||||
self.commit_expr(&*expr, &[token::Comma], &[token::RBrace]);
|
||||
} else {
|
||||
self.eat(&token::COMMA);
|
||||
self.eat(&token::Comma);
|
||||
}
|
||||
|
||||
ast::Arm {
|
||||
@@ -3038,7 +3020,7 @@ pub fn parse_expr_res(&mut self, r: Restrictions) -> P<Expr> {
|
||||
|
||||
/// Parse the RHS of a local variable declaration (e.g. '= 14;')
|
||||
fn parse_initializer(&mut self) -> Option<P<Expr>> {
|
||||
if self.token == token::EQ {
|
||||
if self.token == token::Eq {
|
||||
self.bump();
|
||||
Some(self.parse_expr())
|
||||
} else {
|
||||
@@ -3051,7 +3033,7 @@ fn parse_pats(&mut self) -> Vec<P<Pat>> {
|
||||
let mut pats = Vec::new();
|
||||
loop {
|
||||
pats.push(self.parse_pat());
|
||||
if self.token == token::BINOP(token::OR) { self.bump(); }
|
||||
if self.token == token::BinOp(token::Or) { self.bump(); }
|
||||
else { return pats; }
|
||||
};
|
||||
}
|
||||
@@ -3065,19 +3047,19 @@ fn parse_pat_vec_elements(
|
||||
let mut first = true;
|
||||
let mut before_slice = true;
|
||||
|
||||
while self.token != token::RBRACKET {
|
||||
while self.token != token::RBracket {
|
||||
if first {
|
||||
first = false;
|
||||
} else {
|
||||
self.expect(&token::COMMA);
|
||||
self.expect(&token::Comma);
|
||||
}
|
||||
|
||||
if before_slice {
|
||||
if self.token == token::DOTDOT {
|
||||
if self.token == token::DotDot {
|
||||
self.bump();
|
||||
|
||||
if self.token == token::COMMA ||
|
||||
self.token == token::RBRACKET {
|
||||
if self.token == token::Comma ||
|
||||
self.token == token::RBracket {
|
||||
slice = Some(P(ast::Pat {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: PatWild(PatWildMulti),
|
||||
@@ -3094,7 +3076,7 @@ fn parse_pat_vec_elements(
|
||||
}
|
||||
|
||||
let subpat = self.parse_pat();
|
||||
if before_slice && self.token == token::DOTDOT {
|
||||
if before_slice && self.token == token::DotDot {
|
||||
self.bump();
|
||||
slice = Some(subpat);
|
||||
before_slice = false;
|
||||
@@ -3113,21 +3095,21 @@ fn parse_pat_fields(&mut self) -> (Vec<codemap::Spanned<ast::FieldPat>> , bool)
|
||||
let mut fields = Vec::new();
|
||||
let mut etc = false;
|
||||
let mut first = true;
|
||||
while self.token != token::RBRACE {
|
||||
while self.token != token::RBrace {
|
||||
if first {
|
||||
first = false;
|
||||
} else {
|
||||
self.expect(&token::COMMA);
|
||||
self.expect(&token::Comma);
|
||||
// accept trailing commas
|
||||
if self.token == token::RBRACE { break }
|
||||
if self.token == token::RBrace { break }
|
||||
}
|
||||
|
||||
let lo = self.span.lo;
|
||||
let hi;
|
||||
|
||||
if self.token == token::DOTDOT {
|
||||
if self.token == token::DotDot {
|
||||
self.bump();
|
||||
if self.token != token::RBRACE {
|
||||
if self.token != token::RBrace {
|
||||
let token_str = self.this_token_to_string();
|
||||
self.fatal(format!("expected `{}`, found `{}`", "}",
|
||||
token_str).as_slice())
|
||||
@@ -3146,7 +3128,7 @@ fn parse_pat_fields(&mut self) -> (Vec<codemap::Spanned<ast::FieldPat>> , bool)
|
||||
|
||||
let fieldname = self.parse_ident();
|
||||
|
||||
let (subpat, is_shorthand) = if self.token == token::COLON {
|
||||
let (subpat, is_shorthand) = if self.token == token::Colon {
|
||||
match bind_type {
|
||||
BindByRef(..) | BindByValue(MutMutable) => {
|
||||
let token_str = self.this_token_to_string();
|
||||
@@ -3186,7 +3168,7 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
let pat;
|
||||
match self.token {
|
||||
// parse _
|
||||
token::UNDERSCORE => {
|
||||
token::Underscore => {
|
||||
self.bump();
|
||||
pat = PatWild(PatWildSingle);
|
||||
hi = self.last_span.hi;
|
||||
@@ -3196,7 +3178,7 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
span: mk_sp(lo, hi)
|
||||
})
|
||||
}
|
||||
token::TILDE => {
|
||||
token::Tilde => {
|
||||
// parse ~pat
|
||||
self.bump();
|
||||
let sub = self.parse_pat();
|
||||
@@ -3210,7 +3192,7 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
span: mk_sp(lo, hi)
|
||||
})
|
||||
}
|
||||
token::BINOP(token::AND) | token::ANDAND => {
|
||||
token::BinOp(token::And) | token::AndAnd => {
|
||||
// parse &pat
|
||||
let lo = self.span.lo;
|
||||
self.expect_and();
|
||||
@@ -3223,10 +3205,10 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
span: mk_sp(lo, hi)
|
||||
})
|
||||
}
|
||||
token::LPAREN => {
|
||||
token::LParen => {
|
||||
// parse (pat,pat,pat,...) as tuple
|
||||
self.bump();
|
||||
if self.token == token::RPAREN {
|
||||
if self.token == token::RParen {
|
||||
hi = self.span.hi;
|
||||
self.bump();
|
||||
let lit = P(codemap::Spanned {
|
||||
@@ -3236,15 +3218,15 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
pat = PatLit(expr);
|
||||
} else {
|
||||
let mut fields = vec!(self.parse_pat());
|
||||
if self.look_ahead(1, |t| *t != token::RPAREN) {
|
||||
while self.token == token::COMMA {
|
||||
if self.look_ahead(1, |t| *t != token::RParen) {
|
||||
while self.token == token::Comma {
|
||||
self.bump();
|
||||
if self.token == token::RPAREN { break; }
|
||||
if self.token == token::RParen { break; }
|
||||
fields.push(self.parse_pat());
|
||||
}
|
||||
}
|
||||
if fields.len() == 1 { self.expect(&token::COMMA); }
|
||||
self.expect(&token::RPAREN);
|
||||
if fields.len() == 1 { self.expect(&token::Comma); }
|
||||
self.expect(&token::RParen);
|
||||
pat = PatTup(fields);
|
||||
}
|
||||
hi = self.last_span.hi;
|
||||
@@ -3254,13 +3236,13 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
span: mk_sp(lo, hi)
|
||||
})
|
||||
}
|
||||
token::LBRACKET => {
|
||||
token::LBracket => {
|
||||
// parse [pat,pat,...] as vector pattern
|
||||
self.bump();
|
||||
let (before, slice, after) =
|
||||
self.parse_pat_vec_elements();
|
||||
|
||||
self.expect(&token::RBRACKET);
|
||||
self.expect(&token::RBracket);
|
||||
pat = ast::PatVec(before, slice, after);
|
||||
hi = self.last_span.hi;
|
||||
return P(ast::Pat {
|
||||
@@ -3273,20 +3255,21 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
}
|
||||
// at this point, token != _, ~, &, &&, (, [
|
||||
|
||||
if (!is_ident_or_path(&self.token) && self.token != token::MOD_SEP)
|
||||
|| self.is_keyword(keywords::True)
|
||||
|| self.is_keyword(keywords::False) {
|
||||
if (!(self.token.is_ident() || self.token.is_path())
|
||||
&& self.token != token::ModSep)
|
||||
|| self.token.is_keyword(keywords::True)
|
||||
|| self.token.is_keyword(keywords::False) {
|
||||
// Parse an expression pattern or exp .. exp.
|
||||
//
|
||||
// These expressions are limited to literals (possibly
|
||||
// preceded by unary-minus) or identifiers.
|
||||
let val = self.parse_literal_maybe_minus();
|
||||
if (self.token == token::DOTDOTDOT) &&
|
||||
if (self.token == token::DotDotDot) &&
|
||||
self.look_ahead(1, |t| {
|
||||
*t != token::COMMA && *t != token::RBRACKET
|
||||
*t != token::Comma && *t != token::RBracket
|
||||
}) {
|
||||
self.bump();
|
||||
let end = if is_ident_or_path(&self.token) {
|
||||
let end = if self.token.is_ident() || self.token.is_path() {
|
||||
let path = self.parse_path(LifetimeAndTypesWithColons)
|
||||
.path;
|
||||
let hi = self.span.hi;
|
||||
@@ -3320,27 +3303,27 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
} else {
|
||||
let can_be_enum_or_struct = self.look_ahead(1, |t| {
|
||||
match *t {
|
||||
token::LPAREN | token::LBRACKET | token::LT |
|
||||
token::LBRACE | token::MOD_SEP => true,
|
||||
token::LParen | token::LBracket | token::Lt |
|
||||
token::LBrace | token::ModSep => true,
|
||||
_ => false,
|
||||
}
|
||||
});
|
||||
|
||||
if self.look_ahead(1, |t| *t == token::DOTDOTDOT) &&
|
||||
if self.look_ahead(1, |t| *t == token::DotDotDot) &&
|
||||
self.look_ahead(2, |t| {
|
||||
*t != token::COMMA && *t != token::RBRACKET
|
||||
*t != token::Comma && *t != token::RBracket
|
||||
}) {
|
||||
let start = self.parse_expr_res(RESTRICTION_NO_BAR_OP);
|
||||
self.eat(&token::DOTDOTDOT);
|
||||
self.eat(&token::DotDotDot);
|
||||
let end = self.parse_expr_res(RESTRICTION_NO_BAR_OP);
|
||||
pat = PatRange(start, end);
|
||||
} else if is_plain_ident(&self.token) && !can_be_enum_or_struct {
|
||||
} else if self.token.is_plain_ident() && !can_be_enum_or_struct {
|
||||
let id = self.parse_ident();
|
||||
let id_span = self.last_span;
|
||||
let pth1 = codemap::Spanned{span:id_span, node: id};
|
||||
if self.eat(&token::NOT) {
|
||||
if self.eat(&token::Not) {
|
||||
// macro invocation
|
||||
let ket = token::close_delimiter_for(&self.token)
|
||||
let ket = self.token.get_close_delimiter()
|
||||
.unwrap_or_else(|| self.fatal("expected open delimiter"));
|
||||
self.bump();
|
||||
|
||||
@@ -3351,7 +3334,7 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
let mac = MacInvocTT(ident_to_path(id_span,id), tts, EMPTY_CTXT);
|
||||
pat = ast::PatMac(codemap::Spanned {node: mac, span: self.span});
|
||||
} else {
|
||||
let sub = if self.eat(&token::AT) {
|
||||
let sub = if self.eat(&token::At) {
|
||||
// parse foo @ pat
|
||||
Some(self.parse_pat())
|
||||
} else {
|
||||
@@ -3365,7 +3348,7 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
let enum_path = self.parse_path(LifetimeAndTypesWithColons)
|
||||
.path;
|
||||
match self.token {
|
||||
token::LBRACE => {
|
||||
token::LBrace => {
|
||||
self.bump();
|
||||
let (fields, etc) =
|
||||
self.parse_pat_fields();
|
||||
@@ -3375,10 +3358,10 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
_ => {
|
||||
let mut args: Vec<P<Pat>> = Vec::new();
|
||||
match self.token {
|
||||
token::LPAREN => {
|
||||
token::LParen => {
|
||||
let is_dotdot = self.look_ahead(1, |t| {
|
||||
match *t {
|
||||
token::DOTDOT => true,
|
||||
token::DotDot => true,
|
||||
_ => false,
|
||||
}
|
||||
});
|
||||
@@ -3386,13 +3369,13 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
// This is a "top constructor only" pat
|
||||
self.bump();
|
||||
self.bump();
|
||||
self.expect(&token::RPAREN);
|
||||
self.expect(&token::RParen);
|
||||
pat = PatEnum(enum_path, None);
|
||||
} else {
|
||||
args = self.parse_enum_variant_seq(
|
||||
&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_pat()
|
||||
);
|
||||
pat = PatEnum(enum_path, Some(args));
|
||||
@@ -3439,7 +3422,7 @@ pub fn parse_pat(&mut self) -> P<Pat> {
|
||||
fn parse_pat_ident(&mut self,
|
||||
binding_mode: ast::BindingMode)
|
||||
-> ast::Pat_ {
|
||||
if !is_plain_ident(&self.token) {
|
||||
if !self.token.is_plain_ident() {
|
||||
let span = self.span;
|
||||
let tok_str = self.this_token_to_string();
|
||||
self.span_fatal(span,
|
||||
@@ -3448,7 +3431,7 @@ fn parse_pat_ident(&mut self,
|
||||
let ident = self.parse_ident();
|
||||
let last_span = self.last_span;
|
||||
let name = codemap::Spanned{span: last_span, node: ident};
|
||||
let sub = if self.eat(&token::AT) {
|
||||
let sub = if self.eat(&token::At) {
|
||||
Some(self.parse_pat())
|
||||
} else {
|
||||
None
|
||||
@@ -3460,7 +3443,7 @@ fn parse_pat_ident(&mut self,
|
||||
// leads to a parse error. Note that if there is no explicit
|
||||
// binding mode then we do not end up here, because the lookahead
|
||||
// will direct us over to parse_enum_variant()
|
||||
if self.token == token::LPAREN {
|
||||
if self.token == token::LParen {
|
||||
let last_span = self.last_span;
|
||||
self.span_fatal(
|
||||
last_span,
|
||||
@@ -3480,7 +3463,7 @@ fn parse_local(&mut self) -> P<Local> {
|
||||
node: TyInfer,
|
||||
span: mk_sp(lo, lo),
|
||||
});
|
||||
if self.eat(&token::COLON) {
|
||||
if self.eat(&token::Colon) {
|
||||
ty = self.parse_ty(true);
|
||||
}
|
||||
let init = self.parse_initializer();
|
||||
@@ -3505,11 +3488,11 @@ fn parse_let(&mut self) -> P<Decl> {
|
||||
fn parse_name_and_ty(&mut self, pr: Visibility,
|
||||
attrs: Vec<Attribute> ) -> StructField {
|
||||
let lo = self.span.lo;
|
||||
if !is_plain_ident(&self.token) {
|
||||
if !self.token.is_plain_ident() {
|
||||
self.fatal("expected ident");
|
||||
}
|
||||
let name = self.parse_ident();
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
let ty = self.parse_ty(true);
|
||||
spanned(lo, self.last_span.hi, ast::StructField_ {
|
||||
kind: NamedField(name, pr),
|
||||
@@ -3543,14 +3526,14 @@ fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) {
|
||||
}
|
||||
|
||||
let lo = self.span.lo;
|
||||
if self.is_keyword(keywords::Let) {
|
||||
if self.token.is_keyword(keywords::Let) {
|
||||
check_expected_item(self, item_attrs.as_slice());
|
||||
self.expect_keyword(keywords::Let);
|
||||
let decl = self.parse_let();
|
||||
P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)))
|
||||
} else if is_ident(&self.token)
|
||||
&& !token::is_any_keyword(&self.token)
|
||||
&& self.look_ahead(1, |t| *t == token::NOT) {
|
||||
} else if self.token.is_ident()
|
||||
&& !self.token.is_any_keyword()
|
||||
&& self.look_ahead(1, |t| *t == token::Not) {
|
||||
// it's a macro invocation:
|
||||
|
||||
check_expected_item(self, item_attrs.as_slice());
|
||||
@@ -3560,7 +3543,7 @@ fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) {
|
||||
let pth = self.parse_path(NoTypesAllowed).path;
|
||||
self.bump();
|
||||
|
||||
let id = if token::close_delimiter_for(&self.token).is_some() {
|
||||
let id = if self.token.get_close_delimiter().is_some() {
|
||||
token::special_idents::invalid // no special identifier
|
||||
} else {
|
||||
self.parse_ident()
|
||||
@@ -3569,7 +3552,7 @@ fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) {
|
||||
// check that we're pointing at delimiters (need to check
|
||||
// again after the `if`, because of `parse_ident`
|
||||
// consuming more tokens).
|
||||
let (bra, ket) = match token::close_delimiter_for(&self.token) {
|
||||
let (bra, ket) = match self.token.get_close_delimiter() {
|
||||
Some(ket) => (self.token.clone(), ket),
|
||||
None => {
|
||||
// we only expect an ident if we didn't parse one
|
||||
@@ -3649,7 +3632,7 @@ pub fn parse_block(&mut self) -> P<Block> {
|
||||
maybe_whole!(no_clone self, NtBlock);
|
||||
|
||||
let lo = self.span.lo;
|
||||
self.expect(&token::LBRACE);
|
||||
self.expect(&token::LBrace);
|
||||
|
||||
return self.parse_block_tail_(lo, DefaultBlock, Vec::new());
|
||||
}
|
||||
@@ -3661,7 +3644,7 @@ fn parse_inner_attrs_and_block(&mut self)
|
||||
maybe_whole!(pair_empty self, NtBlock);
|
||||
|
||||
let lo = self.span.lo;
|
||||
self.expect(&token::LBRACE);
|
||||
self.expect(&token::LBrace);
|
||||
let (inner, next) = self.parse_inner_attrs_and_next();
|
||||
|
||||
(inner, self.parse_block_tail_(lo, DefaultBlock, next))
|
||||
@@ -3698,12 +3681,12 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode,
|
||||
|
||||
let mut attributes_box = attrs_remaining;
|
||||
|
||||
while self.token != token::RBRACE {
|
||||
while self.token != token::RBrace {
|
||||
// parsing items even when they're not allowed lets us give
|
||||
// better error messages and recover more gracefully.
|
||||
attributes_box.push_all(self.parse_outer_attributes().as_slice());
|
||||
match self.token {
|
||||
token::SEMI => {
|
||||
token::Semi => {
|
||||
if !attributes_box.is_empty() {
|
||||
let last_span = self.last_span;
|
||||
self.span_err(last_span,
|
||||
@@ -3712,7 +3695,7 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode,
|
||||
}
|
||||
self.bump(); // empty
|
||||
}
|
||||
token::RBRACE => {
|
||||
token::RBrace => {
|
||||
// fall through and out.
|
||||
}
|
||||
_ => {
|
||||
@@ -3723,11 +3706,11 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode,
|
||||
// expression without semicolon
|
||||
if classify::expr_requires_semi_to_be_stmt(&*e) {
|
||||
// Just check for errors and recover; do not eat semicolon yet.
|
||||
self.commit_stmt(&[], &[token::SEMI, token::RBRACE]);
|
||||
self.commit_stmt(&[], &[token::Semi, token::RBrace]);
|
||||
}
|
||||
|
||||
match self.token {
|
||||
token::SEMI => {
|
||||
token::Semi => {
|
||||
self.bump();
|
||||
let span_with_semi = Span {
|
||||
lo: span.lo,
|
||||
@@ -3739,7 +3722,7 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode,
|
||||
span: span_with_semi,
|
||||
}));
|
||||
}
|
||||
token::RBRACE => {
|
||||
token::RBrace => {
|
||||
expr = Some(e);
|
||||
}
|
||||
_ => {
|
||||
@@ -3753,14 +3736,14 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode,
|
||||
StmtMac(m, semi) => {
|
||||
// statement macro; might be an expr
|
||||
match self.token {
|
||||
token::SEMI => {
|
||||
token::Semi => {
|
||||
stmts.push(P(Spanned {
|
||||
node: StmtMac(m, true),
|
||||
span: span,
|
||||
}));
|
||||
self.bump();
|
||||
}
|
||||
token::RBRACE => {
|
||||
token::RBrace => {
|
||||
// if a block ends in `m!(arg)` without
|
||||
// a `;`, it must be an expr
|
||||
expr = Some(
|
||||
@@ -3778,7 +3761,7 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode,
|
||||
}
|
||||
_ => { // all other kinds of statements:
|
||||
if classify::stmt_ends_with_semi(&node) {
|
||||
self.commit_stmt_expecting(token::SEMI);
|
||||
self.commit_stmt_expecting(token::Semi);
|
||||
}
|
||||
|
||||
stmts.push(P(Spanned {
|
||||
@@ -3814,7 +3797,7 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode,
|
||||
fn parse_colon_then_ty_param_bounds(&mut self)
|
||||
-> OwnedSlice<TyParamBound>
|
||||
{
|
||||
if !self.eat(&token::COLON) {
|
||||
if !self.eat(&token::Colon) {
|
||||
OwnedSlice::empty()
|
||||
} else {
|
||||
self.parse_ty_param_bounds()
|
||||
@@ -3830,7 +3813,7 @@ fn parse_ty_param_bounds(&mut self)
|
||||
{
|
||||
let mut result = vec!();
|
||||
loop {
|
||||
let lifetime_defs = if self.eat(&token::LT) {
|
||||
let lifetime_defs = if self.eat(&token::Lt) {
|
||||
let lifetime_defs = self.parse_lifetime_defs();
|
||||
self.expect_gt();
|
||||
lifetime_defs
|
||||
@@ -3838,7 +3821,7 @@ fn parse_ty_param_bounds(&mut self)
|
||||
Vec::new()
|
||||
};
|
||||
match self.token {
|
||||
token::LIFETIME(lifetime) => {
|
||||
token::Lifetime(lifetime) => {
|
||||
if lifetime_defs.len() > 0 {
|
||||
let span = self.last_span;
|
||||
self.span_err(span, "lifetime declarations are not \
|
||||
@@ -3852,14 +3835,14 @@ fn parse_ty_param_bounds(&mut self)
|
||||
}));
|
||||
self.bump();
|
||||
}
|
||||
token::MOD_SEP | token::IDENT(..) => {
|
||||
token::ModSep | token::Ident(..) => {
|
||||
let path =
|
||||
self.parse_path(LifetimeAndTypesWithoutColons).path;
|
||||
if self.token == token::LPAREN {
|
||||
if self.token == token::LParen {
|
||||
self.bump();
|
||||
let inputs = self.parse_seq_to_end(
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::RParen,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_arg_general(false));
|
||||
let (return_style, output) = self.parse_ret_ty();
|
||||
result.push(UnboxedFnTyParamBound(P(UnboxedFnBound {
|
||||
@@ -3884,7 +3867,7 @@ fn parse_ty_param_bounds(&mut self)
|
||||
_ => break,
|
||||
}
|
||||
|
||||
if !self.eat(&token::BINOP(token::PLUS)) {
|
||||
if !self.eat(&token::BinOp(token::Plus)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -3920,7 +3903,7 @@ fn parse_ty_param(&mut self) -> TyParam {
|
||||
let mut span = self.span;
|
||||
let mut ident = self.parse_ident();
|
||||
let mut unbound = None;
|
||||
if self.eat(&token::QUESTION) {
|
||||
if self.eat(&token::Question) {
|
||||
let tref = Parser::trait_ref_from_ident(ident, span);
|
||||
unbound = Some(TraitTyParamBound(tref));
|
||||
span = self.span;
|
||||
@@ -3929,7 +3912,7 @@ fn parse_ty_param(&mut self) -> TyParam {
|
||||
|
||||
let bounds = self.parse_colon_then_ty_param_bounds();
|
||||
|
||||
let default = if self.token == token::EQ {
|
||||
let default = if self.token == token::Eq {
|
||||
self.bump();
|
||||
Some(self.parse_ty(true))
|
||||
}
|
||||
@@ -3953,10 +3936,10 @@ fn parse_ty_param(&mut self) -> TyParam {
|
||||
/// | ( < lifetimes , typaramseq ( , )? > )
|
||||
/// where typaramseq = ( typaram ) | ( typaram , typaramseq )
|
||||
pub fn parse_generics(&mut self) -> ast::Generics {
|
||||
if self.eat(&token::LT) {
|
||||
if self.eat(&token::Lt) {
|
||||
let lifetime_defs = self.parse_lifetime_defs();
|
||||
let mut seen_default = false;
|
||||
let ty_params = self.parse_seq_to_gt(Some(token::COMMA), |p| {
|
||||
let ty_params = self.parse_seq_to_gt(Some(token::Comma), |p| {
|
||||
p.forbid_lifetime();
|
||||
let ty_param = p.parse_ty_param();
|
||||
if ty_param.default.is_some() {
|
||||
@@ -3982,9 +3965,9 @@ pub fn parse_generics(&mut self) -> ast::Generics {
|
||||
}
|
||||
|
||||
fn parse_generic_values_after_lt(&mut self) -> (Vec<ast::Lifetime>, Vec<P<Ty>> ) {
|
||||
let lifetimes = self.parse_lifetimes(token::COMMA);
|
||||
let lifetimes = self.parse_lifetimes(token::Comma);
|
||||
let result = self.parse_seq_to_gt(
|
||||
Some(token::COMMA),
|
||||
Some(token::Comma),
|
||||
|p| {
|
||||
p.forbid_lifetime();
|
||||
p.parse_ty(true)
|
||||
@@ -3994,7 +3977,7 @@ fn parse_generic_values_after_lt(&mut self) -> (Vec<ast::Lifetime>, Vec<P<Ty>> )
|
||||
}
|
||||
|
||||
fn forbid_lifetime(&mut self) {
|
||||
if Parser::token_is_lifetime(&self.token) {
|
||||
if self.token.is_lifetime() {
|
||||
let span = self.span;
|
||||
self.span_fatal(span, "lifetime parameters must be declared \
|
||||
prior to type parameters");
|
||||
@@ -4011,10 +3994,10 @@ fn parse_where_clause(&mut self, generics: &mut ast::Generics) {
|
||||
loop {
|
||||
let lo = self.span.lo;
|
||||
let ident = match self.token {
|
||||
token::IDENT(..) => self.parse_ident(),
|
||||
token::Ident(..) => self.parse_ident(),
|
||||
_ => break,
|
||||
};
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
|
||||
let bounds = self.parse_ty_param_bounds();
|
||||
let hi = self.span.hi;
|
||||
@@ -4034,7 +4017,7 @@ fn parse_where_clause(&mut self, generics: &mut ast::Generics) {
|
||||
});
|
||||
parsed_something = true;
|
||||
|
||||
if !self.eat(&token::COMMA) {
|
||||
if !self.eat(&token::Comma) {
|
||||
break
|
||||
}
|
||||
}
|
||||
@@ -4052,14 +4035,14 @@ fn parse_fn_args(&mut self, named_args: bool, allow_variadic: bool)
|
||||
let sp = self.span;
|
||||
let mut args: Vec<Option<Arg>> =
|
||||
self.parse_unspanned_seq(
|
||||
&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| {
|
||||
if p.token == token::DOTDOTDOT {
|
||||
if p.token == token::DotDotDot {
|
||||
p.bump();
|
||||
if allow_variadic {
|
||||
if p.token != token::RPAREN {
|
||||
if p.token != token::RParen {
|
||||
let span = p.span;
|
||||
p.span_fatal(span,
|
||||
"`...` must be last in argument list for variadic function");
|
||||
@@ -4112,14 +4095,14 @@ pub fn parse_fn_decl(&mut self, allow_variadic: bool) -> P<FnDecl> {
|
||||
|
||||
fn is_self_ident(&mut self) -> bool {
|
||||
match self.token {
|
||||
token::IDENT(id, false) => id.name == special_idents::self_.name,
|
||||
token::Ident(id, token::Plain) => id.name == special_idents::self_.name,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
fn expect_self_ident(&mut self) -> ast::Ident {
|
||||
match self.token {
|
||||
token::IDENT(id, false) if id.name == special_idents::self_.name => {
|
||||
token::Ident(id, token::Plain) if id.name == special_idents::self_.name => {
|
||||
self.bump();
|
||||
id
|
||||
},
|
||||
@@ -4146,29 +4129,22 @@ fn maybe_parse_borrowed_explicit_self(this: &mut Parser)
|
||||
//
|
||||
// We already know that the current token is `&`.
|
||||
|
||||
if this.look_ahead(1, |t| token::is_keyword(keywords::Self, t)) {
|
||||
if this.look_ahead(1, |t| t.is_keyword(keywords::Self)) {
|
||||
this.bump();
|
||||
SelfRegion(None, MutImmutable, this.expect_self_ident())
|
||||
} else if this.look_ahead(1, |t| Parser::token_is_mutability(t)) &&
|
||||
this.look_ahead(2,
|
||||
|t| token::is_keyword(keywords::Self,
|
||||
t)) {
|
||||
} else if this.look_ahead(1, |t| t.is_mutability()) &&
|
||||
this.look_ahead(2, |t| t.is_keyword(keywords::Self)) {
|
||||
this.bump();
|
||||
let mutability = this.parse_mutability();
|
||||
SelfRegion(None, mutability, this.expect_self_ident())
|
||||
} else if this.look_ahead(1, |t| Parser::token_is_lifetime(t)) &&
|
||||
this.look_ahead(2,
|
||||
|t| token::is_keyword(keywords::Self,
|
||||
t)) {
|
||||
} else if this.look_ahead(1, |t| t.is_lifetime()) &&
|
||||
this.look_ahead(2, |t| t.is_keyword(keywords::Self)) {
|
||||
this.bump();
|
||||
let lifetime = this.parse_lifetime();
|
||||
SelfRegion(Some(lifetime), MutImmutable, this.expect_self_ident())
|
||||
} else if this.look_ahead(1, |t| Parser::token_is_lifetime(t)) &&
|
||||
this.look_ahead(2, |t| {
|
||||
Parser::token_is_mutability(t)
|
||||
}) &&
|
||||
this.look_ahead(3, |t| token::is_keyword(keywords::Self,
|
||||
t)) {
|
||||
} else if this.look_ahead(1, |t| t.is_lifetime()) &&
|
||||
this.look_ahead(2, |t| t.is_mutability()) &&
|
||||
this.look_ahead(3, |t| t.is_keyword(keywords::Self)) {
|
||||
this.bump();
|
||||
let lifetime = this.parse_lifetime();
|
||||
let mutability = this.parse_mutability();
|
||||
@@ -4178,7 +4154,7 @@ fn maybe_parse_borrowed_explicit_self(this: &mut Parser)
|
||||
}
|
||||
}
|
||||
|
||||
self.expect(&token::LPAREN);
|
||||
self.expect(&token::LParen);
|
||||
|
||||
// A bit of complexity and lookahead is needed here in order to be
|
||||
// backwards compatible.
|
||||
@@ -4188,15 +4164,15 @@ fn maybe_parse_borrowed_explicit_self(this: &mut Parser)
|
||||
|
||||
let mut mutbl_self = MutImmutable;
|
||||
let explicit_self = match self.token {
|
||||
token::BINOP(token::AND) => {
|
||||
token::BinOp(token::And) => {
|
||||
let eself = maybe_parse_borrowed_explicit_self(self);
|
||||
self_ident_lo = self.last_span.lo;
|
||||
self_ident_hi = self.last_span.hi;
|
||||
eself
|
||||
}
|
||||
token::TILDE => {
|
||||
token::Tilde => {
|
||||
// We need to make sure it isn't a type
|
||||
if self.look_ahead(1, |t| token::is_keyword(keywords::Self, t)) {
|
||||
if self.look_ahead(1, |t| t.is_keyword(keywords::Self)) {
|
||||
self.bump();
|
||||
drop(self.expect_self_ident());
|
||||
let last_span = self.last_span;
|
||||
@@ -4204,11 +4180,11 @@ fn maybe_parse_borrowed_explicit_self(this: &mut Parser)
|
||||
}
|
||||
SelfStatic
|
||||
}
|
||||
token::BINOP(token::STAR) => {
|
||||
token::BinOp(token::Star) => {
|
||||
// Possibly "*self" or "*mut self" -- not supported. Try to avoid
|
||||
// emitting cryptic "unexpected token" errors.
|
||||
self.bump();
|
||||
let _mutability = if Parser::token_is_mutability(&self.token) {
|
||||
let _mutability = if self.token.is_mutability() {
|
||||
self.parse_mutability()
|
||||
} else {
|
||||
MutImmutable
|
||||
@@ -4221,36 +4197,32 @@ fn maybe_parse_borrowed_explicit_self(this: &mut Parser)
|
||||
// error case, making bogus self ident:
|
||||
SelfValue(special_idents::self_)
|
||||
}
|
||||
token::IDENT(..) => {
|
||||
token::Ident(..) => {
|
||||
if self.is_self_ident() {
|
||||
let self_ident = self.expect_self_ident();
|
||||
|
||||
// Determine whether this is the fully explicit form, `self:
|
||||
// TYPE`.
|
||||
if self.eat(&token::COLON) {
|
||||
if self.eat(&token::Colon) {
|
||||
SelfExplicit(self.parse_ty(false), self_ident)
|
||||
} else {
|
||||
SelfValue(self_ident)
|
||||
}
|
||||
} else if Parser::token_is_mutability(&self.token) &&
|
||||
self.look_ahead(1, |t| {
|
||||
token::is_keyword(keywords::Self, t)
|
||||
}) {
|
||||
} else if self.token.is_mutability() &&
|
||||
self.look_ahead(1, |t| t.is_keyword(keywords::Self)) {
|
||||
mutbl_self = self.parse_mutability();
|
||||
let self_ident = self.expect_self_ident();
|
||||
|
||||
// Determine whether this is the fully explicit form,
|
||||
// `self: TYPE`.
|
||||
if self.eat(&token::COLON) {
|
||||
if self.eat(&token::Colon) {
|
||||
SelfExplicit(self.parse_ty(false), self_ident)
|
||||
} else {
|
||||
SelfValue(self_ident)
|
||||
}
|
||||
} else if Parser::token_is_mutability(&self.token) &&
|
||||
self.look_ahead(1, |t| *t == token::TILDE) &&
|
||||
self.look_ahead(2, |t| {
|
||||
token::is_keyword(keywords::Self, t)
|
||||
}) {
|
||||
} else if self.token.is_mutability() &&
|
||||
self.look_ahead(1, |t| *t == token::Tilde) &&
|
||||
self.look_ahead(2, |t| t.is_keyword(keywords::Self)) {
|
||||
mutbl_self = self.parse_mutability();
|
||||
self.bump();
|
||||
drop(self.expect_self_ident());
|
||||
@@ -4273,18 +4245,18 @@ macro_rules! parse_remaining_arguments {
|
||||
{
|
||||
// If we parsed a self type, expect a comma before the argument list.
|
||||
match self.token {
|
||||
token::COMMA => {
|
||||
token::Comma => {
|
||||
self.bump();
|
||||
let sep = seq_sep_trailing_allowed(token::COMMA);
|
||||
let sep = seq_sep_trailing_allowed(token::Comma);
|
||||
let mut fn_inputs = self.parse_seq_to_before_end(
|
||||
&token::RPAREN,
|
||||
&token::RParen,
|
||||
sep,
|
||||
parse_arg_fn
|
||||
);
|
||||
fn_inputs.insert(0, Arg::new_self(explicit_self_sp, mutbl_self, $self_id));
|
||||
fn_inputs
|
||||
}
|
||||
token::RPAREN => {
|
||||
token::RParen => {
|
||||
vec!(Arg::new_self(explicit_self_sp, mutbl_self, $self_id))
|
||||
}
|
||||
_ => {
|
||||
@@ -4298,8 +4270,8 @@ macro_rules! parse_remaining_arguments {
|
||||
|
||||
let fn_inputs = match explicit_self {
|
||||
SelfStatic => {
|
||||
let sep = seq_sep_trailing_allowed(token::COMMA);
|
||||
self.parse_seq_to_before_end(&token::RPAREN, sep, parse_arg_fn)
|
||||
let sep = seq_sep_trailing_allowed(token::Comma);
|
||||
self.parse_seq_to_before_end(&token::RParen, sep, parse_arg_fn)
|
||||
}
|
||||
SelfValue(id) => parse_remaining_arguments!(id),
|
||||
SelfRegion(_,_,id) => parse_remaining_arguments!(id),
|
||||
@@ -4307,7 +4279,7 @@ macro_rules! parse_remaining_arguments {
|
||||
};
|
||||
|
||||
|
||||
self.expect(&token::RPAREN);
|
||||
self.expect(&token::RParen);
|
||||
|
||||
let hi = self.span.hi;
|
||||
|
||||
@@ -4327,22 +4299,22 @@ macro_rules! parse_remaining_arguments {
|
||||
fn parse_fn_block_decl(&mut self)
|
||||
-> (P<FnDecl>, Option<UnboxedClosureKind>) {
|
||||
let (optional_unboxed_closure_kind, inputs_captures) = {
|
||||
if self.eat(&token::OROR) {
|
||||
if self.eat(&token::OrOr) {
|
||||
(None, Vec::new())
|
||||
} else {
|
||||
self.expect(&token::BINOP(token::OR));
|
||||
self.expect(&token::BinOp(token::Or));
|
||||
let optional_unboxed_closure_kind =
|
||||
self.parse_optional_unboxed_closure_kind();
|
||||
let args = self.parse_seq_to_before_end(
|
||||
&token::BINOP(token::OR),
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::BinOp(token::Or),
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_fn_block_arg()
|
||||
);
|
||||
self.bump();
|
||||
(optional_unboxed_closure_kind, args)
|
||||
}
|
||||
};
|
||||
let (style, output) = if self.token == token::RARROW {
|
||||
let (style, output) = if self.token == token::RArrow {
|
||||
self.parse_ret_ty()
|
||||
} else {
|
||||
(Return, P(Ty {
|
||||
@@ -4363,12 +4335,12 @@ fn parse_fn_block_decl(&mut self)
|
||||
/// Parses the `(arg, arg) -> return_type` header on a procedure.
|
||||
fn parse_proc_decl(&mut self) -> P<FnDecl> {
|
||||
let inputs =
|
||||
self.parse_unspanned_seq(&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
self.parse_unspanned_seq(&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_fn_block_arg());
|
||||
|
||||
let (style, output) = if self.token == token::RARROW {
|
||||
let (style, output) = if self.token == token::RArrow {
|
||||
self.parse_ret_ty()
|
||||
} else {
|
||||
(Return, P(Ty {
|
||||
@@ -4431,16 +4403,16 @@ pub fn parse_method(&mut self,
|
||||
|
||||
// code copied from parse_macro_use_or_failure... abstraction!
|
||||
let (method_, hi, new_attrs) = {
|
||||
if !token::is_any_keyword(&self.token)
|
||||
&& self.look_ahead(1, |t| *t == token::NOT)
|
||||
&& (self.look_ahead(2, |t| *t == token::LPAREN)
|
||||
|| self.look_ahead(2, |t| *t == token::LBRACE)) {
|
||||
if !self.token.is_any_keyword()
|
||||
&& self.look_ahead(1, |t| *t == token::Not)
|
||||
&& (self.look_ahead(2, |t| *t == token::LParen)
|
||||
|| self.look_ahead(2, |t| *t == token::LBrace)) {
|
||||
// method macro.
|
||||
let pth = self.parse_path(NoTypesAllowed).path;
|
||||
self.expect(&token::NOT);
|
||||
self.expect(&token::Not);
|
||||
|
||||
// eat a matched-delimiter token tree:
|
||||
let tts = match token::close_delimiter_for(&self.token) {
|
||||
let tts = match self.token.get_close_delimiter() {
|
||||
Some(ket) => {
|
||||
self.bump();
|
||||
self.parse_seq_to_end(&ket,
|
||||
@@ -4512,10 +4484,10 @@ fn parse_item_trait(&mut self) -> ItemInfo {
|
||||
|
||||
fn parse_impl_items(&mut self) -> (Vec<ImplItem>, Vec<Attribute>) {
|
||||
let mut impl_items = Vec::new();
|
||||
self.expect(&token::LBRACE);
|
||||
self.expect(&token::LBrace);
|
||||
let (inner_attrs, mut method_attrs) =
|
||||
self.parse_inner_attrs_and_next();
|
||||
while !self.eat(&token::RBRACE) {
|
||||
while !self.eat(&token::RBrace) {
|
||||
method_attrs.extend(self.parse_outer_attributes().into_iter());
|
||||
let vis = self.parse_visibility();
|
||||
if self.eat_keyword(keywords::Type) {
|
||||
@@ -4541,7 +4513,7 @@ fn parse_item_impl(&mut self) -> ItemInfo {
|
||||
|
||||
// Special case: if the next identifier that follows is '(', don't
|
||||
// allow this to be parsed as a trait.
|
||||
let could_be_trait = self.token != token::LPAREN;
|
||||
let could_be_trait = self.token != token::LParen;
|
||||
|
||||
// Parse the trait.
|
||||
let mut ty = self.parse_ty(true);
|
||||
@@ -4589,7 +4561,7 @@ fn parse_item_struct(&mut self) -> ItemInfo {
|
||||
let class_name = self.parse_ident();
|
||||
let mut generics = self.parse_generics();
|
||||
|
||||
if self.eat(&token::COLON) {
|
||||
if self.eat(&token::Colon) {
|
||||
let ty = self.parse_ty(true);
|
||||
self.span_err(ty.span, "`virtual` structs have been removed from the language");
|
||||
}
|
||||
@@ -4599,11 +4571,11 @@ fn parse_item_struct(&mut self) -> ItemInfo {
|
||||
let mut fields: Vec<StructField>;
|
||||
let is_tuple_like;
|
||||
|
||||
if self.eat(&token::LBRACE) {
|
||||
if self.eat(&token::LBrace) {
|
||||
// It's a record-like struct.
|
||||
is_tuple_like = false;
|
||||
fields = Vec::new();
|
||||
while self.token != token::RBRACE {
|
||||
while self.token != token::RBrace {
|
||||
fields.push(self.parse_struct_decl_field());
|
||||
}
|
||||
if fields.len() == 0 {
|
||||
@@ -4612,13 +4584,13 @@ fn parse_item_struct(&mut self) -> ItemInfo {
|
||||
token::get_ident(class_name)).as_slice());
|
||||
}
|
||||
self.bump();
|
||||
} else if self.token == token::LPAREN {
|
||||
} else if self.token == token::LParen {
|
||||
// It's a tuple-like struct.
|
||||
is_tuple_like = true;
|
||||
fields = self.parse_unspanned_seq(
|
||||
&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| {
|
||||
let attrs = p.parse_outer_attributes();
|
||||
let lo = p.span.lo;
|
||||
@@ -4635,8 +4607,8 @@ fn parse_item_struct(&mut self) -> ItemInfo {
|
||||
written as `struct {};`",
|
||||
token::get_ident(class_name)).as_slice());
|
||||
}
|
||||
self.expect(&token::SEMI);
|
||||
} else if self.eat(&token::SEMI) {
|
||||
self.expect(&token::Semi);
|
||||
} else if self.eat(&token::Semi) {
|
||||
// It's a unit-like struct.
|
||||
is_tuple_like = true;
|
||||
fields = Vec::new();
|
||||
@@ -4664,10 +4636,10 @@ pub fn parse_single_struct_field(&mut self,
|
||||
-> StructField {
|
||||
let a_var = self.parse_name_and_ty(vis, attrs);
|
||||
match self.token {
|
||||
token::COMMA => {
|
||||
token::Comma => {
|
||||
self.bump();
|
||||
}
|
||||
token::RBRACE => {}
|
||||
token::RBrace => {}
|
||||
_ => {
|
||||
let span = self.span;
|
||||
let token_str = self.this_token_to_string();
|
||||
@@ -4701,7 +4673,7 @@ fn parse_for_sized(&mut self) -> Option<ast::TyParamBound> {
|
||||
if self.eat_keyword(keywords::For) {
|
||||
let span = self.span;
|
||||
let ident = self.parse_ident();
|
||||
if !self.eat(&token::QUESTION) {
|
||||
if !self.eat(&token::Question) {
|
||||
self.span_err(span,
|
||||
"expected 'Sized?' after `for` in trait item");
|
||||
return None;
|
||||
@@ -4776,11 +4748,11 @@ fn parse_mod_items(&mut self,
|
||||
|
||||
fn parse_item_const(&mut self, m: Option<Mutability>) -> ItemInfo {
|
||||
let id = self.parse_ident();
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
let ty = self.parse_ty(true);
|
||||
self.expect(&token::EQ);
|
||||
self.expect(&token::Eq);
|
||||
let e = self.parse_expr();
|
||||
self.commit_expr_expecting(&*e, token::SEMI);
|
||||
self.commit_expr_expecting(&*e, token::Semi);
|
||||
let item = match m {
|
||||
Some(m) => ItemStatic(ty, m, e),
|
||||
None => ItemConst(ty, e),
|
||||
@@ -4792,20 +4764,20 @@ fn parse_item_const(&mut self, m: Option<Mutability>) -> ItemInfo {
|
||||
fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> ItemInfo {
|
||||
let id_span = self.span;
|
||||
let id = self.parse_ident();
|
||||
if self.token == token::SEMI {
|
||||
if self.token == token::Semi {
|
||||
self.bump();
|
||||
// This mod is in an external file. Let's go get it!
|
||||
let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span);
|
||||
(id, m, Some(attrs))
|
||||
} else {
|
||||
self.push_mod_path(id, outer_attrs);
|
||||
self.expect(&token::LBRACE);
|
||||
self.expect(&token::LBrace);
|
||||
let mod_inner_lo = self.span.lo;
|
||||
let old_owns_directory = self.owns_directory;
|
||||
self.owns_directory = true;
|
||||
let (inner, next) = self.parse_inner_attrs_and_next();
|
||||
let m = self.parse_mod_items(token::RBRACE, next, mod_inner_lo);
|
||||
self.expect(&token::RBRACE);
|
||||
let m = self.parse_mod_items(token::RBrace, next, mod_inner_lo);
|
||||
self.expect(&token::RBrace);
|
||||
self.owns_directory = old_owns_directory;
|
||||
self.pop_mod_path();
|
||||
(id, ItemMod(m), Some(inner))
|
||||
@@ -4929,7 +4901,7 @@ fn eval_src_mod_from_path(&mut self,
|
||||
let mod_inner_lo = p0.span.lo;
|
||||
let (mod_attrs, next) = p0.parse_inner_attrs_and_next();
|
||||
let first_item_outer_attrs = next;
|
||||
let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs, mod_inner_lo);
|
||||
let m0 = p0.parse_mod_items(token::Eof, first_item_outer_attrs, mod_inner_lo);
|
||||
self.sess.included_mod_stack.borrow_mut().pop();
|
||||
return (ast::ItemMod(m0), mod_attrs);
|
||||
}
|
||||
@@ -4944,7 +4916,7 @@ fn parse_item_foreign_fn(&mut self, vis: ast::Visibility,
|
||||
let decl = self.parse_fn_decl(true);
|
||||
self.parse_where_clause(&mut generics);
|
||||
let hi = self.span.hi;
|
||||
self.expect(&token::SEMI);
|
||||
self.expect(&token::Semi);
|
||||
P(ast::ForeignItem {
|
||||
ident: ident,
|
||||
attrs: attrs,
|
||||
@@ -4964,10 +4936,10 @@ fn parse_item_foreign_static(&mut self, vis: ast::Visibility,
|
||||
let mutbl = self.eat_keyword(keywords::Mut);
|
||||
|
||||
let ident = self.parse_ident();
|
||||
self.expect(&token::COLON);
|
||||
self.expect(&token::Colon);
|
||||
let ty = self.parse_ty(true);
|
||||
let hi = self.span.hi;
|
||||
self.expect(&token::SEMI);
|
||||
self.expect(&token::Semi);
|
||||
P(ForeignItem {
|
||||
ident: ident,
|
||||
attrs: attrs,
|
||||
@@ -5006,7 +4978,7 @@ fn parse_foreign_mod_items(&mut self,
|
||||
self.span_err(last_span,
|
||||
Parser::expected_item_err(attrs_remaining.as_slice()));
|
||||
}
|
||||
assert!(self.token == token::RBRACE);
|
||||
assert!(self.token == token::RBrace);
|
||||
ast::ForeignMod {
|
||||
abi: abi,
|
||||
view_items: view_items,
|
||||
@@ -5029,16 +5001,16 @@ fn parse_item_extern_crate(&mut self,
|
||||
|
||||
let span = self.span;
|
||||
let (maybe_path, ident) = match self.token {
|
||||
token::IDENT(..) => {
|
||||
token::Ident(..) => {
|
||||
let the_ident = self.parse_ident();
|
||||
let path = if self.eat(&token::EQ) {
|
||||
let path = if self.eat(&token::Eq) {
|
||||
let path = self.parse_str();
|
||||
let span = self.span;
|
||||
self.obsolete(span, ObsoleteExternCrateRenaming);
|
||||
Some(path)
|
||||
} else if self.eat_keyword(keywords::As) {
|
||||
// skip the ident if there is one
|
||||
if is_ident(&self.token) { self.bump(); }
|
||||
if self.token.is_ident() { self.bump(); }
|
||||
|
||||
self.span_err(span,
|
||||
format!("expected `;`, found `as`; perhaps you meant \
|
||||
@@ -5048,14 +5020,14 @@ fn parse_item_extern_crate(&mut self,
|
||||
} else {
|
||||
None
|
||||
};
|
||||
self.expect(&token::SEMI);
|
||||
self.expect(&token::Semi);
|
||||
(path, the_ident)
|
||||
},
|
||||
token::LIT_STR(..) | token::LIT_STR_RAW(..) => {
|
||||
token::LitStr(..) | token::LitStrRaw(..) => {
|
||||
let path = self.parse_str();
|
||||
self.expect_keyword(keywords::As);
|
||||
let the_ident = self.parse_ident();
|
||||
self.expect(&token::SEMI);
|
||||
self.expect(&token::Semi);
|
||||
(Some(path), the_ident)
|
||||
},
|
||||
_ => {
|
||||
@@ -5093,13 +5065,13 @@ fn parse_item_foreign_mod(&mut self,
|
||||
attrs: Vec<Attribute> )
|
||||
-> ItemOrViewItem {
|
||||
|
||||
self.expect(&token::LBRACE);
|
||||
self.expect(&token::LBrace);
|
||||
|
||||
let abi = opt_abi.unwrap_or(abi::C);
|
||||
|
||||
let (inner, next) = self.parse_inner_attrs_and_next();
|
||||
let m = self.parse_foreign_mod_items(abi, next);
|
||||
self.expect(&token::RBRACE);
|
||||
self.expect(&token::RBrace);
|
||||
|
||||
let last_span = self.last_span;
|
||||
let item = self.mk_item(lo,
|
||||
@@ -5116,9 +5088,9 @@ fn parse_item_type(&mut self) -> ItemInfo {
|
||||
let ident = self.parse_ident();
|
||||
let mut tps = self.parse_generics();
|
||||
self.parse_where_clause(&mut tps);
|
||||
self.expect(&token::EQ);
|
||||
self.expect(&token::Eq);
|
||||
let ty = self.parse_ty(true);
|
||||
self.expect(&token::SEMI);
|
||||
self.expect(&token::Semi);
|
||||
(ident, ItemTy(ty, tps), None)
|
||||
}
|
||||
|
||||
@@ -5126,7 +5098,7 @@ fn parse_item_type(&mut self) -> ItemInfo {
|
||||
/// this should probably be renamed or refactored...
|
||||
fn parse_struct_def(&mut self) -> P<StructDef> {
|
||||
let mut fields: Vec<StructField> = Vec::new();
|
||||
while self.token != token::RBRACE {
|
||||
while self.token != token::RBrace {
|
||||
fields.push(self.parse_struct_decl_field());
|
||||
}
|
||||
self.bump();
|
||||
@@ -5142,7 +5114,7 @@ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> EnumDef {
|
||||
let mut variants = Vec::new();
|
||||
let mut all_nullary = true;
|
||||
let mut any_disr = None;
|
||||
while self.token != token::RBRACE {
|
||||
while self.token != token::RBrace {
|
||||
let variant_attrs = self.parse_outer_attributes();
|
||||
let vlo = self.span.lo;
|
||||
|
||||
@@ -5153,16 +5125,16 @@ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> EnumDef {
|
||||
let mut args = Vec::new();
|
||||
let mut disr_expr = None;
|
||||
ident = self.parse_ident();
|
||||
if self.eat(&token::LBRACE) {
|
||||
if self.eat(&token::LBrace) {
|
||||
// Parse a struct variant.
|
||||
all_nullary = false;
|
||||
kind = StructVariantKind(self.parse_struct_def());
|
||||
} else if self.token == token::LPAREN {
|
||||
} else if self.token == token::LParen {
|
||||
all_nullary = false;
|
||||
let arg_tys = self.parse_enum_variant_seq(
|
||||
&token::LPAREN,
|
||||
&token::RPAREN,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::LParen,
|
||||
&token::RParen,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_ty(true)
|
||||
);
|
||||
for ty in arg_tys.into_iter() {
|
||||
@@ -5172,7 +5144,7 @@ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> EnumDef {
|
||||
});
|
||||
}
|
||||
kind = TupleVariantKind(args);
|
||||
} else if self.eat(&token::EQ) {
|
||||
} else if self.eat(&token::Eq) {
|
||||
disr_expr = Some(self.parse_expr());
|
||||
any_disr = disr_expr.as_ref().map(|expr| expr.span);
|
||||
kind = TupleVariantKind(args);
|
||||
@@ -5190,9 +5162,9 @@ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> EnumDef {
|
||||
};
|
||||
variants.push(P(spanned(vlo, self.last_span.hi, vr)));
|
||||
|
||||
if !self.eat(&token::COMMA) { break; }
|
||||
if !self.eat(&token::Comma) { break; }
|
||||
}
|
||||
self.expect(&token::RBRACE);
|
||||
self.expect(&token::RBrace);
|
||||
match any_disr {
|
||||
Some(disr_span) if !all_nullary =>
|
||||
self.span_err(disr_span,
|
||||
@@ -5208,7 +5180,7 @@ fn parse_item_enum(&mut self) -> ItemInfo {
|
||||
let id = self.parse_ident();
|
||||
let mut generics = self.parse_generics();
|
||||
self.parse_where_clause(&mut generics);
|
||||
self.expect(&token::LBRACE);
|
||||
self.expect(&token::LBrace);
|
||||
|
||||
let enum_definition = self.parse_enum_def(&generics);
|
||||
(id, ItemEnum(enum_definition, generics), None)
|
||||
@@ -5216,7 +5188,7 @@ fn parse_item_enum(&mut self) -> ItemInfo {
|
||||
|
||||
fn fn_expr_lookahead(tok: &token::Token) -> bool {
|
||||
match *tok {
|
||||
token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
|
||||
token::LParen | token::At | token::Tilde | token::BinOp(_) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
@@ -5225,7 +5197,7 @@ fn fn_expr_lookahead(tok: &token::Token) -> bool {
|
||||
/// the `extern` keyword, if one is found.
|
||||
fn parse_opt_abi(&mut self) -> Option<abi::Abi> {
|
||||
match self.token {
|
||||
token::LIT_STR(s) | token::LIT_STR_RAW(s, _) => {
|
||||
token::LitStr(s) | token::LitStrRaw(s, _) => {
|
||||
self.bump();
|
||||
let the_string = s.as_str();
|
||||
match abi::lookup(the_string) {
|
||||
@@ -5256,7 +5228,7 @@ fn parse_item_or_view_item(&mut self,
|
||||
macros_allowed: bool)
|
||||
-> ItemOrViewItem {
|
||||
let nt_item = match self.token {
|
||||
INTERPOLATED(token::NtItem(ref item)) => {
|
||||
token::Interpolated(token::NtItem(ref item)) => {
|
||||
Some((**item).clone())
|
||||
}
|
||||
_ => None
|
||||
@@ -5280,7 +5252,7 @@ fn parse_item_or_view_item(&mut self,
|
||||
if self.eat_keyword(keywords::Use) {
|
||||
// USE ITEM (IoviViewItem)
|
||||
let view_item = self.parse_use();
|
||||
self.expect(&token::SEMI);
|
||||
self.expect(&token::Semi);
|
||||
return IoviViewItem(ast::ViewItem {
|
||||
node: view_item,
|
||||
attrs: attrs,
|
||||
@@ -5319,7 +5291,7 @@ fn parse_item_or_view_item(&mut self,
|
||||
visibility,
|
||||
maybe_append(attrs, extra_attrs));
|
||||
return IoviItem(item);
|
||||
} else if self.token == token::LBRACE {
|
||||
} else if self.token == token::LBrace {
|
||||
return self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs);
|
||||
}
|
||||
|
||||
@@ -5336,7 +5308,7 @@ fn parse_item_or_view_item(&mut self,
|
||||
}
|
||||
|
||||
// the rest are all guaranteed to be items:
|
||||
if self.is_keyword(keywords::Static) {
|
||||
if self.token.is_keyword(keywords::Static) {
|
||||
// STATIC ITEM
|
||||
self.bump();
|
||||
let m = if self.eat_keyword(keywords::Mut) {MutMutable} else {MutImmutable};
|
||||
@@ -5350,7 +5322,7 @@ fn parse_item_or_view_item(&mut self,
|
||||
maybe_append(attrs, extra_attrs));
|
||||
return IoviItem(item);
|
||||
}
|
||||
if self.is_keyword(keywords::Const) {
|
||||
if self.token.is_keyword(keywords::Const) {
|
||||
// CONST ITEM
|
||||
self.bump();
|
||||
if self.eat_keyword(keywords::Mut) {
|
||||
@@ -5368,7 +5340,7 @@ fn parse_item_or_view_item(&mut self,
|
||||
maybe_append(attrs, extra_attrs));
|
||||
return IoviItem(item);
|
||||
}
|
||||
if self.is_keyword(keywords::Fn) &&
|
||||
if self.token.is_keyword(keywords::Fn) &&
|
||||
self.look_ahead(1, |f| !Parser::fn_expr_lookahead(f)) {
|
||||
// FUNCTION ITEM
|
||||
self.bump();
|
||||
@@ -5383,8 +5355,8 @@ fn parse_item_or_view_item(&mut self,
|
||||
maybe_append(attrs, extra_attrs));
|
||||
return IoviItem(item);
|
||||
}
|
||||
if self.is_keyword(keywords::Unsafe)
|
||||
&& self.look_ahead(1u, |t| *t != token::LBRACE) {
|
||||
if self.token.is_keyword(keywords::Unsafe)
|
||||
&& self.look_ahead(1u, |t| *t != token::LBrace) {
|
||||
// UNSAFE FUNCTION ITEM
|
||||
self.bump();
|
||||
let abi = if self.eat_keyword(keywords::Extern) {
|
||||
@@ -5490,12 +5462,12 @@ fn parse_foreign_item(&mut self,
|
||||
|
||||
let visibility = self.parse_visibility();
|
||||
|
||||
if self.is_keyword(keywords::Static) {
|
||||
if self.token.is_keyword(keywords::Static) {
|
||||
// FOREIGN STATIC ITEM
|
||||
let item = self.parse_item_foreign_static(visibility, attrs);
|
||||
return IoviForeignItem(item);
|
||||
}
|
||||
if self.is_keyword(keywords::Fn) || self.is_keyword(keywords::Unsafe) {
|
||||
if self.token.is_keyword(keywords::Fn) || self.token.is_keyword(keywords::Unsafe) {
|
||||
// FOREIGN FUNCTION ITEM
|
||||
let item = self.parse_item_foreign_fn(visibility, attrs);
|
||||
return IoviForeignItem(item);
|
||||
@@ -5511,27 +5483,27 @@ fn parse_macro_use_or_failure(
|
||||
lo: BytePos,
|
||||
visibility: Visibility
|
||||
) -> ItemOrViewItem {
|
||||
if macros_allowed && !token::is_any_keyword(&self.token)
|
||||
&& self.look_ahead(1, |t| *t == token::NOT)
|
||||
&& (self.look_ahead(2, |t| is_plain_ident(t))
|
||||
|| self.look_ahead(2, |t| *t == token::LPAREN)
|
||||
|| self.look_ahead(2, |t| *t == token::LBRACE)) {
|
||||
if macros_allowed && !self.token.is_any_keyword()
|
||||
&& self.look_ahead(1, |t| *t == token::Not)
|
||||
&& (self.look_ahead(2, |t| t.is_plain_ident())
|
||||
|| self.look_ahead(2, |t| *t == token::LParen)
|
||||
|| self.look_ahead(2, |t| *t == token::LBrace)) {
|
||||
// MACRO INVOCATION ITEM
|
||||
|
||||
// item macro.
|
||||
let pth = self.parse_path(NoTypesAllowed).path;
|
||||
self.expect(&token::NOT);
|
||||
self.expect(&token::Not);
|
||||
|
||||
// a 'special' identifier (like what `macro_rules!` uses)
|
||||
// is optional. We should eventually unify invoc syntax
|
||||
// and remove this.
|
||||
let id = if is_plain_ident(&self.token) {
|
||||
let id = if self.token.is_plain_ident() {
|
||||
self.parse_ident()
|
||||
} else {
|
||||
token::special_idents::invalid // no special identifier
|
||||
};
|
||||
// eat a matched-delimiter token tree:
|
||||
let tts = match token::close_delimiter_for(&self.token) {
|
||||
let tts = match self.token.get_close_delimiter() {
|
||||
Some(ket) => {
|
||||
self.bump();
|
||||
self.parse_seq_to_end(&ket,
|
||||
@@ -5601,11 +5573,11 @@ fn parse_use(&mut self) -> ViewItem_ {
|
||||
fn parse_view_path(&mut self) -> P<ViewPath> {
|
||||
let lo = self.span.lo;
|
||||
|
||||
if self.token == token::LBRACE {
|
||||
if self.token == token::LBrace {
|
||||
// use {foo,bar}
|
||||
let idents = self.parse_unspanned_seq(
|
||||
&token::LBRACE, &token::RBRACE,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::LBrace, &token::RBrace,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_path_list_item());
|
||||
let path = ast::Path {
|
||||
span: mk_sp(lo, self.span.hi),
|
||||
@@ -5619,12 +5591,12 @@ fn parse_view_path(&mut self) -> P<ViewPath> {
|
||||
let first_ident = self.parse_ident();
|
||||
let mut path = vec!(first_ident);
|
||||
match self.token {
|
||||
token::EQ => {
|
||||
token::Eq => {
|
||||
// x = foo::bar
|
||||
self.bump();
|
||||
let path_lo = self.span.lo;
|
||||
path = vec!(self.parse_ident());
|
||||
while self.token == token::MOD_SEP {
|
||||
while self.token == token::ModSep {
|
||||
self.bump();
|
||||
let id = self.parse_ident();
|
||||
path.push(id);
|
||||
@@ -5647,23 +5619,23 @@ fn parse_view_path(&mut self) -> P<ViewPath> {
|
||||
ast::DUMMY_NODE_ID)));
|
||||
}
|
||||
|
||||
token::MOD_SEP => {
|
||||
token::ModSep => {
|
||||
// foo::bar or foo::{a,b,c} or foo::*
|
||||
while self.token == token::MOD_SEP {
|
||||
while self.token == token::ModSep {
|
||||
self.bump();
|
||||
|
||||
match self.token {
|
||||
token::IDENT(i, _) => {
|
||||
token::Ident(i, _) => {
|
||||
self.bump();
|
||||
path.push(i);
|
||||
}
|
||||
|
||||
// foo::bar::{a,b,c}
|
||||
token::LBRACE => {
|
||||
token::LBrace => {
|
||||
let idents = self.parse_unspanned_seq(
|
||||
&token::LBRACE,
|
||||
&token::RBRACE,
|
||||
seq_sep_trailing_allowed(token::COMMA),
|
||||
&token::LBrace,
|
||||
&token::RBrace,
|
||||
seq_sep_trailing_allowed(token::Comma),
|
||||
|p| p.parse_path_list_item()
|
||||
);
|
||||
let path = ast::Path {
|
||||
@@ -5682,7 +5654,7 @@ fn parse_view_path(&mut self) -> P<ViewPath> {
|
||||
}
|
||||
|
||||
// foo::bar::*
|
||||
token::BINOP(token::STAR) => {
|
||||
token::BinOp(token::Star) => {
|
||||
self.bump();
|
||||
let path = ast::Path {
|
||||
span: mk_sp(lo, self.span.hi),
|
||||
@@ -5821,7 +5793,7 @@ fn parse_foreign_items(&mut self, first_item_attrs: Vec<Attribute> ,
|
||||
loop {
|
||||
match self.parse_foreign_item(attrs, macros_allowed) {
|
||||
IoviNone(returned_attrs) => {
|
||||
if self.token == token::RBRACE {
|
||||
if self.token == token::RBrace {
|
||||
attrs = returned_attrs;
|
||||
break
|
||||
}
|
||||
@@ -5860,7 +5832,7 @@ pub fn parse_crate_mod(&mut self) -> Crate {
|
||||
let (inner, next) = self.parse_inner_attrs_and_next();
|
||||
let first_item_outer_attrs = next;
|
||||
// parse the items inside the crate:
|
||||
let m = self.parse_mod_items(token::EOF, first_item_outer_attrs, lo);
|
||||
let m = self.parse_mod_items(token::Eof, first_item_outer_attrs, lo);
|
||||
|
||||
ast::Crate {
|
||||
module: m,
|
||||
@@ -5874,8 +5846,8 @@ pub fn parse_crate_mod(&mut self) -> Crate {
|
||||
pub fn parse_optional_str(&mut self)
|
||||
-> Option<(InternedString, ast::StrStyle)> {
|
||||
let (s, style) = match self.token {
|
||||
token::LIT_STR(s) => (self.id_to_interned_str(s.ident()), ast::CookedStr),
|
||||
token::LIT_STR_RAW(s, n) => {
|
||||
token::LitStr(s) => (self.id_to_interned_str(s.ident()), ast::CookedStr),
|
||||
token::LitStrRaw(s, n) => {
|
||||
(self.id_to_interned_str(s.ident()), ast::RawStr(n))
|
||||
}
|
||||
_ => return None
|
||||
|
||||
+384
-389
@@ -9,9 +9,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
use ast;
|
||||
use ast::{Ident, Name, Mrk};
|
||||
use ext::mtwt;
|
||||
use parse::token;
|
||||
use ptr::P;
|
||||
use util::interner::{RcStr, StrInterner};
|
||||
use util::interner;
|
||||
@@ -22,94 +20,377 @@
|
||||
use std::path::BytesContainer;
|
||||
use std::rc::Rc;
|
||||
|
||||
// NOTE(stage0): remove these re-exports after the next snapshot
|
||||
// (needed to allow quotations to pass stage0)
|
||||
#[cfg(stage0)] pub use self::Plus as PLUS;
|
||||
#[cfg(stage0)] pub use self::Minus as MINUS;
|
||||
#[cfg(stage0)] pub use self::Star as STAR;
|
||||
#[cfg(stage0)] pub use self::Slash as SLASH;
|
||||
#[cfg(stage0)] pub use self::Percent as PERCENT;
|
||||
#[cfg(stage0)] pub use self::Caret as CARET;
|
||||
#[cfg(stage0)] pub use self::And as AND;
|
||||
#[cfg(stage0)] pub use self::Or as OR;
|
||||
#[cfg(stage0)] pub use self::Shl as SHL;
|
||||
#[cfg(stage0)] pub use self::Shr as SHR;
|
||||
#[cfg(stage0)] pub use self::Eq as EQ;
|
||||
#[cfg(stage0)] pub use self::Lt as LT;
|
||||
#[cfg(stage0)] pub use self::Le as LE;
|
||||
#[cfg(stage0)] pub use self::EqEq as EQEQ;
|
||||
#[cfg(stage0)] pub use self::Ne as NE;
|
||||
#[cfg(stage0)] pub use self::Ge as GE;
|
||||
#[cfg(stage0)] pub use self::Gt as GT;
|
||||
#[cfg(stage0)] pub use self::AndAnd as ANDAND;
|
||||
#[cfg(stage0)] pub use self::OrOr as OROR;
|
||||
#[cfg(stage0)] pub use self::Not as NOT;
|
||||
#[cfg(stage0)] pub use self::Tilde as TILDE;
|
||||
#[cfg(stage0)] pub use self::BinOp as BINOP;
|
||||
#[cfg(stage0)] pub use self::BinOpEq as BINOPEQ;
|
||||
#[cfg(stage0)] pub use self::At as AT;
|
||||
#[cfg(stage0)] pub use self::Dot as DOT;
|
||||
#[cfg(stage0)] pub use self::DotDot as DOTDOT;
|
||||
#[cfg(stage0)] pub use self::DotDotDot as DOTDOTDOT;
|
||||
#[cfg(stage0)] pub use self::Comma as COMMA;
|
||||
#[cfg(stage0)] pub use self::Semi as SEMI;
|
||||
#[cfg(stage0)] pub use self::Colon as COLON;
|
||||
#[cfg(stage0)] pub use self::ModSep as MOD_SEP;
|
||||
#[cfg(stage0)] pub use self::RArrow as RARROW;
|
||||
#[cfg(stage0)] pub use self::LArrow as LARROW;
|
||||
#[cfg(stage0)] pub use self::FatArrow as FAT_ARROW;
|
||||
#[cfg(stage0)] pub use self::LParen as LPAREN;
|
||||
#[cfg(stage0)] pub use self::RParen as RPAREN;
|
||||
#[cfg(stage0)] pub use self::LBracket as LBRACKET;
|
||||
#[cfg(stage0)] pub use self::RBracket as RBRACKET;
|
||||
#[cfg(stage0)] pub use self::LBrace as LBRACE;
|
||||
#[cfg(stage0)] pub use self::RBrace as RBRACE;
|
||||
#[cfg(stage0)] pub use self::Pound as POUND;
|
||||
#[cfg(stage0)] pub use self::Dollar as DOLLAR;
|
||||
#[cfg(stage0)] pub use self::Question as QUESTION;
|
||||
#[cfg(stage0)] pub use self::LitByte as LIT_BYTE;
|
||||
#[cfg(stage0)] pub use self::LitChar as LIT_CHAR;
|
||||
#[cfg(stage0)] pub use self::LitInteger as LIT_INTEGER;
|
||||
#[cfg(stage0)] pub use self::LitFloat as LIT_FLOAT;
|
||||
#[cfg(stage0)] pub use self::LitStr as LIT_STR;
|
||||
#[cfg(stage0)] pub use self::LitStrRaw as LIT_STR_RAW;
|
||||
#[cfg(stage0)] pub use self::LitBinary as LIT_BINARY;
|
||||
#[cfg(stage0)] pub use self::LitBinaryRaw as LIT_BINARY_RAW;
|
||||
#[cfg(stage0)] pub use self::Ident as IDENT;
|
||||
#[cfg(stage0)] pub use self::Underscore as UNDERSCORE;
|
||||
#[cfg(stage0)] pub use self::Lifetime as LIFETIME;
|
||||
#[cfg(stage0)] pub use self::Interpolated as INTERPOLATED;
|
||||
#[cfg(stage0)] pub use self::DocComment as DOC_COMMENT;
|
||||
#[cfg(stage0)] pub use self::Whitespace as WS;
|
||||
#[cfg(stage0)] pub use self::Comment as COMMENT;
|
||||
#[cfg(stage0)] pub use self::Shebang as SHEBANG;
|
||||
#[cfg(stage0)] pub use self::Eof as EOF;
|
||||
|
||||
#[allow(non_camel_case_types)]
|
||||
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
|
||||
pub enum BinOp {
|
||||
PLUS,
|
||||
MINUS,
|
||||
STAR,
|
||||
SLASH,
|
||||
PERCENT,
|
||||
CARET,
|
||||
AND,
|
||||
OR,
|
||||
SHL,
|
||||
SHR,
|
||||
pub enum BinOpToken {
|
||||
Plus,
|
||||
Minus,
|
||||
Star,
|
||||
Slash,
|
||||
Percent,
|
||||
Caret,
|
||||
And,
|
||||
Or,
|
||||
Shl,
|
||||
Shr,
|
||||
}
|
||||
|
||||
#[cfg(stage0)]
|
||||
#[allow(non_uppercase_statics)]
|
||||
pub const ModName: bool = true;
|
||||
#[cfg(stage0)]
|
||||
#[allow(non_uppercase_statics)]
|
||||
pub const Plain: bool = false;
|
||||
|
||||
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
|
||||
#[cfg(not(stage0))]
|
||||
pub enum IdentStyle {
|
||||
/// `::` follows the identifier with no whitespace in-between.
|
||||
ModName,
|
||||
Plain,
|
||||
}
|
||||
|
||||
#[allow(non_camel_case_types)]
|
||||
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
|
||||
pub enum Token {
|
||||
/* Expression-operator symbols. */
|
||||
EQ,
|
||||
LT,
|
||||
LE,
|
||||
EQEQ,
|
||||
NE,
|
||||
GE,
|
||||
GT,
|
||||
ANDAND,
|
||||
OROR,
|
||||
NOT,
|
||||
TILDE,
|
||||
BINOP(BinOp),
|
||||
BINOPEQ(BinOp),
|
||||
Eq,
|
||||
Lt,
|
||||
Le,
|
||||
EqEq,
|
||||
Ne,
|
||||
Ge,
|
||||
Gt,
|
||||
AndAnd,
|
||||
OrOr,
|
||||
Not,
|
||||
Tilde,
|
||||
BinOp(BinOpToken),
|
||||
BinOpEq(BinOpToken),
|
||||
|
||||
/* Structural symbols */
|
||||
AT,
|
||||
DOT,
|
||||
DOTDOT,
|
||||
DOTDOTDOT,
|
||||
COMMA,
|
||||
SEMI,
|
||||
COLON,
|
||||
MOD_SEP,
|
||||
RARROW,
|
||||
LARROW,
|
||||
FAT_ARROW,
|
||||
LPAREN,
|
||||
RPAREN,
|
||||
LBRACKET,
|
||||
RBRACKET,
|
||||
LBRACE,
|
||||
RBRACE,
|
||||
POUND,
|
||||
DOLLAR,
|
||||
QUESTION,
|
||||
At,
|
||||
Dot,
|
||||
DotDot,
|
||||
DotDotDot,
|
||||
Comma,
|
||||
Semi,
|
||||
Colon,
|
||||
ModSep,
|
||||
RArrow,
|
||||
LArrow,
|
||||
FatArrow,
|
||||
LParen,
|
||||
RParen,
|
||||
LBracket,
|
||||
RBracket,
|
||||
LBrace,
|
||||
RBrace,
|
||||
Pound,
|
||||
Dollar,
|
||||
Question,
|
||||
|
||||
/* Literals */
|
||||
LIT_BYTE(Name),
|
||||
LIT_CHAR(Name),
|
||||
LIT_INTEGER(Name),
|
||||
LIT_FLOAT(Name),
|
||||
LIT_STR(Name),
|
||||
LIT_STR_RAW(Name, uint), /* raw str delimited by n hash symbols */
|
||||
LIT_BINARY(Name),
|
||||
LIT_BINARY_RAW(Name, uint), /* raw binary str delimited by n hash symbols */
|
||||
LitByte(ast::Name),
|
||||
LitChar(ast::Name),
|
||||
LitInteger(ast::Name),
|
||||
LitFloat(ast::Name),
|
||||
LitStr(ast::Name),
|
||||
LitStrRaw(ast::Name, uint), /* raw str delimited by n hash symbols */
|
||||
LitBinary(ast::Name),
|
||||
LitBinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */
|
||||
|
||||
/* Name components */
|
||||
/// An identifier contains an "is_mod_name" boolean,
|
||||
/// indicating whether :: follows this token with no
|
||||
/// whitespace in between.
|
||||
IDENT(Ident, bool),
|
||||
UNDERSCORE,
|
||||
LIFETIME(Ident),
|
||||
#[cfg(stage0)]
|
||||
Ident(ast::Ident, bool),
|
||||
#[cfg(not(stage0))]
|
||||
Ident(ast::Ident, IdentStyle),
|
||||
Underscore,
|
||||
Lifetime(ast::Ident),
|
||||
|
||||
/* For interpolation */
|
||||
INTERPOLATED(Nonterminal),
|
||||
DOC_COMMENT(Name),
|
||||
Interpolated(Nonterminal),
|
||||
DocComment(ast::Name),
|
||||
|
||||
// Junk. These carry no data because we don't really care about the data
|
||||
// they *would* carry, and don't really want to allocate a new ident for
|
||||
// them. Instead, users could extract that from the associated span.
|
||||
|
||||
/// Whitespace
|
||||
WS,
|
||||
Whitespace,
|
||||
/// Comment
|
||||
COMMENT,
|
||||
SHEBANG(Name),
|
||||
Comment,
|
||||
Shebang(ast::Name),
|
||||
|
||||
EOF,
|
||||
Eof,
|
||||
}
|
||||
|
||||
impl Token {
|
||||
/// Returns `true` if the token can appear at the start of an expression.
|
||||
pub fn can_begin_expr(&self) -> bool {
|
||||
match *self {
|
||||
LParen => true,
|
||||
LBrace => true,
|
||||
LBracket => true,
|
||||
Ident(_, _) => true,
|
||||
Underscore => true,
|
||||
Tilde => true,
|
||||
LitByte(_) => true,
|
||||
LitChar(_) => true,
|
||||
LitInteger(_) => true,
|
||||
LitFloat(_) => true,
|
||||
LitStr(_) => true,
|
||||
LitStrRaw(_, _) => true,
|
||||
LitBinary(_) => true,
|
||||
LitBinaryRaw(_, _) => true,
|
||||
Pound => true,
|
||||
At => true,
|
||||
Not => true,
|
||||
BinOp(Minus) => true,
|
||||
BinOp(Star) => true,
|
||||
BinOp(And) => true,
|
||||
BinOp(Or) => true, // in lambda syntax
|
||||
OrOr => true, // in lambda syntax
|
||||
ModSep => true,
|
||||
Interpolated(NtExpr(..)) => true,
|
||||
Interpolated(NtIdent(..)) => true,
|
||||
Interpolated(NtBlock(..)) => true,
|
||||
Interpolated(NtPath(..)) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the matching close delimiter if this is an open delimiter,
|
||||
/// otherwise `None`.
|
||||
pub fn get_close_delimiter(&self) -> Option<Token> {
|
||||
match *self {
|
||||
LParen => Some(RParen),
|
||||
LBrace => Some(RBrace),
|
||||
LBracket => Some(RBracket),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is any literal
|
||||
pub fn is_lit(&self) -> bool {
|
||||
match *self {
|
||||
LitByte(_) => true,
|
||||
LitChar(_) => true,
|
||||
LitInteger(_) => true,
|
||||
LitFloat(_) => true,
|
||||
LitStr(_) => true,
|
||||
LitStrRaw(_, _) => true,
|
||||
LitBinary(_) => true,
|
||||
LitBinaryRaw(_, _) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is an identifier.
|
||||
pub fn is_ident(&self) -> bool {
|
||||
match *self {
|
||||
Ident(_, _) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is an interpolated path.
|
||||
pub fn is_path(&self) -> bool {
|
||||
match *self {
|
||||
Interpolated(NtPath(..)) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is a path that is not followed by a `::`
|
||||
/// token.
|
||||
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
|
||||
pub fn is_plain_ident(&self) -> bool {
|
||||
match *self {
|
||||
Ident(_, Plain) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is a lifetime.
|
||||
pub fn is_lifetime(&self) -> bool {
|
||||
match *self {
|
||||
Lifetime(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is either the `mut` or `const` keyword.
|
||||
pub fn is_mutability(&self) -> bool {
|
||||
self.is_keyword(keywords::Mut) ||
|
||||
self.is_keyword(keywords::Const)
|
||||
}
|
||||
|
||||
/// Maps a token to its corresponding binary operator.
|
||||
pub fn to_binop(&self) -> Option<ast::BinOp> {
|
||||
match *self {
|
||||
BinOp(Star) => Some(ast::BiMul),
|
||||
BinOp(Slash) => Some(ast::BiDiv),
|
||||
BinOp(Percent) => Some(ast::BiRem),
|
||||
BinOp(Plus) => Some(ast::BiAdd),
|
||||
BinOp(Minus) => Some(ast::BiSub),
|
||||
BinOp(Shl) => Some(ast::BiShl),
|
||||
BinOp(Shr) => Some(ast::BiShr),
|
||||
BinOp(And) => Some(ast::BiBitAnd),
|
||||
BinOp(Caret) => Some(ast::BiBitXor),
|
||||
BinOp(Or) => Some(ast::BiBitOr),
|
||||
Lt => Some(ast::BiLt),
|
||||
Le => Some(ast::BiLe),
|
||||
Ge => Some(ast::BiGe),
|
||||
Gt => Some(ast::BiGt),
|
||||
EqEq => Some(ast::BiEq),
|
||||
Ne => Some(ast::BiNe),
|
||||
AndAnd => Some(ast::BiAnd),
|
||||
OrOr => Some(ast::BiOr),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is a given keyword, `kw`.
|
||||
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
|
||||
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
|
||||
match *self {
|
||||
Ident(sid, Plain) => kw.to_name() == sid.name,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is either a special identifier, or a strict
|
||||
/// or reserved keyword.
|
||||
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
|
||||
pub fn is_any_keyword(&self) -> bool {
|
||||
match *self {
|
||||
Ident(sid, Plain) => {
|
||||
let n = sid.name;
|
||||
|
||||
n == SELF_KEYWORD_NAME
|
||||
|| n == STATIC_KEYWORD_NAME
|
||||
|| n == SUPER_KEYWORD_NAME
|
||||
|| STRICT_KEYWORD_START <= n
|
||||
&& n <= RESERVED_KEYWORD_FINAL
|
||||
},
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token may not appear as an identifier.
|
||||
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
|
||||
pub fn is_strict_keyword(&self) -> bool {
|
||||
match *self {
|
||||
Ident(sid, Plain) => {
|
||||
let n = sid.name;
|
||||
|
||||
n == SELF_KEYWORD_NAME
|
||||
|| n == STATIC_KEYWORD_NAME
|
||||
|| n == SUPER_KEYWORD_NAME
|
||||
|| STRICT_KEYWORD_START <= n
|
||||
&& n <= STRICT_KEYWORD_FINAL
|
||||
},
|
||||
Ident(sid, ModName) => {
|
||||
let n = sid.name;
|
||||
|
||||
n != SELF_KEYWORD_NAME
|
||||
&& n != SUPER_KEYWORD_NAME
|
||||
&& STRICT_KEYWORD_START <= n
|
||||
&& n <= STRICT_KEYWORD_FINAL
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is a keyword that has been reserved for
|
||||
/// possible future use.
|
||||
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
|
||||
pub fn is_reserved_keyword(&self) -> bool {
|
||||
match *self {
|
||||
Ident(sid, Plain) => {
|
||||
let n = sid.name;
|
||||
|
||||
RESERVED_KEYWORD_START <= n
|
||||
&& n <= RESERVED_KEYWORD_FINAL
|
||||
},
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Hygienic identifier equality comparison.
|
||||
///
|
||||
/// See `styntax::ext::mtwt`.
|
||||
pub fn mtwt_eq(&self, other : &Token) -> bool {
|
||||
match (self, other) {
|
||||
(&Ident(id1,_), &Ident(id2,_)) | (&Lifetime(id1), &Lifetime(id2)) =>
|
||||
mtwt::resolve(id1) == mtwt::resolve(id2),
|
||||
_ => *self == *other
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash)]
|
||||
@@ -121,8 +402,10 @@ pub enum Nonterminal {
|
||||
NtPat( P<ast::Pat>),
|
||||
NtExpr( P<ast::Expr>),
|
||||
NtTy( P<ast::Ty>),
|
||||
/// See IDENT, above, for meaning of bool in NtIdent:
|
||||
NtIdent(Box<Ident>, bool),
|
||||
#[cfg(stage0)]
|
||||
NtIdent(Box<ast::Ident>, bool),
|
||||
#[cfg(not(stage0))]
|
||||
NtIdent(Box<ast::Ident>, IdentStyle),
|
||||
/// Stuff inside brackets for attributes
|
||||
NtMeta( P<ast::MetaItem>),
|
||||
NtPath(Box<ast::Path>),
|
||||
@@ -148,204 +431,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn binop_to_string(o: BinOp) -> &'static str {
|
||||
match o {
|
||||
PLUS => "+",
|
||||
MINUS => "-",
|
||||
STAR => "*",
|
||||
SLASH => "/",
|
||||
PERCENT => "%",
|
||||
CARET => "^",
|
||||
AND => "&",
|
||||
OR => "|",
|
||||
SHL => "<<",
|
||||
SHR => ">>"
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_string(t: &Token) -> String {
|
||||
match *t {
|
||||
EQ => "=".into_string(),
|
||||
LT => "<".into_string(),
|
||||
LE => "<=".into_string(),
|
||||
EQEQ => "==".into_string(),
|
||||
NE => "!=".into_string(),
|
||||
GE => ">=".into_string(),
|
||||
GT => ">".into_string(),
|
||||
NOT => "!".into_string(),
|
||||
TILDE => "~".into_string(),
|
||||
OROR => "||".into_string(),
|
||||
ANDAND => "&&".into_string(),
|
||||
BINOP(op) => binop_to_string(op).into_string(),
|
||||
BINOPEQ(op) => {
|
||||
let mut s = binop_to_string(op).into_string();
|
||||
s.push_str("=");
|
||||
s
|
||||
}
|
||||
|
||||
/* Structural symbols */
|
||||
AT => "@".into_string(),
|
||||
DOT => ".".into_string(),
|
||||
DOTDOT => "..".into_string(),
|
||||
DOTDOTDOT => "...".into_string(),
|
||||
COMMA => ",".into_string(),
|
||||
SEMI => ";".into_string(),
|
||||
COLON => ":".into_string(),
|
||||
MOD_SEP => "::".into_string(),
|
||||
RARROW => "->".into_string(),
|
||||
LARROW => "<-".into_string(),
|
||||
FAT_ARROW => "=>".into_string(),
|
||||
LPAREN => "(".into_string(),
|
||||
RPAREN => ")".into_string(),
|
||||
LBRACKET => "[".into_string(),
|
||||
RBRACKET => "]".into_string(),
|
||||
LBRACE => "{".into_string(),
|
||||
RBRACE => "}".into_string(),
|
||||
POUND => "#".into_string(),
|
||||
DOLLAR => "$".into_string(),
|
||||
QUESTION => "?".into_string(),
|
||||
|
||||
/* Literals */
|
||||
LIT_BYTE(b) => {
|
||||
format!("b'{}'", b.as_str())
|
||||
}
|
||||
LIT_CHAR(c) => {
|
||||
format!("'{}'", c.as_str())
|
||||
}
|
||||
LIT_INTEGER(c) | LIT_FLOAT(c) => {
|
||||
c.as_str().into_string()
|
||||
}
|
||||
|
||||
LIT_STR(s) => {
|
||||
format!("\"{}\"", s.as_str())
|
||||
}
|
||||
LIT_STR_RAW(s, n) => {
|
||||
format!("r{delim}\"{string}\"{delim}",
|
||||
delim="#".repeat(n), string=s.as_str())
|
||||
}
|
||||
LIT_BINARY(v) => {
|
||||
format!("b\"{}\"", v.as_str())
|
||||
}
|
||||
LIT_BINARY_RAW(s, n) => {
|
||||
format!("br{delim}\"{string}\"{delim}",
|
||||
delim="#".repeat(n), string=s.as_str())
|
||||
}
|
||||
|
||||
/* Name components */
|
||||
IDENT(s, _) => get_ident(s).get().into_string(),
|
||||
LIFETIME(s) => {
|
||||
format!("{}", get_ident(s))
|
||||
}
|
||||
UNDERSCORE => "_".into_string(),
|
||||
|
||||
/* Other */
|
||||
DOC_COMMENT(s) => s.as_str().into_string(),
|
||||
EOF => "<eof>".into_string(),
|
||||
WS => " ".into_string(),
|
||||
COMMENT => "/* */".into_string(),
|
||||
SHEBANG(s) => format!("/* shebang: {}*/", s.as_str()),
|
||||
|
||||
INTERPOLATED(ref nt) => {
|
||||
match nt {
|
||||
&NtExpr(ref e) => ::print::pprust::expr_to_string(&**e),
|
||||
&NtMeta(ref e) => ::print::pprust::meta_item_to_string(&**e),
|
||||
&NtTy(ref e) => ::print::pprust::ty_to_string(&**e),
|
||||
&NtPath(ref e) => ::print::pprust::path_to_string(&**e),
|
||||
_ => {
|
||||
let mut s = "an interpolated ".into_string();
|
||||
match *nt {
|
||||
NtItem(..) => s.push_str("item"),
|
||||
NtBlock(..) => s.push_str("block"),
|
||||
NtStmt(..) => s.push_str("statement"),
|
||||
NtPat(..) => s.push_str("pattern"),
|
||||
NtMeta(..) => fail!("should have been handled"),
|
||||
NtExpr(..) => fail!("should have been handled"),
|
||||
NtTy(..) => fail!("should have been handled"),
|
||||
NtIdent(..) => s.push_str("identifier"),
|
||||
NtPath(..) => fail!("should have been handled"),
|
||||
NtTT(..) => s.push_str("tt"),
|
||||
NtMatchers(..) => s.push_str("matcher sequence")
|
||||
};
|
||||
s
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn can_begin_expr(t: &Token) -> bool {
|
||||
match *t {
|
||||
LPAREN => true,
|
||||
LBRACE => true,
|
||||
LBRACKET => true,
|
||||
IDENT(_, _) => true,
|
||||
UNDERSCORE => true,
|
||||
TILDE => true,
|
||||
LIT_BYTE(_) => true,
|
||||
LIT_CHAR(_) => true,
|
||||
LIT_INTEGER(_) => true,
|
||||
LIT_FLOAT(_) => true,
|
||||
LIT_STR(_) => true,
|
||||
LIT_STR_RAW(_, _) => true,
|
||||
LIT_BINARY(_) => true,
|
||||
LIT_BINARY_RAW(_, _) => true,
|
||||
POUND => true,
|
||||
AT => true,
|
||||
NOT => true,
|
||||
BINOP(MINUS) => true,
|
||||
BINOP(STAR) => true,
|
||||
BINOP(AND) => true,
|
||||
BINOP(OR) => true, // in lambda syntax
|
||||
OROR => true, // in lambda syntax
|
||||
MOD_SEP => true,
|
||||
INTERPOLATED(NtExpr(..))
|
||||
| INTERPOLATED(NtIdent(..))
|
||||
| INTERPOLATED(NtBlock(..))
|
||||
| INTERPOLATED(NtPath(..)) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the matching close delimiter if this is an open delimiter,
|
||||
/// otherwise `None`.
|
||||
pub fn close_delimiter_for(t: &Token) -> Option<Token> {
|
||||
match *t {
|
||||
LPAREN => Some(RPAREN),
|
||||
LBRACE => Some(RBRACE),
|
||||
LBRACKET => Some(RBRACKET),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_lit(t: &Token) -> bool {
|
||||
match *t {
|
||||
LIT_BYTE(_) => true,
|
||||
LIT_CHAR(_) => true,
|
||||
LIT_INTEGER(_) => true,
|
||||
LIT_FLOAT(_) => true,
|
||||
LIT_STR(_) => true,
|
||||
LIT_STR_RAW(_, _) => true,
|
||||
LIT_BINARY(_) => true,
|
||||
LIT_BINARY_RAW(_, _) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_ident(t: &Token) -> bool {
|
||||
match *t { IDENT(_, _) => true, _ => false }
|
||||
}
|
||||
|
||||
pub fn is_ident_or_path(t: &Token) -> bool {
|
||||
match *t {
|
||||
IDENT(_, _) | INTERPOLATED(NtPath(..)) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_plain_ident(t: &Token) -> bool {
|
||||
match *t { IDENT(_, false) => true, _ => false }
|
||||
}
|
||||
|
||||
// Get the first "argument"
|
||||
macro_rules! first {
|
||||
( $first:expr, $( $remainder:expr, )* ) => ( $first )
|
||||
@@ -376,22 +461,28 @@ pub mod keywords {
|
||||
$( ($rk_name:expr, $rk_variant:ident, $rk_str:expr); )*
|
||||
}
|
||||
) => {
|
||||
static STRICT_KEYWORD_START: Name = first!($( Name($sk_name), )*);
|
||||
static STRICT_KEYWORD_FINAL: Name = last!($( Name($sk_name), )*);
|
||||
static RESERVED_KEYWORD_START: Name = first!($( Name($rk_name), )*);
|
||||
static RESERVED_KEYWORD_FINAL: Name = last!($( Name($rk_name), )*);
|
||||
static STRICT_KEYWORD_START: ast::Name = first!($( ast::Name($sk_name), )*);
|
||||
static STRICT_KEYWORD_FINAL: ast::Name = last!($( ast::Name($sk_name), )*);
|
||||
static RESERVED_KEYWORD_START: ast::Name = first!($( ast::Name($rk_name), )*);
|
||||
static RESERVED_KEYWORD_FINAL: ast::Name = last!($( ast::Name($rk_name), )*);
|
||||
|
||||
pub mod special_idents {
|
||||
use ast::{Ident, Name};
|
||||
use ast;
|
||||
$(
|
||||
#[allow(non_uppercase_statics)]
|
||||
pub const $si_static: Ident = Ident { name: Name($si_name), ctxt: 0 };
|
||||
pub const $si_static: ast::Ident = ast::Ident {
|
||||
name: ast::Name($si_name),
|
||||
ctxt: 0,
|
||||
};
|
||||
)*
|
||||
}
|
||||
|
||||
pub mod special_names {
|
||||
use ast::Name;
|
||||
$( #[allow(non_uppercase_statics)] pub const $si_static: Name = Name($si_name); )*
|
||||
use ast;
|
||||
$(
|
||||
#[allow(non_uppercase_statics)]
|
||||
pub const $si_static: ast::Name = ast::Name($si_name);
|
||||
)*
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -402,7 +493,7 @@ pub mod special_names {
|
||||
* the language and may not appear as identifiers.
|
||||
*/
|
||||
pub mod keywords {
|
||||
use ast::Name;
|
||||
use ast;
|
||||
|
||||
pub enum Keyword {
|
||||
$( $sk_variant, )*
|
||||
@@ -410,10 +501,10 @@ pub enum Keyword {
|
||||
}
|
||||
|
||||
impl Keyword {
|
||||
pub fn to_name(&self) -> Name {
|
||||
pub fn to_name(&self) -> ast::Name {
|
||||
match *self {
|
||||
$( $sk_variant => Name($sk_name), )*
|
||||
$( $rk_variant => Name($rk_name), )*
|
||||
$( $sk_variant => ast::Name($sk_name), )*
|
||||
$( $rk_variant => ast::Name($rk_name), )*
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -432,9 +523,9 @@ fn mk_fresh_ident_interner() -> IdentInterner {
|
||||
}}
|
||||
|
||||
// If the special idents get renumbered, remember to modify these two as appropriate
|
||||
pub const SELF_KEYWORD_NAME: Name = Name(SELF_KEYWORD_NAME_NUM);
|
||||
const STATIC_KEYWORD_NAME: Name = Name(STATIC_KEYWORD_NAME_NUM);
|
||||
const SUPER_KEYWORD_NAME: Name = Name(SUPER_KEYWORD_NAME_NUM);
|
||||
pub const SELF_KEYWORD_NAME: ast::Name = ast::Name(SELF_KEYWORD_NAME_NUM);
|
||||
const STATIC_KEYWORD_NAME: ast::Name = ast::Name(STATIC_KEYWORD_NAME_NUM);
|
||||
const SUPER_KEYWORD_NAME: ast::Name = ast::Name(SUPER_KEYWORD_NAME_NUM);
|
||||
|
||||
pub const SELF_KEYWORD_NAME_NUM: u32 = 1;
|
||||
const STATIC_KEYWORD_NAME_NUM: u32 = 2;
|
||||
@@ -526,34 +617,6 @@ pub mod keywords {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps a token to a record specifying the corresponding binary
|
||||
* operator
|
||||
*/
|
||||
pub fn token_to_binop(tok: &Token) -> Option<ast::BinOp> {
|
||||
match *tok {
|
||||
BINOP(STAR) => Some(ast::BiMul),
|
||||
BINOP(SLASH) => Some(ast::BiDiv),
|
||||
BINOP(PERCENT) => Some(ast::BiRem),
|
||||
BINOP(PLUS) => Some(ast::BiAdd),
|
||||
BINOP(MINUS) => Some(ast::BiSub),
|
||||
BINOP(SHL) => Some(ast::BiShl),
|
||||
BINOP(SHR) => Some(ast::BiShr),
|
||||
BINOP(AND) => Some(ast::BiBitAnd),
|
||||
BINOP(CARET) => Some(ast::BiBitXor),
|
||||
BINOP(OR) => Some(ast::BiBitOr),
|
||||
LT => Some(ast::BiLt),
|
||||
LE => Some(ast::BiLe),
|
||||
GE => Some(ast::BiGe),
|
||||
GT => Some(ast::BiGt),
|
||||
EQEQ => Some(ast::BiEq),
|
||||
NE => Some(ast::BiNe),
|
||||
ANDAND => Some(ast::BiAnd),
|
||||
OROR => Some(ast::BiOr),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
// looks like we can get rid of this completely...
|
||||
pub type IdentInterner = StrInterner;
|
||||
|
||||
@@ -646,7 +709,7 @@ fn encode(&self, s: &mut S) -> Result<(), E> {
|
||||
|
||||
/// Returns the string contents of a name, using the task-local interner.
|
||||
#[inline]
|
||||
pub fn get_name(name: Name) -> InternedString {
|
||||
pub fn get_name(name: ast::Name) -> InternedString {
|
||||
let interner = get_ident_interner();
|
||||
InternedString::new_from_rc_str(interner.get(name))
|
||||
}
|
||||
@@ -654,7 +717,7 @@ pub fn get_name(name: Name) -> InternedString {
|
||||
/// Returns the string contents of an identifier, using the task-local
|
||||
/// interner.
|
||||
#[inline]
|
||||
pub fn get_ident(ident: Ident) -> InternedString {
|
||||
pub fn get_ident(ident: ast::Ident) -> InternedString {
|
||||
get_name(ident.name)
|
||||
}
|
||||
|
||||
@@ -667,32 +730,32 @@ pub fn intern_and_get_ident(s: &str) -> InternedString {
|
||||
|
||||
/// Maps a string to its interned representation.
|
||||
#[inline]
|
||||
pub fn intern(s: &str) -> Name {
|
||||
pub fn intern(s: &str) -> ast::Name {
|
||||
get_ident_interner().intern(s)
|
||||
}
|
||||
|
||||
/// gensym's a new uint, using the current interner.
|
||||
#[inline]
|
||||
pub fn gensym(s: &str) -> Name {
|
||||
pub fn gensym(s: &str) -> ast::Name {
|
||||
get_ident_interner().gensym(s)
|
||||
}
|
||||
|
||||
/// Maps a string to an identifier with an empty syntax context.
|
||||
#[inline]
|
||||
pub fn str_to_ident(s: &str) -> Ident {
|
||||
Ident::new(intern(s))
|
||||
pub fn str_to_ident(s: &str) -> ast::Ident {
|
||||
ast::Ident::new(intern(s))
|
||||
}
|
||||
|
||||
/// Maps a string to a gensym'ed identifier.
|
||||
#[inline]
|
||||
pub fn gensym_ident(s: &str) -> Ident {
|
||||
Ident::new(gensym(s))
|
||||
pub fn gensym_ident(s: &str) -> ast::Ident {
|
||||
ast::Ident::new(gensym(s))
|
||||
}
|
||||
|
||||
// create a fresh name that maps to the same string as the old one.
|
||||
// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
|
||||
// that is, that the new name and the old one are connected to ptr_eq strings.
|
||||
pub fn fresh_name(src: &Ident) -> Name {
|
||||
pub fn fresh_name(src: &ast::Ident) -> ast::Name {
|
||||
let interner = get_ident_interner();
|
||||
interner.gensym_copy(src.name)
|
||||
// following: debug version. Could work in final except that it's incompatible with
|
||||
@@ -703,78 +766,10 @@ pub fn fresh_name(src: &Ident) -> Name {
|
||||
}
|
||||
|
||||
// create a fresh mark.
|
||||
pub fn fresh_mark() -> Mrk {
|
||||
pub fn fresh_mark() -> ast::Mrk {
|
||||
gensym("mark").uint() as u32
|
||||
}
|
||||
|
||||
// See the macro above about the types of keywords
|
||||
|
||||
pub fn is_keyword(kw: keywords::Keyword, tok: &Token) -> bool {
|
||||
match *tok {
|
||||
token::IDENT(sid, false) => { kw.to_name() == sid.name }
|
||||
_ => { false }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_any_keyword(tok: &Token) -> bool {
|
||||
match *tok {
|
||||
token::IDENT(sid, false) => {
|
||||
let n = sid.name;
|
||||
|
||||
n == SELF_KEYWORD_NAME
|
||||
|| n == STATIC_KEYWORD_NAME
|
||||
|| n == SUPER_KEYWORD_NAME
|
||||
|| STRICT_KEYWORD_START <= n
|
||||
&& n <= RESERVED_KEYWORD_FINAL
|
||||
},
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_strict_keyword(tok: &Token) -> bool {
|
||||
match *tok {
|
||||
token::IDENT(sid, false) => {
|
||||
let n = sid.name;
|
||||
|
||||
n == SELF_KEYWORD_NAME
|
||||
|| n == STATIC_KEYWORD_NAME
|
||||
|| n == SUPER_KEYWORD_NAME
|
||||
|| STRICT_KEYWORD_START <= n
|
||||
&& n <= STRICT_KEYWORD_FINAL
|
||||
},
|
||||
token::IDENT(sid, true) => {
|
||||
let n = sid.name;
|
||||
|
||||
n != SELF_KEYWORD_NAME
|
||||
&& n != SUPER_KEYWORD_NAME
|
||||
&& STRICT_KEYWORD_START <= n
|
||||
&& n <= STRICT_KEYWORD_FINAL
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_reserved_keyword(tok: &Token) -> bool {
|
||||
match *tok {
|
||||
token::IDENT(sid, false) => {
|
||||
let n = sid.name;
|
||||
|
||||
RESERVED_KEYWORD_START <= n
|
||||
&& n <= RESERVED_KEYWORD_FINAL
|
||||
},
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mtwt_token_eq(t1 : &Token, t2 : &Token) -> bool {
|
||||
match (t1,t2) {
|
||||
(&IDENT(id1,_),&IDENT(id2,_)) | (&LIFETIME(id1),&LIFETIME(id2)) =>
|
||||
mtwt::resolve(id1) == mtwt::resolve(id2),
|
||||
_ => *t1 == *t2
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
@@ -786,9 +781,9 @@ fn mark_ident(id : ast::Ident, m : ast::Mrk) -> ast::Ident {
|
||||
}
|
||||
|
||||
#[test] fn mtwt_token_eq_test() {
|
||||
assert!(mtwt_token_eq(>,>));
|
||||
assert!(Gt.mtwt_eq(&Gt));
|
||||
let a = str_to_ident("bac");
|
||||
let a1 = mark_ident(a,92);
|
||||
assert!(mtwt_token_eq(&IDENT(a,true),&IDENT(a1,false)));
|
||||
assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
use codemap::{CodeMap, BytePos};
|
||||
use codemap;
|
||||
use diagnostic;
|
||||
use parse::token::{BinOpToken, Token};
|
||||
use parse::token;
|
||||
use parse::lexer::comments;
|
||||
use parse;
|
||||
@@ -181,6 +182,101 @@ pub fn to_string(f: |&mut State| -> IoResult<()>) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn binop_to_string(op: BinOpToken) -> &'static str {
|
||||
match op {
|
||||
token::Plus => "+",
|
||||
token::Minus => "-",
|
||||
token::Star => "*",
|
||||
token::Slash => "/",
|
||||
token::Percent => "%",
|
||||
token::Caret => "^",
|
||||
token::And => "&",
|
||||
token::Or => "|",
|
||||
token::Shl => "<<",
|
||||
token::Shr => ">>",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn token_to_string(tok: &Token) -> String {
|
||||
match *tok {
|
||||
token::Eq => "=".into_string(),
|
||||
token::Lt => "<".into_string(),
|
||||
token::Le => "<=".into_string(),
|
||||
token::EqEq => "==".into_string(),
|
||||
token::Ne => "!=".into_string(),
|
||||
token::Ge => ">=".into_string(),
|
||||
token::Gt => ">".into_string(),
|
||||
token::Not => "!".into_string(),
|
||||
token::Tilde => "~".into_string(),
|
||||
token::OrOr => "||".into_string(),
|
||||
token::AndAnd => "&&".into_string(),
|
||||
token::BinOp(op) => binop_to_string(op).into_string(),
|
||||
token::BinOpEq(op) => format!("{}=", binop_to_string(op)),
|
||||
|
||||
/* Structural symbols */
|
||||
token::At => "@".into_string(),
|
||||
token::Dot => ".".into_string(),
|
||||
token::DotDot => "..".into_string(),
|
||||
token::DotDotDot => "...".into_string(),
|
||||
token::Comma => ",".into_string(),
|
||||
token::Semi => ";".into_string(),
|
||||
token::Colon => ":".into_string(),
|
||||
token::ModSep => "::".into_string(),
|
||||
token::RArrow => "->".into_string(),
|
||||
token::LArrow => "<-".into_string(),
|
||||
token::FatArrow => "=>".into_string(),
|
||||
token::LParen => "(".into_string(),
|
||||
token::RParen => ")".into_string(),
|
||||
token::LBracket => "[".into_string(),
|
||||
token::RBracket => "]".into_string(),
|
||||
token::LBrace => "{".into_string(),
|
||||
token::RBrace => "}".into_string(),
|
||||
token::Pound => "#".into_string(),
|
||||
token::Dollar => "$".into_string(),
|
||||
token::Question => "?".into_string(),
|
||||
|
||||
/* Literals */
|
||||
token::LitByte(b) => format!("b'{}'", b.as_str()),
|
||||
token::LitChar(c) => format!("'{}'", c.as_str()),
|
||||
token::LitFloat(c) => c.as_str().into_string(),
|
||||
token::LitInteger(c) => c.as_str().into_string(),
|
||||
token::LitStr(s) => format!("\"{}\"", s.as_str()),
|
||||
token::LitStrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
|
||||
delim="#".repeat(n),
|
||||
string=s.as_str()),
|
||||
token::LitBinary(v) => format!("b\"{}\"", v.as_str()),
|
||||
token::LitBinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
|
||||
delim="#".repeat(n),
|
||||
string=s.as_str()),
|
||||
|
||||
/* Name components */
|
||||
token::Ident(s, _) => token::get_ident(s).get().into_string(),
|
||||
token::Lifetime(s) => format!("{}", token::get_ident(s)),
|
||||
token::Underscore => "_".into_string(),
|
||||
|
||||
/* Other */
|
||||
token::DocComment(s) => s.as_str().into_string(),
|
||||
token::Eof => "<eof>".into_string(),
|
||||
token::Whitespace => " ".into_string(),
|
||||
token::Comment => "/* */".into_string(),
|
||||
token::Shebang(s) => format!("/* shebang: {}*/", s.as_str()),
|
||||
|
||||
token::Interpolated(ref nt) => match *nt {
|
||||
token::NtExpr(ref e) => expr_to_string(&**e),
|
||||
token::NtMeta(ref e) => meta_item_to_string(&**e),
|
||||
token::NtTy(ref e) => ty_to_string(&**e),
|
||||
token::NtPath(ref e) => path_to_string(&**e),
|
||||
token::NtItem(..) => "an interpolated item".into_string(),
|
||||
token::NtBlock(..) => "an interpolated block".into_string(),
|
||||
token::NtStmt(..) => "an interpolated statement".into_string(),
|
||||
token::NtPat(..) => "an interpolated pattern".into_string(),
|
||||
token::NtIdent(..) => "an interpolated identifier".into_string(),
|
||||
token::NtTT(..) => "an interpolated tt".into_string(),
|
||||
token::NtMatchers(..) => "an interpolated matcher sequence".into_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME (Issue #16472): the thing_to_string_impls macro should go away
|
||||
// after we revise the syntax::ext::quote::ToToken impls to go directly
|
||||
// to token-trees instead of thing -> string -> token-trees.
|
||||
@@ -1026,16 +1122,16 @@ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
|
||||
match *tt {
|
||||
ast::TtDelimited(_, ref delimed) => {
|
||||
let (ref open, ref tts, ref close) = **delimed;
|
||||
try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
|
||||
try!(word(&mut self.s, token_to_string(&open.token).as_slice()));
|
||||
try!(space(&mut self.s));
|
||||
try!(self.print_tts(tts.as_slice()));
|
||||
try!(space(&mut self.s));
|
||||
word(&mut self.s, parse::token::to_string(&close.token).as_slice())
|
||||
word(&mut self.s, token_to_string(&close.token).as_slice())
|
||||
},
|
||||
ast::TtToken(_, ref tk) => {
|
||||
try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
|
||||
try!(word(&mut self.s, token_to_string(tk).as_slice()));
|
||||
match *tk {
|
||||
parse::token::DOC_COMMENT(..) => {
|
||||
parse::token::DocComment(..) => {
|
||||
hardbreak(&mut self.s)
|
||||
}
|
||||
_ => Ok(())
|
||||
@@ -1049,10 +1145,9 @@ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
|
||||
try!(word(&mut self.s, ")"));
|
||||
match *separator {
|
||||
Some(ref tk) => {
|
||||
try!(word(&mut self.s,
|
||||
parse::token::to_string(tk).as_slice()));
|
||||
try!(word(&mut self.s, token_to_string(tk).as_slice()));
|
||||
}
|
||||
None => ()
|
||||
None => {},
|
||||
}
|
||||
match kleene_op {
|
||||
ast::ZeroOrMore => word(&mut self.s, "*"),
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
extern crate rustc;
|
||||
|
||||
use syntax::codemap::Span;
|
||||
use syntax::parse::token::{IDENT, get_ident};
|
||||
use syntax::parse::token;
|
||||
use syntax::ast::{TokenTree, TtToken};
|
||||
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
|
||||
use syntax::ext::build::AstBuilder; // trait for expr_uint
|
||||
@@ -39,7 +39,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
|
||||
("I", 1)];
|
||||
|
||||
let text = match args {
|
||||
[TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
|
||||
[TtToken(_, token::Ident(s, _))] => token::get_ident(s).to_string(),
|
||||
_ => {
|
||||
cx.span_err(sp, "argument should be a single identifier");
|
||||
return DummyResult::any(sp);
|
||||
|
||||
@@ -8,4 +8,4 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
type t = { f: () }; //~ ERROR expected type, found token LBRACE
|
||||
type t = { f: () }; //~ ERROR expected type, found token LBrace
|
||||
|
||||
Reference in New Issue
Block a user