Skip to content

Commit 5d6241d

Browse files
committed
rollup merge of #18430 : bjz/token
Conflicts: src/libsyntax/parse/parser.rs
Commit 5d6241d has 2 parents: 00975e0 and 98a4770

File tree

17 files changed

+373
-377
lines changed

17 files changed

+373
-377
lines changed

src/grammar/verify.rs

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -59,20 +59,20 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
5959
"FLOAT_SUFFIX" => id(),
6060
"INT_SUFFIX" => id(),
6161
"SHL" => token::BinOp(token::Shl),
62-
"LBRACE" => token::LBrace,
62+
"LBRACE" => token::OpenDelim(token::Brace),
6363
"RARROW" => token::Rarrow,
6464
"LIT_STR" => token::LitStr(Name(0)),
6565
"DOTDOT" => token::DotDot,
6666
"MOD_SEP" => token::ModSep,
6767
"DOTDOTDOT" => token::DotDotDot,
6868
"NOT" => token::Not,
6969
"AND" => token::BinOp(token::And),
70-
"LPAREN" => token::LParen,
70+
"LPAREN" => token::OpenDelim(token::Paren),
7171
"ANDAND" => token::AndAnd,
7272
"AT" => token::At,
73-
"LBRACKET" => token::LBracket,
73+
"LBRACKET" => token::OpenDelim(token::Bracket),
7474
"LIT_STR_RAW" => token::LitStrRaw(Name(0), 0),
75-
"RPAREN" => token::RParen,
75+
"RPAREN" => token::CloseDelim(token::Paren),
7676
"SLASH" => token::BinOp(token::Slash),
7777
"COMMA" => token::Comma,
7878
"LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }),
@@ -83,15 +83,15 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
8383
"LIT_CHAR" => token::LitChar(Name(0)),
8484
"LIT_BYTE" => token::LitByte(Name(0)),
8585
"EQ" => token::Eq,
86-
"RBRACKET" => token::RBracket,
86+
"RBRACKET" => token::CloseDelim(token::Bracket),
8787
"COMMENT" => token::Comment,
8888
"DOC_COMMENT" => token::DocComment(Name(0)),
8989
"DOT" => token::Dot,
9090
"EQEQ" => token::EqEq,
9191
"NE" => token::Ne,
9292
"GE" => token::Ge,
9393
"PERCENT" => token::BinOp(token::Percent),
94-
"RBRACE" => token::RBrace,
94+
"RBRACE" => token::CloseDelim(token::Brace),
9595
"BINOP" => token::BinOp(token::Plus),
9696
"POUND" => token::Pound,
9797
"OROR" => token::OrOr,

src/librustc/middle/save/span_utils.rs

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -145,7 +145,7 @@ impl<'a> SpanUtils<'a> {
145145
last_span = None;
146146
let mut next = toks.next_token();
147147

148-
if (next.tok == token::LParen ||
148+
if (next.tok == token::OpenDelim(token::Paren) ||
149149
next.tok == token::Lt) &&
150150
bracket_count == 0 &&
151151
prev.tok.is_ident() {
@@ -164,8 +164,8 @@ impl<'a> SpanUtils<'a> {
164164
}
165165

166166
bracket_count += match prev.tok {
167-
token::LParen | token::Lt => 1,
168-
token::RParen | token::Gt => -1,
167+
token::OpenDelim(token::Paren) | token::Lt => 1,
168+
token::CloseDelim(token::Paren) | token::Gt => -1,
169169
token::BinOp(token::Shr) => -2,
170170
_ => 0
171171
};

src/librustdoc/html/highlight.rs

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -97,8 +97,8 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
9797

9898
// miscellaneous, no highlighting
9999
token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi |
100-
token::Colon | token::ModSep | token::LArrow | token::LParen |
101-
token::RParen | token::LBracket | token::LBrace | token::RBrace |
100+
token::Colon | token::ModSep | token::LArrow | token::OpenDelim(_) |
101+
token::CloseDelim(token::Brace) | token::CloseDelim(token::Paren) |
102102
token::Question => "",
103103
token::Dollar => {
104104
if lexer.peek().tok.is_ident() {
@@ -118,7 +118,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
118118
try!(write!(out, r"<span class='attribute'>#"));
119119
continue
120120
}
121-
token::RBracket => {
121+
token::CloseDelim(token::Bracket) => {
122122
if is_attribute {
123123
is_attribute = false;
124124
try!(write!(out, "]</span>"));

src/libsyntax/ast.rs

Lines changed: 36 additions & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -595,17 +595,38 @@ pub enum CaptureClause {
595595
CaptureByRef,
596596
}
597597

598-
/// A token that delimits a sequence of token trees
599-
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
600-
pub struct Delimiter {
601-
pub span: Span,
602-
pub token: ::parse::token::Token,
603-
}
598+
/// A delimited sequence of token trees
599+
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
600+
pub struct Delimited {
601+
/// The type of delimiter
602+
pub delim: token::DelimToken,
603+
/// The span covering the opening delimiter
604+
pub open_span: Span,
605+
/// The delimited sequence of token trees
606+
pub tts: Vec<TokenTree>,
607+
/// The span covering the closing delimiter
608+
pub close_span: Span,
609+
}
610+
611+
impl Delimited {
612+
/// Returns the opening delimiter as a token.
613+
pub fn open_token(&self) -> token::Token {
614+
token::OpenDelim(self.delim)
615+
}
616+
617+
/// Returns the closing delimiter as a token.
618+
pub fn close_token(&self) -> token::Token {
619+
token::CloseDelim(self.delim)
620+
}
621+
622+
/// Returns the opening delimiter as a token tree.
623+
pub fn open_tt(&self) -> TokenTree {
624+
TtToken(self.open_span, self.open_token())
625+
}
604626

605-
impl Delimiter {
606-
/// Convert the delimiter to a `TtToken`
607-
pub fn to_tt(&self) -> TokenTree {
608-
TtToken(self.span, self.token.clone())
627+
/// Returns the closing delimiter as a token tree.
628+
pub fn close_tt(&self) -> TokenTree {
629+
TtToken(self.close_span, self.close_token())
609630
}
610631
}
611632

@@ -635,15 +656,15 @@ pub enum KleeneOp {
635656
#[doc="For macro invocations; parsing is delegated to the macro"]
636657
pub enum TokenTree {
637658
/// A single token
638-
TtToken(Span, ::parse::token::Token),
659+
TtToken(Span, token::Token),
639660
/// A delimited sequence of token trees
640-
TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
661+
TtDelimited(Span, Rc<Delimited>),
641662

642663
// These only make sense for right-hand-sides of MBE macros:
643664

644665
/// A Kleene-style repetition sequence with an optional separator.
645666
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
646-
TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp),
667+
TtSequence(Span, Rc<Vec<TokenTree>>, Option<token::Token>, KleeneOp),
647668
/// A syntactic variable that will be filled in by macro expansion.
648669
TtNonterminal(Span, Ident)
649670
}
@@ -715,10 +736,10 @@ pub type Matcher = Spanned<Matcher_>;
715736
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
716737
pub enum Matcher_ {
717738
/// Match one token
718-
MatchTok(::parse::token::Token),
739+
MatchTok(token::Token),
719740
/// Match repetitions of a sequence: body, separator, Kleene operator,
720741
/// lo, hi position-in-match-array used:
721-
MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, KleeneOp, uint, uint),
742+
MatchSeq(Vec<Matcher>, Option<token::Token>, KleeneOp, uint, uint),
722743
/// Parse a Rust NT: name to bind, name of NT, position in match array:
723744
MatchNonterminal(Ident, Ident, uint)
724745
}

src/libsyntax/ext/asm.rs

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -84,9 +84,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
8484

8585
let span = p.last_span;
8686

87-
p.expect(&token::LParen);
87+
p.expect(&token::OpenDelim(token::Paren));
8888
let out = p.parse_expr();
89-
p.expect(&token::RParen);
89+
p.expect(&token::CloseDelim(token::Paren));
9090

9191
// Expands a read+write operand into two operands.
9292
//
@@ -129,9 +129,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
129129
cx.span_err(p.last_span, "input operand constraint contains '+'");
130130
}
131131

132-
p.expect(&token::LParen);
132+
p.expect(&token::OpenDelim(token::Paren));
133133
let input = p.parse_expr();
134-
p.expect(&token::RParen);
134+
p.expect(&token::CloseDelim(token::Paren));
135135

136136
inputs.push((constraint, input));
137137
}

src/libsyntax/ext/quote.rs

Lines changed: 21 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -531,6 +531,15 @@ fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
531531
mk_token_path(cx, sp, name)
532532
}
533533

534+
fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P<ast::Expr> {
535+
let name = match delim {
536+
token::Paren => "Paren",
537+
token::Bracket => "Bracket",
538+
token::Brace => "Brace",
539+
};
540+
mk_token_path(cx, sp, name)
541+
}
542+
534543
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
535544
fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
536545
match *tok {
@@ -542,6 +551,15 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
542551
vec!(mk_binop(cx, sp, binop)));
543552
}
544553

554+
token::OpenDelim(delim) => {
555+
return cx.expr_call(sp, mk_token_path(cx, sp, "OpenDelim"),
556+
vec![mk_delim(cx, sp, delim)]);
557+
}
558+
token::CloseDelim(delim) => {
559+
return cx.expr_call(sp, mk_token_path(cx, sp, "CloseDelim"),
560+
vec![mk_delim(cx, sp, delim)]);
561+
}
562+
545563
token::LitByte(i) => {
546564
let e_byte = mk_name(cx, sp, i.ident());
547565

@@ -625,12 +643,6 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
625643
token::RArrow => "RArrow",
626644
token::LArrow => "LArrow",
627645
token::FatArrow => "FatArrow",
628-
token::LParen => "LParen",
629-
token::RParen => "RParen",
630-
token::LBracket => "LBracket",
631-
token::RBracket => "RBracket",
632-
token::LBrace => "LBrace",
633-
token::RBrace => "RBrace",
634646
token::Pound => "Pound",
635647
token::Dollar => "Dollar",
636648
token::Underscore => "Underscore",
@@ -640,7 +652,6 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
640652
mk_token_path(cx, sp, name)
641653
}
642654

643-
644655
fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
645656
match *tt {
646657
ast::TtToken(sp, ref tok) => {
@@ -656,10 +667,9 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
656667
vec!(cx.stmt_expr(e_push))
657668
},
658669
ast::TtDelimited(sp, ref delimed) => {
659-
let (ref open, ref tts, ref close) = **delimed;
660-
mk_tt(cx, sp, &open.to_tt()).into_iter()
661-
.chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
662-
.chain(mk_tt(cx, sp, &close.to_tt()).into_iter())
670+
mk_tt(cx, sp, &delimed.open_tt()).into_iter()
671+
.chain(delimed.tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
672+
.chain(mk_tt(cx, sp, &delimed.close_tt()).into_iter())
663673
.collect()
664674
},
665675
ast::TtSequence(..) => panic!("TtSequence in quote!"),

src/libsyntax/ext/tt/macro_parser.rs

Lines changed: 2 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -355,10 +355,8 @@ pub fn parse(sess: &ParseSess,
355355
// Built-in nonterminals never start with these tokens,
356356
// so we can eliminate them from consideration.
357357
match tok {
358-
token::RParen |
359-
token::RBrace |
360-
token::RBracket => {},
361-
_ => bb_eis.push(ei)
358+
token::CloseDelim(_) => {},
359+
_ => bb_eis.push(ei),
362360
}
363361
}
364362
MatchTok(ref t) => {

src/libsyntax/ext/tt/macro_rules.rs

Lines changed: 1 addition & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -172,10 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
172172
MatchedNonterminal(NtTT(ref tt)) => {
173173
match **tt {
174174
// ignore delimiters
175-
TtDelimited(_, ref delimed) => {
176-
let (_, ref tts, _) = **delimed;
177-
tts.clone()
178-
},
175+
TtDelimited(_, ref delimed) => delimed.tts.clone(),
179176
_ => cx.span_fatal(sp, "macro rhs must be delimited"),
180177
}
181178
},

src/libsyntax/ext/tt/transcribe.rs

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -129,8 +129,7 @@ impl Add<LockstepIterSize, LockstepIterSize> for LockstepIterSize {
129129
fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
130130
match *t {
131131
TtDelimited(_, ref delimed) => {
132-
let (_, ref tts, _) = **delimed;
133-
tts.iter().fold(LisUnconstrained, |size, tt| {
132+
delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
134133
size + lockstep_iter_size(tt, r)
135134
})
136135
},
@@ -207,14 +206,13 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
207206
};
208207
match t {
209208
TtDelimited(_, ref delimed) => {
210-
let (ref open, ref tts, ref close) = **delimed;
211-
let mut forest = Vec::with_capacity(1 + tts.len() + 1);
212-
forest.push(open.to_tt());
213-
forest.extend(tts.iter().map(|x| (*x).clone()));
214-
forest.push(close.to_tt());
209+
let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
210+
tts.push(delimed.open_tt());
211+
tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
212+
tts.push(delimed.close_tt());
215213

216214
r.stack.push(TtFrame {
217-
forest: Rc::new(forest),
215+
forest: Rc::new(tts),
218216
idx: 0,
219217
dotdotdoted: false,
220218
sep: None

src/libsyntax/fold.rs

Lines changed: 8 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -572,18 +572,14 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
572572
TtToken(span, ref tok) =>
573573
TtToken(span, fld.fold_token(tok.clone())),
574574
TtDelimited(span, ref delimed) => {
575-
let (ref open, ref tts, ref close) = **delimed;
576-
TtDelimited(span, Rc::new((
577-
Delimiter {
578-
span: open.span,
579-
token: fld.fold_token(open.token.clone())
580-
},
581-
fld.fold_tts(tts.as_slice()),
582-
Delimiter {
583-
span: close.span,
584-
token: fld.fold_token(close.token.clone())
585-
},
586-
)))
575+
TtDelimited(span, Rc::new(
576+
Delimited {
577+
delim: delimed.delim,
578+
open_span: delimed.open_span,
579+
tts: fld.fold_tts(delimed.tts.as_slice()),
580+
close_span: delimed.close_span,
581+
}
582+
))
587583
},
588584
TtSequence(span, ref pattern, ref sep, is_optional) =>
589585
TtSequence(span,

src/libsyntax/parse/attr.rs

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -81,10 +81,10 @@ impl<'a> ParserAttr for Parser<'a> {
8181
ast::AttrOuter
8282
};
8383

84-
self.expect(&token::LBracket);
84+
self.expect(&token::OpenDelim(token::Bracket));
8585
let meta_item = self.parse_meta_item();
8686
let hi = self.span.hi;
87-
self.expect(&token::RBracket);
87+
self.expect(&token::CloseDelim(token::Bracket));
8888

8989
(mk_sp(lo, hi), meta_item, style)
9090
}
@@ -194,7 +194,7 @@ impl<'a> ParserAttr for Parser<'a> {
194194
let hi = self.span.hi;
195195
P(spanned(lo, hi, ast::MetaNameValue(name, lit)))
196196
}
197-
token::LParen => {
197+
token::OpenDelim(token::Paren) => {
198198
let inner_items = self.parse_meta_seq();
199199
let hi = self.span.hi;
200200
P(spanned(lo, hi, ast::MetaList(name, inner_items)))
@@ -208,15 +208,15 @@ impl<'a> ParserAttr for Parser<'a> {
208208

209209
/// matches meta_seq = ( COMMASEP(meta_item) )
210210
fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
211-
self.parse_seq(&token::LParen,
212-
&token::RParen,
211+
self.parse_seq(&token::OpenDelim(token::Paren),
212+
&token::CloseDelim(token::Paren),
213213
seq_sep_trailing_disallowed(token::Comma),
214214
|p| p.parse_meta_item()).node
215215
}
216216

217217
fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {
218218
match self.token {
219-
token::LParen => self.parse_meta_seq(),
219+
token::OpenDelim(token::Paren) => self.parse_meta_seq(),
220220
_ => Vec::new()
221221
}
222222
}

src/libsyntax/parse/lexer/mod.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -967,12 +967,12 @@ impl<'a> StringReader<'a> {
967967
token::Dot
968968
};
969969
}
970-
'(' => { self.bump(); return token::LParen; }
971-
')' => { self.bump(); return token::RParen; }
972-
'{' => { self.bump(); return token::LBrace; }
973-
'}' => { self.bump(); return token::RBrace; }
974-
'[' => { self.bump(); return token::LBracket; }
975-
']' => { self.bump(); return token::RBracket; }
970+
'(' => { self.bump(); return token::OpenDelim(token::Paren); }
971+
')' => { self.bump(); return token::CloseDelim(token::Paren); }
972+
'{' => { self.bump(); return token::OpenDelim(token::Brace); }
973+
'}' => { self.bump(); return token::CloseDelim(token::Brace); }
974+
'[' => { self.bump(); return token::OpenDelim(token::Bracket); }
975+
']' => { self.bump(); return token::CloseDelim(token::Bracket); }
976976
'@' => { self.bump(); return token::At; }
977977
'#' => { self.bump(); return token::Pound; }
978978
'~' => { self.bump(); return token::Tilde; }

0 commit comments

Comments (0)