
Commit 67ce3f4

syntax: Switch function parameter order in TokenTree::token
1 parent 350a34f commit 67ce3f4
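
The whole commit is a mechanical swap: `TokenTree::token(span, kind)` becomes `TokenTree::token(kind, span)`, so the constructor's arguments line up with `Token::new(kind, span)`, which its body forwards to. Below is a minimal, self-contained sketch of the before/after shape; the types here are simplified stand-ins for illustration only, not the real libsyntax/syntax_pos definitions.

// Simplified stand-ins; the real Span, TokenKind, Token and TokenTree are richer.
#[derive(Clone, Copy, Debug)]
struct Span { lo: u32, hi: u32 }

#[derive(Clone, Debug)]
enum TokenKind { Comma, Eq, FatArrow }

#[derive(Clone, Debug)]
struct Token { kind: TokenKind, span: Span }

impl Token {
    fn new(kind: TokenKind, span: Span) -> Token {
        Token { kind, span }
    }
}

#[derive(Clone, Debug)]
enum TokenTree {
    Token(Token),
}

impl TokenTree {
    // Before this commit: fn token(span: Span, kind: TokenKind) -> TokenTree
    // After this commit: kind first, span second, mirroring Token::new.
    fn token(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token::new(kind, span))
    }
}

fn main() {
    let sp = Span { lo: 0, hi: 1 };
    // Old call-site order: TokenTree::token(sp, TokenKind::Comma)
    // New call-site order:
    let comma = TokenTree::token(TokenKind::Comma, sp);
    println!("{:?}", comma);
}

Every call site in the diffs below changes the same way: the token kind moves to the first argument and the span to the last.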

15 files changed (+63 -62 lines)

src/libsyntax/attr/mod.rs (+5 -5)

@@ -465,10 +465,10 @@ impl MetaItem {
         let mod_sep_span = Span::new(last_pos,
                                      segment.ident.span.lo(),
                                      segment.ident.span.ctxt());
-        idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
+        idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
     }
-    idents.push(TokenTree::token(segment.ident.span,
-                                 TokenKind::from_ast_ident(segment.ident)).into());
+    idents.push(TokenTree::token(TokenKind::from_ast_ident(segment.ident),
+                                 segment.ident.span).into());
     last_pos = segment.ident.span.hi();
 }
 self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
@@ -532,15 +532,15 @@ impl MetaItemKind {
 match *self {
     MetaItemKind::Word => TokenStream::empty(),
     MetaItemKind::NameValue(ref lit) => {
-        let mut vec = vec![TokenTree::token(span, token::Eq).into()];
+        let mut vec = vec![TokenTree::token(token::Eq, span).into()];
         lit.tokens().append_to_tree_and_joint_vec(&mut vec);
         TokenStream::new(vec)
     }
     MetaItemKind::List(ref list) => {
         let mut tokens = Vec::new();
         for (i, item) in list.iter().enumerate() {
             if i > 0 {
-                tokens.push(TokenTree::token(span, token::Comma).into());
+                tokens.push(TokenTree::token(token::Comma, span).into());
             }
             item.tokens().append_to_tree_and_joint_vec(&mut tokens);
         }

src/libsyntax/ext/base.rs (+3 -2)

@@ -268,8 +268,9 @@ impl<F> TTMacroExpander for F
 if let tokenstream::TokenTree::Token(token) = tt {
     if let token::Interpolated(nt) = &token.kind {
         if let token::NtIdent(ident, is_raw) = **nt {
-            *tt = tokenstream::TokenTree::token(ident.span,
-                                                token::Ident(ident.name, is_raw));
+            *tt = tokenstream::TokenTree::token(
+                token::Ident(ident.name, is_raw), ident.span
+            );
         }
     }
 }

src/libsyntax/ext/expand.rs (+2 -2)

@@ -585,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 }
 AttrProcMacro(ref mac, ..) => {
     self.gate_proc_macro_attr_item(attr.span, &item);
-    let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
+    let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
         Annotatable::Item(item) => token::NtItem(item),
         Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
         Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
         Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
         Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
         Annotatable::Expr(expr) => token::NtExpr(expr),
-    }))).into();
+    })), DUMMY_SP).into();
     let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
     let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
     let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,

src/libsyntax/ext/tt/macro_rules.rs (+5 -5)

@@ -270,7 +270,7 @@ pub fn compile(
 quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
     tts: vec![
         quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-        quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
+        quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
         quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
     ],
     separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
 })),
 // to phase into semicolon-termination instead of semicolon-separation
 quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-    tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
+    tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
     separator: None,
     op: quoted::KleeneOp::ZeroOrMore,
     num_captures: 0
@@ -613,7 +613,7 @@ impl FirstSets {

 if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                 subfirst.maybe_empty) {
-    first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
+    first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
 }

 // Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ impl FirstSets {

 if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                 subfirst.maybe_empty) {
-    first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
+    first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
 }

 assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
 let mut new;
 let my_suffix = if let Some(ref u) = seq_rep.separator {
     new = suffix_first.clone();
-    new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
+    new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
     &new
 } else {
     &suffix_first

src/libsyntax/ext/tt/quoted.rs (+5 -5)

@@ -40,7 +40,7 @@ impl Delimited {
     } else {
         span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
     };
-    TokenTree::token(open_span, self.open_token())
+    TokenTree::token(self.open_token(), open_span)
 }

 /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +50,7 @@ impl Delimited {
     } else {
         span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
     };
-    TokenTree::token(close_span, self.close_token())
+    TokenTree::token(self.close_token(), close_span)
 }
 }

@@ -153,7 +153,7 @@ impl TokenTree {
     }
 }

-crate fn token(span: Span, kind: TokenKind) -> TokenTree {
+crate fn token(kind: TokenKind, span: Span) -> TokenTree {
     TokenTree::Token(Token::new(kind, span))
 }
 }
@@ -325,7 +325,7 @@ where
 let (ident, is_raw) = token.ident().unwrap();
 let span = ident.span.with_lo(span.lo());
 if ident.name == kw::Crate && !is_raw {
-    TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw))
+    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
 } else {
     TokenTree::MetaVar(span, ident)
 }
@@ -342,7 +342,7 @@ where
 }

 // There are no more tokens. Just return the `$` we already have.
-None => TokenTree::token(span, token::Dollar),
+None => TokenTree::token(token::Dollar, span),
 },

 // `tree` is an arbitrary token. Keep it.

src/libsyntax/ext/tt/transcribe.rs (+4 -4)

@@ -119,7 +119,7 @@ pub fn transcribe(
     Some((tt, _)) => tt.span(),
     None => DUMMY_SP,
 };
-result.push(TokenTree::token(prev_span, sep).into());
+result.push(TokenTree::token(sep, prev_span).into());
 }
 continue;
 }
@@ -225,7 +225,7 @@ pub fn transcribe(
     result.push(tt.clone().into());
 } else {
     sp = sp.apply_mark(cx.current_expansion.mark);
-    let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
+    let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
     result.push(token.into());
 }
 } else {
@@ -241,8 +241,8 @@ pub fn transcribe(
 let ident =
     Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
 sp = sp.apply_mark(cx.current_expansion.mark);
-result.push(TokenTree::token(sp, token::Dollar).into());
-result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
+result.push(TokenTree::token(token::Dollar, sp).into());
+result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
 }
 }

src/libsyntax/parse/attr.rs (+1 -1)

@@ -157,7 +157,7 @@ impl<'a> Parser<'a> {
 self.check(&token::OpenDelim(DelimToken::Brace)) {
     self.parse_token_tree().into()
 } else if self.eat(&token::Eq) {
-    let eq = TokenTree::token(self.prev_span, token::Eq);
+    let eq = TokenTree::token(token::Eq, self.prev_span);
     let mut is_interpolated_expr = false;
     if let token::Interpolated(nt) = &self.token.kind {
         if let token::NtExpr(..) = **nt {

src/libsyntax/parse/literal.rs (+1 -1)

@@ -261,7 +261,7 @@ impl Lit {
     token::Bool => token::Ident(self.token.symbol, false),
     _ => token::Literal(self.token),
 };
-TokenTree::token(self.span, token).into()
+TokenTree::token(token, self.span).into()
 }
 }

src/libsyntax/parse/mod.rs (+7 -7)

@@ -476,23 +476,23 @@ mod tests {
 let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

 let expected = TokenStream::new(vec![
-    TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(),
-    TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(),
+    TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
+    TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
     TokenTree::Delimited(
         DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
         token::DelimToken::Paren,
         TokenStream::new(vec![
-            TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(),
-            TokenTree::token(sp(8, 9), token::Colon).into(),
-            TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(),
+            TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
+            TokenTree::token(token::Colon, sp(8, 9)).into(),
+            TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
         ]).into(),
     ).into(),
     TokenTree::Delimited(
         DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
         token::DelimToken::Brace,
         TokenStream::new(vec![
-            TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(),
-            TokenTree::token(sp(18, 19), token::Semi).into(),
+            TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
+            TokenTree::token(token::Semi, sp(18, 19)).into(),
         ]).into(),
     ).into()
 ]);

src/libsyntax/parse/parser.rs (+7 -7)

@@ -362,11 +362,11 @@ impl TokenCursor {
 delim_span,
 token::Bracket,
 [
-    TokenTree::token(sp, token::Ident(sym::doc, false)),
-    TokenTree::token(sp, token::Eq),
-    TokenTree::token(sp, token::TokenKind::lit(
+    TokenTree::token(token::Ident(sym::doc, false), sp),
+    TokenTree::token(token::Eq, sp),
+    TokenTree::token(token::TokenKind::lit(
         token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
-    )),
+    ), sp),
 ]
 .iter().cloned().collect::<TokenStream>().into(),
 );
@@ -375,10 +375,10 @@ impl TokenCursor {
 delim_span,
 token::NoDelim,
 &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-    [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
+    [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
         .iter().cloned().collect::<TokenStream>().into()
 } else {
-    [TokenTree::token(sp, token::Pound), body]
+    [TokenTree::token(token::Pound, sp), body]
         .iter().cloned().collect::<TokenStream>().into()
 },
 )));
@@ -4344,7 +4344,7 @@ impl<'a> Parser<'a> {
 };
 TokenStream::new(vec![
     args.into(),
-    TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
+    TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
     body.into(),
 ])
 } else {
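
For context on the TokenCursor hunks above: they assemble the token form of a doc comment's desugaring, where an outer comment like /// text behaves like #[doc = r"text"] and an inner //! comment like #![doc = ...]. A rough, hypothetical sketch of that token shape follows; the types and names here are throwaway illustrations, not libsyntax APIs (the real code builds spanned TokenTrees and a bracket-delimited TokenStream).

// Throwaway illustration types; not the real parser.
#[derive(Debug)]
enum Tok {
    Pound,               // `#`
    Not,                 // `!`, only for inner (`//!`) doc comments
    OpenBracket,
    Ident(&'static str), // `doc`
    Eq,
    RawStr(String),      // the comment text as a raw string literal
    CloseBracket,
}

fn desugar_doc_comment(text: &str, inner: bool) -> Vec<Tok> {
    let mut toks = vec![Tok::Pound];
    if inner {
        toks.push(Tok::Not); // `#![doc = ...]` for inner doc comments
    }
    toks.push(Tok::OpenBracket);
    toks.push(Tok::Ident("doc"));
    toks.push(Tok::Eq);
    toks.push(Tok::RawStr(text.to_string()));
    toks.push(Tok::CloseBracket);
    toks
}

fn main() {
    // `/// hello` roughly corresponds to `#[doc = r"hello"]`
    println!("{:?}", desugar_doc_comment("hello", false));
}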

src/libsyntax/parse/token.rs (+4 -4)

@@ -763,10 +763,10 @@ impl Nonterminal {
     prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
 }
 Nonterminal::NtIdent(ident, is_raw) => {
-    Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into())
+    Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
 }
 Nonterminal::NtLifetime(ident) => {
-    Some(TokenTree::token(ident.span, Lifetime(ident.name)).into())
+    Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
 }
 Nonterminal::NtTT(ref tt) => {
     Some(tt.clone().into())
@@ -852,7 +852,7 @@ fn prepend_attrs(sess: &ParseSess,
 if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
     let ident = attr.path.segments[0].ident;
     let token = Ident(ident.name, ident.as_str().starts_with("r#"));
-    brackets.push(tokenstream::TokenTree::token(ident.span, token));
+    brackets.push(tokenstream::TokenTree::token(token, ident.span));

 // ... and for more complicated paths, fall back to a reparse hack that
 // should eventually be removed.
@@ -866,7 +866,7 @@ fn prepend_attrs(sess: &ParseSess,
 // The span we list here for `#` and for `[ ... ]` are both wrong in
 // that it encompasses more than each token, but it hopefully is "good
 // enough" for now at least.
-builder.push(tokenstream::TokenTree::token(attr.span, Pound));
+builder.push(tokenstream::TokenTree::token(Pound, attr.span));
 let delim_span = DelimSpan::from_single(attr.span);
 builder.push(tokenstream::TokenTree::Delimited(
     delim_span, DelimToken::Bracket, brackets.build().into()));

src/libsyntax/tokenstream.rs (+9 -9)

@@ -138,7 +138,7 @@ impl TokenTree {
     TokenStream::new(vec![(self, Joint)])
 }

-pub fn token(span: Span, kind: TokenKind) -> TokenTree {
+pub fn token(kind: TokenKind, span: Span) -> TokenTree {
     TokenTree::Token(Token::new(kind, span))
 }

@@ -149,7 +149,7 @@ impl TokenTree {
 } else {
     span.with_hi(span.lo() + BytePos(delim.len() as u32))
 };
-TokenTree::token(open_span, token::OpenDelim(delim))
+TokenTree::token(token::OpenDelim(delim), open_span)
 }

 /// Returns the closing delimiter as a token tree.
@@ -159,7 +159,7 @@ impl TokenTree {
 } else {
     span.with_lo(span.hi() - BytePos(delim.len() as u32))
 };
-TokenTree::token(close_span, token::CloseDelim(delim))
+TokenTree::token(token::CloseDelim(delim), close_span)
 }
 }

@@ -212,7 +212,7 @@ impl TokenStream {
     _ => continue,
 };
 let sp = sp.shrink_to_hi();
-let comma = (TokenTree::token(sp, token::Comma), NonJoint);
+let comma = (TokenTree::token(token::Comma, sp), NonJoint);
 suggestion = Some((pos, comma, sp));
 }
 }
@@ -433,7 +433,7 @@ impl TokenStreamBuilder {
 let last_stream = self.0.pop().unwrap();
 self.push_all_but_last_tree(&last_stream);
 let glued_span = last_token.span.to(token.span);
-let glued_tt = TokenTree::token(glued_span, glued_tok);
+let glued_tt = TokenTree::token(glued_tok, glued_span);
 let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
 self.0.push(glued_tokenstream);
 self.push_all_but_first_tree(&stream);
@@ -660,7 +660,7 @@ mod tests {
 with_default_globals(|| {
     let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
     let test1: TokenStream =
-        TokenTree::token(sp(0, 1), token::Ident(Name::intern("a"), false)).into();
+        TokenTree::token(token::Ident(Name::intern("a"), false), sp(0, 1)).into();
     let test2 = string_to_ts("foo(bar::baz)");

     assert_eq!(test0.is_empty(), true);
@@ -673,9 +673,9 @@ mod tests {
 fn test_dotdotdot() {
     with_default_globals(|| {
         let mut builder = TokenStreamBuilder::new();
-        builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
-        builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
-        builder.push(TokenTree::token(sp(2, 3), token::Dot));
+        builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
+        builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
+        builder.push(TokenTree::token(token::Dot, sp(2, 3)));
         let stream = builder.build();
         assert!(stream.eq_unspanned(&string_to_ts("...")));
         assert_eq!(stream.trees().count(), 1);

src/libsyntax_ext/assert.rs (+1 -1)

@@ -30,11 +30,11 @@ pub fn expand_assert<'cx>(
 path: Path::from_ident(Ident::new(sym::panic, sp)),
 tts: custom_message.unwrap_or_else(|| {
     TokenStream::from(TokenTree::token(
-        DUMMY_SP,
         TokenKind::lit(token::Str, Symbol::intern(&format!(
             "assertion failed: {}",
             pprust::expr_to_string(&cond_expr).escape_debug()
         )), None),
+        DUMMY_SP,
     ))
 }).into(),
 delim: MacDelimiter::Parenthesis,

src/libsyntax_ext/deriving/custom.rs (+1 -1)

@@ -69,7 +69,7 @@ impl MultiItemModifier for ProcMacroDerive {
 MarkAttrs(&self.attrs).visit_item(&item);

 let token = token::Interpolated(Lrc::new(token::NtItem(item)));
-let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
+let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();

 let server = proc_macro_server::Rustc::new(ecx);
 let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
