
Commit eb3ac29

Reduce the size of Token and make it cheaper to clone by refactoring
`Token::Interpolated(Nonterminal)` -> `Token::Interpolated(Rc<Nonterminal>)`.

1 parent 5f280a5 · commit eb3ac29

10 files changed: +159 −226 lines

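The core of the change, as a simplified sketch: wrapping the interpolated AST fragment in an Rc means cloning a Token is just a reference-count bump, and the Interpolated variant shrinks to a single pointer. The types below are illustrative stand-ins for the real Token and Nonterminal enums, not the compiler's definitions.

    use std::rc::Rc;

    // Stand-in for the parsed AST fragment carried by an interpolated token.
    #[derive(Clone, Debug)]
    struct Nonterminal(String);

    // Before: Interpolated(Nonterminal) stored the fragment inline, so every
    // Token::clone() deep-copied it and Token was as large as Nonterminal.
    // After: the variant holds Rc<Nonterminal>, one pointer wide.
    #[derive(Clone, Debug)]
    enum Token {
        Interpolated(Rc<Nonterminal>),
        Comma,
    }

    fn main() {
        let tok = Token::Interpolated(Rc::new(Nonterminal("an expr".into())));
        let copy = tok.clone(); // cheap: bumps the Rc count, no deep copy of the fragment
        println!("{:?} {:?} size = {}", tok, copy, std::mem::size_of::<Token>());
    }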

src/libsyntax/ext/quote.rs

Lines changed: 32 additions & 22 deletions

@@ -80,67 +80,71 @@ pub mod rt {
 
     impl ToTokens for ast::Path {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP,
-                                  token::Interpolated(token::NtPath(Box::new(self.clone()))))]
+            let nt = token::NtPath(self.clone());
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::Ty {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))]
+            let nt = token::NtTy(P(self.clone()));
+            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::Block {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))]
+            let nt = token::NtBlock(P(self.clone()));
+            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::Generics {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))]
+            let nt = token::NtGenerics(self.clone());
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::WhereClause {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP,
-                                  token::Interpolated(token::NtWhereClause(self.clone())))]
+            let nt = token::NtWhereClause(self.clone());
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for P<ast::Item> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))]
+            let nt = token::NtItem(self.clone());
+            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::ImplItem {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(self.span,
-                                  token::Interpolated(token::NtImplItem(P(self.clone()))))]
+            let nt = token::NtImplItem(self.clone());
+            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for P<ast::ImplItem> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))]
+            let nt = token::NtImplItem((**self).clone());
+            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::TraitItem {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(self.span,
-                                  token::Interpolated(token::NtTraitItem(P(self.clone()))))]
+            let nt = token::NtTraitItem(self.clone());
+            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::Stmt {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            let mut tts = vec![
-                TokenTree::Token(self.span, token::Interpolated(token::NtStmt(P(self.clone()))))
-            ];
+            let nt = token::NtStmt(self.clone());
+            let mut tts = vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))];
 
             // Some statements require a trailing semicolon.
             if classify::stmt_ends_with_semi(&self.node) {
@@ -153,31 +157,36 @@ pub mod rt {
 
     impl ToTokens for P<ast::Expr> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))]
+            let nt = token::NtExpr(self.clone());
+            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for P<ast::Pat> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))]
+            let nt = token::NtPat(self.clone());
+            vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::Arm {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))]
+            let nt = token::NtArm(self.clone());
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for ast::Arg {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArg(self.clone())))]
+            let nt = token::NtArg(self.clone());
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
         }
     }
 
     impl ToTokens for P<ast::Block> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtBlock(self.clone())))]
+            let nt = token::NtBlock(self.clone());
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
         }
     }
 
@@ -204,7 +213,8 @@ pub mod rt {
 
     impl ToTokens for P<ast::MetaItem> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))]
+            let nt = token::NtMeta(self.clone());
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
         }
     }
 
src/libsyntax/ext/tt/macro_parser.rs

Lines changed: 6 additions & 7 deletions

@@ -89,7 +89,6 @@ use parse::token::{DocComment, MatchNt, SubstNt};
 use parse::token::{Token, Nonterminal};
 use parse::token;
 use print::pprust;
-use ptr::P;
 use tokenstream::{self, TokenTree};
 use util::small_vector::SmallVector;
 
@@ -198,7 +197,7 @@ pub fn initial_matcher_pos(ms: Vec<TokenTree>, sep: Option<Token>, lo: BytePos)
 
 pub enum NamedMatch {
     MatchedSeq(Vec<Rc<NamedMatch>>, syntax_pos::Span),
-    MatchedNonterminal(Nonterminal)
+    MatchedNonterminal(Rc<Nonterminal>)
 }
 
 pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
@@ -482,7 +481,7 @@ pub fn parse(sess: &ParseSess, mut rdr: TtReader, ms: &[TokenTree]) -> NamedPars
             if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) {
                 let match_cur = ei.match_cur;
                 (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
-                    parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
+                    Rc::new(parse_nt(&mut rust_parser, span, &ident.name.as_str())))));
                 ei.idx += 1;
                 ei.match_cur += 1;
             } else {
@@ -503,7 +502,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
        "tt" => {
            p.quote_depth += 1; //but in theory, non-quoted tts might be useful
            let res: ::parse::PResult<'a, _> = p.parse_token_tree();
-            let res = token::NtTT(P(panictry!(res)));
+            let res = token::NtTT(panictry!(res));
            p.quote_depth -= 1;
            return res;
        }
@@ -521,7 +520,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
        },
        "block" => token::NtBlock(panictry!(p.parse_block())),
        "stmt" => match panictry!(p.parse_stmt()) {
-            Some(s) => token::NtStmt(P(s)),
+            Some(s) => token::NtStmt(s),
            None => {
                p.fatal("expected a statement").emit();
                panic!(FatalError);
@@ -534,7 +533,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
        "ident" => match p.token {
            token::Ident(sn) => {
                p.bump();
-                token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
+                token::NtIdent(Spanned::<Ident>{node: sn, span: p.span})
            }
            _ => {
                let token_str = pprust::token_to_string(&p.token);
@@ -544,7 +543,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
            }
        },
        "path" => {
-            token::NtPath(Box::new(panictry!(p.parse_path(PathStyle::Type))))
+            token::NtPath(panictry!(p.parse_path(PathStyle::Type)))
        },
        "meta" => token::NtMeta(panictry!(p.parse_meta_item())),
        // this is not supposed to happen, since it has been checked

src/libsyntax/ext/tt/macro_rules.rs

Lines changed: 14 additions & 8 deletions

@@ -236,22 +236,28 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     // Extract the arguments:
     let lhses = match **argument_map.get(&lhs_nm).unwrap() {
         MatchedSeq(ref s, _) => {
-            s.iter().map(|m| match **m {
-                MatchedNonterminal(NtTT(ref tt)) => {
-                    valid &= check_lhs_nt_follows(sess, tt);
-                    (**tt).clone()
+            s.iter().map(|m| {
+                if let MatchedNonterminal(ref nt) = **m {
+                    if let NtTT(ref tt) = **nt {
+                        valid &= check_lhs_nt_follows(sess, tt);
+                        return (*tt).clone();
+                    }
                 }
-                _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
             }).collect::<Vec<TokenTree>>()
         }
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
     };
 
     let rhses = match **argument_map.get(&rhs_nm).unwrap() {
         MatchedSeq(ref s, _) => {
-            s.iter().map(|m| match **m {
-                MatchedNonterminal(NtTT(ref tt)) => (**tt).clone(),
-                _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
+            s.iter().map(|m| {
+                if let MatchedNonterminal(ref nt) = **m {
+                    if let NtTT(ref tt) = **nt {
+                        return (*tt).clone();
+                    }
+                }
+                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
             }).collect()
         }
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
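The switch from single match arms to nested if-lets in compile() above is forced by the new indirection: a pattern like MatchedNonterminal(NtTT(ref tt)) cannot reach through the Rc in MatchedNonterminal(Rc<Nonterminal>), so the Rc has to be dereferenced and the inner enum matched separately. A minimal illustration of that constraint, using hypothetical stand-in types rather than the compiler's:

    use std::rc::Rc;

    #[allow(dead_code)]
    enum Inner { Tt(String), Other }

    #[allow(dead_code)]
    enum Matched { Nonterminal(Rc<Inner>), Seq }

    fn extract(m: &Matched) -> Option<String> {
        // `match *m { Matched::Nonterminal(Inner::Tt(ref s)) => ... }` would not
        // compile: patterns cannot look through an Rc. Deref the Rc explicitly,
        // then match the inner enum.
        if let Matched::Nonterminal(ref nt) = *m {
            if let Inner::Tt(ref s) = **nt {
                return Some(s.clone());
            }
        }
        None
    }

    fn main() {
        let m = Matched::Nonterminal(Rc::new(Inner::Tt("token tree".into())));
        println!("{:?}", extract(&m));
    }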

src/libsyntax/ext/tt/transcribe.rs

Lines changed: 11 additions & 13 deletions

@@ -277,39 +277,37 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 return ret_val;
                 // this can't be 0 length, just like TokenTree::Delimited
             }
-            Some(cur_matched) => {
-                match *cur_matched {
+            Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
+                match **nt {
                     // sidestep the interpolation tricks for ident because
                     // (a) idents can be in lots of places, so it'd be a pain
                     // (b) we actually can, since it's a token.
-                    MatchedNonterminal(NtIdent(ref sn)) => {
+                    NtIdent(ref sn) => {
                         r.stack.last_mut().unwrap().idx += 1;
                         r.cur_span = sn.span;
                         r.cur_tok = token::Ident(sn.node);
                         return ret_val;
                     }
-                    MatchedNonterminal(NtTT(ref tt)) => {
+                    NtTT(_) => {
                         r.stack.push(TtFrame {
-                            forest: TokenTree::Token(sp, Interpolated(NtTT(tt.clone()))),
+                            forest: TokenTree::Token(sp, Interpolated(nt.clone())),
                             idx: 0,
                             dotdotdoted: false,
                             sep: None,
                         });
                     }
-                    MatchedNonterminal(ref other_whole_nt) => {
+                    _ => {
                         r.stack.last_mut().unwrap().idx += 1;
                         // FIXME(pcwalton): Bad copy.
                         r.cur_span = sp;
-                        r.cur_tok = Interpolated((*other_whole_nt).clone());
+                        r.cur_tok = Interpolated(nt.clone());
                         return ret_val;
                     }
-                    MatchedSeq(..) => {
-                        panic!(r.sp_diag.span_fatal(
-                            sp, /* blame the macro writer */
-                            &format!("variable '{}' is still repeating at this depth",
-                                     ident)));
-                    }
                 }
+            } else {
+                panic!(r.sp_diag.span_fatal(
+                    sp, /* blame the macro writer */
+                    &format!("variable '{}' is still repeating at this depth", ident)));
            }
        }
    }

src/libsyntax/fold.rs

Lines changed: 18 additions & 13 deletions

@@ -576,7 +576,13 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
     match t {
         token::Ident(id) => token::Ident(fld.fold_ident(id)),
         token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
-        token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
+        token::Interpolated(nt) => {
+            let nt = match Rc::try_unwrap(nt) {
+                Ok(nt) => nt,
+                Err(nt) => (*nt).clone(),
+            };
+            token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
+        }
         token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
         token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
         _ => t
@@ -614,26 +620,25 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
             .expect_one("expected fold to produce exactly one item")),
         token::NtBlock(block) => token::NtBlock(fld.fold_block(block)),
         token::NtStmt(stmt) =>
-            token::NtStmt(stmt.map(|stmt| fld.fold_stmt(stmt)
+            token::NtStmt(fld.fold_stmt(stmt)
             // this is probably okay, because the only folds likely
             // to peek inside interpolated nodes will be renamings/markings,
             // which map single items to single items
-            .expect_one("expected fold to produce exactly one statement"))),
+            .expect_one("expected fold to produce exactly one statement")),
         token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
         token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
         token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
-        token::NtIdent(id) =>
-            token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
+        token::NtIdent(id) => token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
-        token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
-        token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
+        token::NtPath(path) => token::NtPath(fld.fold_path(path)),
+        token::NtTT(tt) => token::NtTT(fld.fold_tt(&tt)),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
-        token::NtImplItem(arm) =>
-            token::NtImplItem(arm.map(|arm| fld.fold_impl_item(arm)
-                              .expect_one("expected fold to produce exactly one item"))),
-        token::NtTraitItem(arm) =>
-            token::NtTraitItem(arm.map(|arm| fld.fold_trait_item(arm)
-                               .expect_one("expected fold to produce exactly one item"))),
+        token::NtImplItem(item) =>
+            token::NtImplItem(fld.fold_impl_item(item)
+                .expect_one("expected fold to produce exactly one item")),
+        token::NtTraitItem(item) =>
+            token::NtTraitItem(fld.fold_trait_item(item)
+                .expect_one("expected fold to produce exactly one item")),
         token::NtGenerics(generics) => token::NtGenerics(fld.fold_generics(generics)),
         token::NtWhereClause(where_clause) =>
             token::NtWhereClause(fld.fold_where_clause(where_clause)),
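The new Interpolated arm in noop_fold_token uses Rc::try_unwrap so the fold only clones the Nonterminal when other references to it are still alive; if the folder holds the last Rc, the value is moved out for free. A standalone sketch of that pattern (the helper name and example values are illustrative, not from the commit):

    use std::rc::Rc;

    // Take ownership of the value behind an Rc, cloning only if it is still shared.
    fn take_or_clone<T: Clone>(rc: Rc<T>) -> T {
        match Rc::try_unwrap(rc) {
            Ok(value) => value,               // sole owner: move the value out, no clone
            Err(shared) => (*shared).clone(), // still shared elsewhere: fall back to a clone
        }
    }

    fn main() {
        let unique = Rc::new(vec![1, 2, 3]);
        let owned = take_or_clone(unique); // no clone happens here

        let shared = Rc::new(vec![4, 5, 6]);
        let other_handle = shared.clone();  // second reference keeps it shared
        let copied = take_or_clone(shared); // must clone, other_handle still exists

        println!("{:?} {:?} {:?}", owned, copied, other_handle);
    }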

src/libsyntax/parse/attr.rs

Lines changed: 4 additions & 1 deletion

@@ -215,7 +215,10 @@ impl<'a> Parser<'a> {
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, P<ast::MetaItem>> {
         let nt_meta = match self.token {
-            token::Interpolated(token::NtMeta(ref e)) => Some(e.clone()),
+            token::Interpolated(ref nt) => match **nt {
+                token::NtMeta(ref e) => Some(e.clone()),
+                _ => None,
+            },
             _ => None,
         };
 
