Skip to content

Commit 3c9aef1

Browse files
committed
Use TokenStream::concat more.
It's a better choice in a few places.
1 parent 67f8fb5 commit 3c9aef1

File tree

1 file changed

+11
-17
lines changed

1 file changed

+11
-17
lines changed

src/libsyntax/tokenstream.rs

Lines changed: 11 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -167,9 +167,9 @@ impl TokenStream {
167167
/// separating the two arguments with a comma for diagnostic suggestions.
168168
pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
169169
// Used to suggest if a user writes `foo!(a b);`
170-
if let TokenStreamKind::Stream(ref slice) = self.kind {
170+
if let TokenStreamKind::Stream(ref stream) = self.kind {
171171
let mut suggestion = None;
172-
let mut iter = slice.iter().enumerate().peekable();
172+
let mut iter = stream.iter().enumerate().peekable();
173173
while let Some((pos, ts)) = iter.next() {
174174
if let Some((_, next)) = iter.peek() {
175175
let sp = match (&ts.kind, &next.kind) {
@@ -189,13 +189,12 @@ impl TokenStream {
189189
}
190190
}
191191
if let Some((pos, comma, sp)) = suggestion {
192-
let mut new_slice = vec![];
193-
let parts = slice.split_at(pos + 1);
194-
new_slice.extend_from_slice(parts.0);
195-
new_slice.push(comma);
196-
new_slice.extend_from_slice(parts.1);
197-
let slice = Lrc::new(new_slice);
198-
return Some((TokenStream { kind: TokenStreamKind::Stream(slice) }, sp));
192+
let mut new_stream = vec![];
193+
let parts = stream.split_at(pos + 1);
194+
new_stream.extend_from_slice(parts.0);
195+
new_stream.push(comma);
196+
new_stream.extend_from_slice(parts.1);
197+
return Some((TokenStream::concat(new_stream), sp));
199198
}
200199
}
201200
None
@@ -273,12 +272,7 @@ impl Extend<TokenStream> for TokenStream {
273272
// Build the resulting token stream. If it contains more than one token,
274273
// preserve capacity in the vector in anticipation of the caller
275274
// performing additional calls to extend.
276-
let mut tts = builder.0;
277-
*self = match tts.len() {
278-
0 => TokenStream::empty(),
279-
1 => tts.pop().unwrap(),
280-
_ => TokenStream::concat_rc_vec(Lrc::new(tts)),
281-
};
275+
*self = TokenStream::concat(builder.0);
282276
}
283277
}
284278

@@ -483,7 +477,7 @@ impl TokenStreamBuilder {
483477
match len {
484478
1 => {}
485479
2 => self.0.push(streams[0].clone().into()),
486-
_ => self.0.push(TokenStream::concat_rc_vec( Lrc::new(streams[0 .. len - 1].to_vec()))),
480+
_ => self.0.push(TokenStream::concat(streams[0 .. len - 1].to_vec())),
487481
}
488482
self.push_all_but_last_tree(&streams[len - 1])
489483
}
@@ -495,7 +489,7 @@ impl TokenStreamBuilder {
495489
match len {
496490
1 => {}
497491
2 => self.0.push(streams[1].clone().into()),
498-
_ => self.0.push(TokenStream::concat_rc_vec(Lrc::new(streams[1 .. len].to_vec()))),
492+
_ => self.0.push(TokenStream::concat(streams[1 .. len].to_vec())),
499493
}
500494
self.push_all_but_first_tree(&streams[0])
501495
}

0 commit comments

Comments (0)