Commit 67f8fb5

Use Lrc<Vec<TokenStream>> instead of RcVec<TokenStream>.
This shrinks:
- `ThinTokenStream`: 16 to 8 bytes
- `TokenTree`: 32 to 24 bytes
- `TokenStream`: 40 to 32 bytes

The only downside is that in a couple of places this requires using `to_vec()` (which allocates) instead of `sub_slice()`. But those places are rarely executed, so it doesn't hurt perf.

Overall, this reduces instruction counts on numerous benchmarks by up to 3%.
1 parent 08857c1 · commit 67f8fb5
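Why the change shrinks these types: a minimal, self-contained sketch, assuming a 64-bit target and using plain `Rc` in place of rustc's `Lrc` alias. `RcVecLike` below is a hypothetical stand-in for the old `util::RcVec` (a shared vector plus offset/length fields that enabled cheap sub-slicing), not the actual rustc definition.

use std::mem::size_of;
use std::rc::Rc;

// Hypothetical stand-in for the old `util::RcVec`: a shared Vec plus an
// offset/length pair that made `sub_slice()` cheap. The extra fields are
// what keep it at two words instead of one.
#[allow(dead_code)]
struct RcVecLike<T> {
    data: Rc<Vec<T>>,
    offset: u32,
    len: u32,
}

fn main() {
    // A shared, heap-allocated Vec handle is a single pointer-sized field...
    assert_eq!(size_of::<Rc<Vec<u64>>>(), 8);
    // ...and the null-pointer niche keeps `Option` of it at 8 bytes, which is
    // the `ThinTokenStream` shrink from 16 to 8 bytes described above.
    assert_eq!(size_of::<Option<Rc<Vec<u64>>>>(), 8);
    // The sub-sliceable wrapper is twice as wide.
    assert_eq!(size_of::<RcVecLike<u64>>(), 16);
}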

File tree

1 file changed: +17 −17 lines

src/libsyntax/tokenstream.rs

Lines changed: 17 additions & 17 deletions
@@ -28,8 +28,8 @@ use ext::tt::{macro_parser, quoted};
 use parse::Directory;
 use parse::token::{self, DelimToken, Token};
 use print::pprust;
+use rustc_data_structures::sync::Lrc;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
-use util::RcVec;
 
 use std::borrow::Cow;
 use std::{fmt, iter, mem};
@@ -160,7 +160,7 @@ pub struct TokenStream {
 
 // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 40);
+static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 32);
 
 impl TokenStream {
     /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
@@ -194,7 +194,7 @@ impl TokenStream {
                 new_slice.extend_from_slice(parts.0);
                 new_slice.push(comma);
                 new_slice.extend_from_slice(parts.1);
-                let slice = RcVec::new(new_slice);
+                let slice = Lrc::new(new_slice);
                 return Some((TokenStream { kind: TokenStreamKind::Stream(slice) }, sp));
             }
         }
@@ -207,7 +207,7 @@ enum TokenStreamKind {
     Empty,
     Tree(TokenTree),
     JointTree(TokenTree),
-    Stream(RcVec<TokenStream>),
+    Stream(Lrc<Vec<TokenStream>>),
 }
 
 impl From<TokenTree> for TokenStream {
@@ -246,7 +246,7 @@ impl Extend<TokenStream> for TokenStream {
                 vec.push(TokenStream { kind });
                 vec
             }
-            TokenStreamKind::Stream(rc_vec) => match RcVec::try_unwrap(rc_vec) {
+            TokenStreamKind::Stream(rc_vec) => match Lrc::try_unwrap(rc_vec) {
                 Ok(mut vec) => {
                     // Extend in place using the existing capacity if possible.
                     // This is the fast path for libraries like `quote` that
@@ -277,7 +277,7 @@ impl Extend<TokenStream> for TokenStream {
         *self = match tts.len() {
             0 => TokenStream::empty(),
             1 => tts.pop().unwrap(),
-            _ => TokenStream::concat_rc_vec(RcVec::new_preserving_capacity(tts)),
+            _ => TokenStream::concat_rc_vec(Lrc::new(tts)),
         };
     }
 }
@@ -314,11 +314,11 @@ impl TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
-            _ => TokenStream::concat_rc_vec(RcVec::new(streams)),
+            _ => TokenStream::concat_rc_vec(Lrc::new(streams)),
         }
     }
 
-    fn concat_rc_vec(streams: RcVec<TokenStream>) -> TokenStream {
+    fn concat_rc_vec(streams: Lrc<Vec<TokenStream>>) -> TokenStream {
         TokenStream { kind: TokenStreamKind::Stream(streams) }
     }
 
@@ -483,7 +483,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[0].clone().into()),
-                _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(0 .. len - 1))),
+                _ => self.0.push(TokenStream::concat_rc_vec(Lrc::new(streams[0 .. len - 1].to_vec()))),
             }
             self.push_all_but_last_tree(&streams[len - 1])
         }
@@ -495,7 +495,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[1].clone().into()),
-                _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(1 .. len))),
+                _ => self.0.push(TokenStream::concat_rc_vec(Lrc::new(streams[1 .. len].to_vec()))),
            }
            self.push_all_but_first_tree(&streams[0])
        }
@@ -515,13 +515,13 @@ enum CursorKind {
 
 #[derive(Clone)]
 struct StreamCursor {
-    stream: RcVec<TokenStream>,
+    stream: Lrc<Vec<TokenStream>>,
     index: usize,
-    stack: Vec<(RcVec<TokenStream>, usize)>,
+    stack: Vec<(Lrc<Vec<TokenStream>>, usize)>,
 }
 
 impl StreamCursor {
-    fn new(stream: RcVec<TokenStream>) -> Self {
+    fn new(stream: Lrc<Vec<TokenStream>>) -> Self {
         StreamCursor { stream: stream, index: 0, stack: Vec::new() }
     }
 
@@ -544,7 +544,7 @@ impl StreamCursor {
         }
     }
 
-    fn insert(&mut self, stream: RcVec<TokenStream>) {
+    fn insert(&mut self, stream: Lrc<Vec<TokenStream>>) {
         self.stack.push((mem::replace(&mut self.stream, stream),
                          mem::replace(&mut self.index, 0)));
     }
@@ -656,7 +656,7 @@ impl Cursor {
 /// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
 /// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
 #[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<RcVec<TokenStream>>);
+pub struct ThinTokenStream(Option<Lrc<Vec<TokenStream>>>);
 
 impl ThinTokenStream {
     pub fn stream(&self) -> TokenStream {
@@ -668,8 +668,8 @@ impl From<TokenStream> for ThinTokenStream {
     fn from(stream: TokenStream) -> ThinTokenStream {
         ThinTokenStream(match stream.kind {
             TokenStreamKind::Empty => None,
-            TokenStreamKind::Tree(tree) => Some(RcVec::new(vec![tree.into()])),
-            TokenStreamKind::JointTree(tree) => Some(RcVec::new(vec![tree.joint()])),
+            TokenStreamKind::Tree(tree) => Some(Lrc::new(vec![tree.into()])),
+            TokenStreamKind::JointTree(tree) => Some(Lrc::new(vec![tree.joint()])),
             TokenStreamKind::Stream(stream) => Some(stream),
         })
     }
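
The two `TokenStreamBuilder` hunks above are the only call sites that pay for the change: `Lrc<Vec<T>>` has no cheap `sub_slice()`, so taking a sub-range now clones the elements into a fresh `Vec`. A minimal sketch of that pattern, using plain `Rc` in place of `Lrc`; the `sub_stream` helper is illustrative only, not part of the rustc API.

use std::ops::Range;
use std::rc::Rc;

// With a plain shared Vec, the only way to hand out a sub-range as a new
// shared stream is to allocate and clone the elements in that range.
fn sub_stream<T: Clone>(streams: &Rc<Vec<T>>, range: Range<usize>) -> Rc<Vec<T>> {
    Rc::new(streams[range].to_vec())
}

fn main() {
    let streams = Rc::new(vec!["a", "b", "c", "d"]);
    let len = streams.len();
    // Mirrors `streams[0 .. len - 1].to_vec()`: everything but the last element.
    let head = sub_stream(&streams, 0..len - 1);
    // Mirrors `streams[1 .. len].to_vec()`: everything but the first element.
    let tail = sub_stream(&streams, 1..len);
    assert_eq!(*head, vec!["a", "b", "c"]);
    assert_eq!(*tail, vec!["b", "c", "d"]);
}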
