@@ -28,8 +28,8 @@ use ext::tt::{macro_parser, quoted};
 use parse::Directory;
 use parse::token::{self, DelimToken, Token};
 use print::pprust;
+use rustc_data_structures::sync::Lrc;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
-use util::RcVec;
 
 use std::borrow::Cow;
 use std::{fmt, iter, mem};
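
For context: `Lrc` is the reference-counted pointer alias exposed by `rustc_data_structures::sync` so the same code compiles for both the single-threaded and the parallel compiler. A rough sketch of the shape of that alias, assuming the cfg name used at the time; the exact definition lives in rustc_data_structures and may differ:

    // Assumed shape of the Lrc alias (illustrative only):
    #[cfg(not(parallel_queries))]
    pub use std::rc::Rc as Lrc;
    #[cfg(parallel_queries)]
    pub use std::sync::Arc as Lrc;
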
@@ -160,7 +160,7 @@ pub struct TokenStream {
 
 // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 40);
+static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 32);
 
 impl TokenStream {
     /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
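
The asserted size drops from 40 to 32 bytes, presumably because the `Stream` variant now holds a single pointer-sized `Lrc<Vec<TokenStream>>` where `RcVec<TokenStream>` (as far as I recall) also carried an offset and a length for sub-slicing. A minimal sketch of that size difference, with `Rc` standing in for `Lrc` and an assumed layout for the `RcVec`-like type:

    use std::mem;
    use std::rc::Rc;

    // Stand-ins; the RcVec field layout here is an assumption.
    #[allow(dead_code)]
    struct RcVecLike<T> { data: Rc<Vec<T>>, offset: u32, len: u32 }
    type LrcLike<T> = Rc<Vec<T>>;

    fn main() {
        // On x86_64: one pointer vs. a pointer plus offset and length.
        assert_eq!(mem::size_of::<LrcLike<u8>>(), 8);
        assert_eq!(mem::size_of::<RcVecLike<u8>>(), 16);
    }
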
@@ -194,7 +194,7 @@ impl TokenStream {
                 new_slice.extend_from_slice(parts.0);
                 new_slice.push(comma);
                 new_slice.extend_from_slice(parts.1);
-                let slice = RcVec::new(new_slice);
+                let slice = Lrc::new(new_slice);
                 return Some((TokenStream { kind: TokenStreamKind::Stream(slice) }, sp));
             }
         }
@@ -207,7 +207,7 @@ enum TokenStreamKind {
     Empty,
     Tree(TokenTree),
     JointTree(TokenTree),
-    Stream(RcVec<TokenStream>),
+    Stream(Lrc<Vec<TokenStream>>),
 }
 
 impl From<TokenTree> for TokenStream {
@@ -246,7 +246,7 @@ impl Extend<TokenStream> for TokenStream {
                 vec.push(TokenStream { kind });
                 vec
             }
-            TokenStreamKind::Stream(rc_vec) => match RcVec::try_unwrap(rc_vec) {
+            TokenStreamKind::Stream(rc_vec) => match Lrc::try_unwrap(rc_vec) {
                 Ok(mut vec) => {
                     // Extend in place using the existing capacity if possible.
                     // This is the fast path for libraries like `quote` that
@@ -277,7 +277,7 @@ impl Extend<TokenStream> for TokenStream {
         *self = match tts.len() {
             0 => TokenStream::empty(),
             1 => tts.pop().unwrap(),
-            _ => TokenStream::concat_rc_vec(RcVec::new_preserving_capacity(tts)),
+            _ => TokenStream::concat_rc_vec(Lrc::new(tts)),
         };
     }
 }
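
The `Extend` fast path hinges on `Lrc::try_unwrap`, which only succeeds when the stream has a single owner, letting the existing `Vec` (and its spare capacity) be extended in place; otherwise the elements must be copied out first. A small standalone sketch of that reuse-or-clone pattern, using `std::rc::Rc`, which exposes the same `try_unwrap` signature:

    use std::rc::Rc;

    fn extend_shared(shared: Rc<Vec<u32>>, extra: &[u32]) -> Vec<u32> {
        match Rc::try_unwrap(shared) {
            // Sole owner: reuse the existing allocation and grow it in place.
            Ok(mut vec) => {
                vec.extend_from_slice(extra);
                vec
            }
            // Still shared elsewhere: copy the elements into a fresh Vec first.
            Err(shared) => {
                let mut vec = shared.to_vec();
                vec.extend_from_slice(extra);
                vec
            }
        }
    }
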
@@ -314,11 +314,11 @@ impl TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
-            _ => TokenStream::concat_rc_vec(RcVec::new(streams)),
+            _ => TokenStream::concat_rc_vec(Lrc::new(streams)),
         }
     }
 
-    fn concat_rc_vec(streams: RcVec<TokenStream>) -> TokenStream {
+    fn concat_rc_vec(streams: Lrc<Vec<TokenStream>>) -> TokenStream {
         TokenStream { kind: TokenStreamKind::Stream(streams) }
     }
 
@@ -483,7 +483,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[0].clone().into()),
-                _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(0 .. len - 1))),
+                _ => self.0.push(TokenStream::concat_rc_vec(Lrc::new(streams[0 .. len - 1].to_vec()))),
             }
             self.push_all_but_last_tree(&streams[len - 1])
         }
@@ -495,7 +495,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[1].clone().into()),
-                _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(1 .. len))),
+                _ => self.0.push(TokenStream::concat_rc_vec(Lrc::new(streams[1 .. len].to_vec()))),
             }
             self.push_all_but_first_tree(&streams[0])
         }
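
One behavioural note: `RcVec::sub_slice` presumably returned a view sharing the existing backing buffer, whereas `Lrc::new(streams[..].to_vec())` copies the selected range into a fresh `Vec` before wrapping it. A sketch of what the new arm does, with `Rc` standing in for `Lrc` and `u64` standing in for `TokenStream`:

    use std::rc::Rc;

    fn all_but_last(streams: &[u64]) -> Rc<Vec<u64>> {
        let len = streams.len();
        // Copies len - 1 elements into a new allocation; callers only take
        // this path when len >= 3, mirroring the match arms above.
        Rc::new(streams[0 .. len - 1].to_vec())
    }
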
@@ -515,13 +515,13 @@ enum CursorKind {
 
 #[derive(Clone)]
 struct StreamCursor {
-    stream: RcVec<TokenStream>,
+    stream: Lrc<Vec<TokenStream>>,
     index: usize,
-    stack: Vec<(RcVec<TokenStream>, usize)>,
+    stack: Vec<(Lrc<Vec<TokenStream>>, usize)>,
 }
 
 impl StreamCursor {
-    fn new(stream: RcVec<TokenStream>) -> Self {
+    fn new(stream: Lrc<Vec<TokenStream>>) -> Self {
         StreamCursor { stream: stream, index: 0, stack: Vec::new() }
     }
 
@@ -544,7 +544,7 @@ impl StreamCursor {
         }
     }
 
-    fn insert(&mut self, stream: RcVec<TokenStream>) {
+    fn insert(&mut self, stream: Lrc<Vec<TokenStream>>) {
         self.stack.push((mem::replace(&mut self.stream, stream),
                          mem::replace(&mut self.index, 0)));
     }
@@ -656,7 +656,7 @@ impl Cursor {
 /// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
 /// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
 #[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<RcVec<TokenStream>>);
+pub struct ThinTokenStream(Option<Lrc<Vec<TokenStream>>>);
 
 impl ThinTokenStream {
     pub fn stream(&self) -> TokenStream {
@@ -668,8 +668,8 @@ impl From<TokenStream> for ThinTokenStream {
     fn from(stream: TokenStream) -> ThinTokenStream {
         ThinTokenStream(match stream.kind {
             TokenStreamKind::Empty => None,
-            TokenStreamKind::Tree(tree) => Some(RcVec::new(vec![tree.into()])),
-            TokenStreamKind::JointTree(tree) => Some(RcVec::new(vec![tree.joint()])),
+            TokenStreamKind::Tree(tree) => Some(Lrc::new(vec![tree.into()])),
+            TokenStreamKind::JointTree(tree) => Some(Lrc::new(vec![tree.joint()])),
             TokenStreamKind::Stream(stream) => Some(stream),
         })
     }
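
As the doc comment on `ThinTokenStream` notes, the thin form stays one nullable pointer wide but must allocate to hold a lone `TokenTree`; after this change that allocation is a one-element `Lrc<Vec<TokenStream>>`. A minimal stand-in illustrating the trade-off (the type and method names below are made up for the sketch):

    use std::rc::Rc;

    // Illustrative stand-in for ThinTokenStream's payload: one nullable pointer.
    #[allow(dead_code)]
    struct Thin<T>(Option<Rc<Vec<T>>>);

    impl<T> Thin<T> {
        fn empty() -> Self {
            Thin(None)
        }

        fn from_single(item: T) -> Self {
            // The single-item case pays for a heap allocation, which is the
            // trade-off the real struct's doc comment calls out.
            Thin(Some(Rc::new(vec![item])))
        }
    }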