13
13
) ]
14
14
#![ feature( core_intrinsics) ]
15
15
#![ feature( dropck_eyepatch) ]
16
- #![ feature( raw_vec_internals) ]
16
+ #![ feature( new_uninit) ]
17
+ #![ feature( maybe_uninit_slice) ]
17
18
#![ cfg_attr( test, feature( test) ) ]
18
19
#![ allow( deprecated) ]
19
20
20
- extern crate alloc;
21
-
22
21
use rustc_data_structures:: cold_path;
23
22
use smallvec:: SmallVec ;
24
23
@@ -27,12 +26,10 @@ use std::cell::{Cell, RefCell};
27
26
use std:: cmp;
28
27
use std:: intrinsics;
29
28
use std:: marker:: { PhantomData , Send } ;
30
- use std:: mem;
29
+ use std:: mem:: { self , MaybeUninit } ;
31
30
use std:: ptr;
32
31
use std:: slice;
33
32
34
- use alloc:: raw_vec:: RawVec ;
35
-
36
33
/// An arena that can hold objects of only one type.
37
34
pub struct TypedArena < T > {
38
35
/// A pointer to the next object to be allocated.
@@ -52,15 +49,15 @@ pub struct TypedArena<T> {
52
49
53
50
/// A single chunk of backing memory for a `TypedArena`.
///
/// Storage is a boxed slice of `MaybeUninit<T>`: the chunk owns its
/// allocation (the `Box` frees it on drop) but never claims the elements
/// are initialized — only the first `entries` slots hold live values.
struct TypedArenaChunk<T> {
    /// The raw, possibly-uninitialized storage for the arena chunk.
    /// Its length is the chunk capacity.
    storage: Box<[MaybeUninit<T>]>,

    /// The number of valid (initialized) entries in the chunk.
    entries: usize,
}
59
56
60
57
impl < T > TypedArenaChunk < T > {
61
58
#[ inline]
62
59
unsafe fn new ( capacity : usize ) -> TypedArenaChunk < T > {
63
- TypedArenaChunk { storage : RawVec :: with_capacity ( capacity) , entries : 0 }
60
+ TypedArenaChunk { storage : Box :: new_uninit_slice ( capacity) , entries : 0 }
64
61
}
65
62
66
63
/// Destroys this arena chunk.
@@ -80,19 +77,19 @@ impl<T> TypedArenaChunk<T> {
80
77
81
78
// Returns a pointer to the first allocated object.
82
79
#[ inline]
83
- fn start ( & self ) -> * mut T {
84
- self . storage . ptr ( )
80
+ fn start ( & mut self ) -> * mut T {
81
+ MaybeUninit :: slice_as_mut_ptr ( & mut self . storage )
85
82
}
86
83
87
84
// Returns a pointer to the end of the allocated space.
88
85
#[ inline]
89
- fn end ( & self ) -> * mut T {
86
+ fn end ( & mut self ) -> * mut T {
90
87
unsafe {
91
88
if mem:: size_of :: < T > ( ) == 0 {
92
89
// A pointer as large as possible for zero-sized elements.
93
90
!0 as * mut T
94
91
} else {
95
- self . start ( ) . add ( self . storage . capacity ( ) )
92
+ self . start ( ) . add ( self . storage . len ( ) )
96
93
}
97
94
}
98
95
}
@@ -226,10 +223,10 @@ impl<T> TypedArena<T> {
226
223
let used_bytes = self . ptr . get ( ) as usize - last_chunk. start ( ) as usize ;
227
224
last_chunk. entries = used_bytes / mem:: size_of :: < T > ( ) ;
228
225
229
- // If the previous chunk's capacity is less than HUGE_PAGE
226
+ // If the previous chunk's len is less than HUGE_PAGE
230
227
// bytes, then this chunk will be least double the previous
231
228
// chunk's size.
232
- new_cap = last_chunk. storage . capacity ( ) ;
229
+ new_cap = last_chunk. storage . len ( ) ;
233
230
if new_cap < HUGE_PAGE / elem_size {
234
231
new_cap = new_cap. checked_mul ( 2 ) . unwrap ( ) ;
235
232
}
@@ -239,7 +236,7 @@ impl<T> TypedArena<T> {
239
236
// Also ensure that this chunk can fit `additional`.
240
237
new_cap = cmp:: max ( additional, new_cap) ;
241
238
242
- let chunk = TypedArenaChunk :: < T > :: new ( new_cap) ;
239
+ let mut chunk = TypedArenaChunk :: < T > :: new ( new_cap) ;
243
240
self . ptr . set ( chunk. start ( ) ) ;
244
241
self . end . set ( chunk. end ( ) ) ;
245
242
chunks. push ( chunk) ;
@@ -301,7 +298,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
301
298
chunk. destroy ( chunk. entries ) ;
302
299
}
303
300
}
304
- // RawVec handles deallocation of `last_chunk` and `self.chunks`.
301
+ // Box handles deallocation of `last_chunk` and `self.chunks`.
305
302
}
306
303
}
307
304
}
@@ -344,10 +341,10 @@ impl DroplessArena {
344
341
// There is no need to update `last_chunk.entries` because that
345
342
// field isn't used by `DroplessArena`.
346
343
347
- // If the previous chunk's capacity is less than HUGE_PAGE
344
+ // If the previous chunk's len is less than HUGE_PAGE
348
345
// bytes, then this chunk will be least double the previous
349
346
// chunk's size.
350
- new_cap = last_chunk. storage . capacity ( ) ;
347
+ new_cap = last_chunk. storage . len ( ) ;
351
348
if new_cap < HUGE_PAGE {
352
349
new_cap = new_cap. checked_mul ( 2 ) . unwrap ( ) ;
353
350
}
@@ -357,7 +354,7 @@ impl DroplessArena {
357
354
// Also ensure that this chunk can fit `additional`.
358
355
new_cap = cmp:: max ( additional, new_cap) ;
359
356
360
- let chunk = TypedArenaChunk :: < u8 > :: new ( new_cap) ;
357
+ let mut chunk = TypedArenaChunk :: < u8 > :: new ( new_cap) ;
361
358
self . ptr . set ( chunk. start ( ) ) ;
362
359
self . end . set ( chunk. end ( ) ) ;
363
360
chunks. push ( chunk) ;
0 commit comments