@@ -8,7 +8,7 @@ use core::iter::{
     FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
 };
 use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
 #[cfg(not(no_global_oom_handling))]
 use core::ops::Deref;
 use core::ptr::{self, NonNull};
@@ -149,7 +149,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     fn next(&mut self) -> Option<T> {
         if self.ptr == self.end {
             None
-        } else if mem::size_of::<T>() == 0 {
+        } else if T::IS_ZST {
             // purposefully don't use 'ptr.offset' because for
             // vectors with 0-size elements this would return the
             // same pointer.
@@ -167,7 +167,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {

     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
-        let exact = if mem::size_of::<T>() == 0 {
+        let exact = if T::IS_ZST {
             self.end.addr().wrapping_sub(self.ptr.addr())
         } else {
             unsafe { self.end.sub_ptr(self.ptr) }
@@ -179,7 +179,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     fn advance_by(&mut self, n: usize) -> Result<(), usize> {
         let step_size = self.len().min(n);
         let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
             // effectively results in unsigned pointers representing positions 0..usize::MAX,
             // which is valid for ZSTs.
@@ -209,7 +209,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {

         let len = self.len();

-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             if len < N {
                 self.forget_remaining_elements();
                 // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
@@ -253,7 +253,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
         unsafe {
-            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
+            if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
         }
     }
 }
@@ -264,7 +264,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     fn next_back(&mut self) -> Option<T> {
         if self.end == self.ptr {
             None
-        } else if mem::size_of::<T>() == 0 {
+        } else if T::IS_ZST {
             // See above for why 'ptr.offset' isn't used
             self.end = self.end.wrapping_byte_sub(1);

@@ -280,7 +280,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
         let step_size = self.len().min(n);
-        if mem::size_of::<T>() == 0 {
+        if T::IS_ZST {
             // SAFETY: same as for advance_by()
             self.end = self.end.wrapping_byte_sub(step_size);
         } else {
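
Every hunk above makes the same substitution: the open-coded `mem::size_of::<T>() == 0` test becomes the `IS_ZST` associated constant, pulled in via the new `SizedTypeProperties` import from `core::mem`. As a minimal sketch of the idea only (the real trait is a perma-unstable, doc-hidden helper inside `core::mem`; this re-creation is illustrative, not the actual definition):

// Illustrative sketch, not the real core::mem::SizedTypeProperties definition.
pub trait SizedTypeProperties: Sized {
    // `true` iff `Self` is a zero-sized type; evaluated at compile time.
    const IS_ZST: bool = core::mem::size_of::<Self>() == 0;
}

// Blanket impl: every sized type gets the constant, so call sites can write
// `T::IS_ZST` instead of spelling out `mem::size_of::<T>() == 0`.
impl<T> SizedTypeProperties for T {}

fn main() {
    assert!(<() as SizedTypeProperties>::IS_ZST);
    assert!(!u32::IS_ZST);
}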