Commit e30cb32 (committed Apr 6, 2025)
Polymorphize array::IntoIter's iterator impl
1 parent 0b45675

4 files changed: +444 -142 lines
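
A note on the technique, to make the diffs below easier to follow: Rust monomorphizes generics per instantiation, so the old iterator code was compiled separately for every `(T, N)` pair. This commit keeps `N` only in the stored data and unsizes a reference to the state struct, so the iteration logic is compiled once per element type `T`. A minimal, self-contained sketch of that unsizing pattern, with hypothetical names (`PolyIter` here is not the std-internal type):

struct PolyIter<Tail: ?Sized> {
    len: usize,
    data: Tail,
}

impl<T> PolyIter<[T]> {
    // Compiled once per element type `T`, shared by every array length.
    fn first(&self) -> Option<&T> {
        self.data.first()
    }
}

fn main() {
    let sized: PolyIter<[u8; 4]> = PolyIter { len: 4, data: [1, 2, 3, 4] };
    // Built-in unsizing coercion on the last struct field: `[u8; 4]` -> `[u8]`.
    let erased: &PolyIter<[u8]> = &sized;
    assert_eq!(erased.len, 4);
    assert_eq!(erased.first(), Some(&1));
}

This mirrors how `IntoIter::unsize` and `unsize_mut` in the first diff coerce `&PolymorphicIter<[MaybeUninit<T>; N]>` to `&PolymorphicIter<[MaybeUninit<T>]>`.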


library/core/src/array/iter.rs (+84 -141)

@@ -1,38 +1,35 @@
 //! Defines the `IntoIter` owned iterator for arrays.
 
 use crate::intrinsics::transmute_unchecked;
-use crate::iter::{self, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce};
+use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce};
 use crate::mem::MaybeUninit;
 use crate::num::NonZero;
-use crate::ops::{IndexRange, Range};
+use crate::ops::{IndexRange, Range, Try};
 use crate::{fmt, ptr};
 
+mod iter_inner;
+
+type InnerSized<T, const N: usize> = iter_inner::PolymorphicIter<[MaybeUninit<T>; N]>;
+type InnerUnsized<T> = iter_inner::PolymorphicIter<[MaybeUninit<T>]>;
+
 /// A by-value [array] iterator.
 #[stable(feature = "array_value_iter", since = "1.51.0")]
 #[rustc_insignificant_dtor]
 #[rustc_diagnostic_item = "ArrayIntoIter"]
+#[derive(Clone)]
 pub struct IntoIter<T, const N: usize> {
-    /// This is the array we are iterating over.
-    ///
-    /// Elements with index `i` where `alive.start <= i < alive.end` have not
-    /// been yielded yet and are valid array entries. Elements with indices `i
-    /// < alive.start` or `i >= alive.end` have been yielded already and must
-    /// not be accessed anymore! Those dead elements might even be in a
-    /// completely uninitialized state!
-    ///
-    /// So the invariants are:
-    /// - `data[alive]` is alive (i.e. contains valid elements)
-    /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
-    ///   elements were already read and must not be touched anymore!)
-    data: [MaybeUninit<T>; N],
+    inner: InnerSized<T, N>,
+}
 
-    /// The elements in `data` that have not been yielded yet.
-    ///
-    /// Invariants:
-    /// - `alive.end <= N`
-    ///
-    /// (And the `IndexRange` type requires `alive.start <= alive.end`.)
-    alive: IndexRange,
+impl<T, const N: usize> IntoIter<T, N> {
+    #[inline]
+    fn unsize(&self) -> &InnerUnsized<T> {
+        &self.inner
+    }
+    #[inline]
+    fn unsize_mut(&mut self) -> &mut InnerUnsized<T> {
+        &mut self.inner
+    }
 }
 
 // Note: the `#[rustc_skip_during_method_dispatch(array)]` on `trait IntoIterator`
@@ -53,6 +50,7 @@ impl<T, const N: usize> IntoIterator for [T; N] {
     /// 2021 edition -- see the [array] Editions section for more information.
     ///
     /// [array]: prim@array
+    #[inline]
     fn into_iter(self) -> Self::IntoIter {
         // SAFETY: The transmute here is actually safe. The docs of `MaybeUninit`
         // promise:
@@ -68,7 +66,10 @@ impl<T, const N: usize> IntoIterator for [T; N] {
         // FIXME: If normal `transmute` ever gets smart enough to allow this
         // directly, use it instead of `transmute_unchecked`.
         let data: [MaybeUninit<T>; N] = unsafe { transmute_unchecked(self) };
-        IntoIter { data, alive: IndexRange::zero_to(N) }
+        // SAFETY: The original array was entirely initialized and the alive
+        // range we're passing here represents that fact.
+        let inner = unsafe { InnerSized::new_unchecked(IndexRange::zero_to(N), data) };
+        IntoIter { inner }
     }
 }
 
@@ -136,13 +137,16 @@ impl<T, const N: usize> IntoIter<T, N> {
     /// assert_eq!(r.collect::<Vec<_>>(), vec![10, 11, 12, 13, 14, 15]);
     /// ```
     #[unstable(feature = "array_into_iter_constructors", issue = "91583")]
+    #[inline]
     pub const unsafe fn new_unchecked(
         buffer: [MaybeUninit<T>; N],
         initialized: Range<usize>,
     ) -> Self {
         // SAFETY: one of our safety conditions is that the range is canonical.
         let alive = unsafe { IndexRange::new_unchecked(initialized.start, initialized.end) };
-        Self { data: buffer, alive }
+        // SAFETY: one of our safety conditions is that these items are initialized.
+        let inner = unsafe { InnerSized::new_unchecked(alive, buffer) };
+        IntoIter { inner }
     }
 
     /// Creates an iterator over `T` which returns no elements.
@@ -198,172 +202,134 @@ impl<T, const N: usize> IntoIter<T, N> {
     /// assert_eq!(get_bytes(false).collect::<Vec<_>>(), vec![]);
     /// ```
     #[unstable(feature = "array_into_iter_constructors", issue = "91583")]
+    #[inline]
     pub const fn empty() -> Self {
-        let buffer = [const { MaybeUninit::uninit() }; N];
-        let initialized = 0..0;
-
-        // SAFETY: We're telling it that none of the elements are initialized,
-        // which is trivially true. And ∀N: usize, 0 <= N.
-        unsafe { Self::new_unchecked(buffer, initialized) }
+        let inner = InnerSized::empty();
+        IntoIter { inner }
     }
 
     /// Returns an immutable slice of all elements that have not been yielded
     /// yet.
     #[stable(feature = "array_value_iter", since = "1.51.0")]
+    #[inline]
     pub fn as_slice(&self) -> &[T] {
-        // SAFETY: We know that all elements within `alive` are properly initialized.
-        unsafe {
-            let slice = self.data.get_unchecked(self.alive.clone());
-            slice.assume_init_ref()
-        }
+        self.unsize().as_slice()
     }
 
     /// Returns a mutable slice of all elements that have not been yielded yet.
     #[stable(feature = "array_value_iter", since = "1.51.0")]
+    #[inline]
     pub fn as_mut_slice(&mut self) -> &mut [T] {
-        // SAFETY: We know that all elements within `alive` are properly initialized.
-        unsafe {
-            let slice = self.data.get_unchecked_mut(self.alive.clone());
-            slice.assume_init_mut()
-        }
+        self.unsize_mut().as_mut_slice()
     }
 }
 
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T, const N: usize> Iterator for IntoIter<T, N> {
     type Item = T;
+
+    #[inline]
     fn next(&mut self) -> Option<Self::Item> {
-        // Get the next index from the front.
-        //
-        // Increasing `alive.start` by 1 maintains the invariant regarding
-        // `alive`. However, due to this change, for a short time, the alive
-        // zone is not `data[alive]` anymore, but `data[idx..alive.end]`.
-        self.alive.next().map(|idx| {
-            // Read the element from the array.
-            // SAFETY: `idx` is an index into the former "alive" region of the
-            // array. Reading this element means that `data[idx]` is regarded as
-            // dead now (i.e. do not touch). As `idx` was the start of the
-            // alive-zone, the alive zone is now `data[alive]` again, restoring
-            // all invariants.
-            unsafe { self.data.get_unchecked(idx).assume_init_read() }
-        })
+        self.unsize_mut().next()
    }
 
+    #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
-        let len = self.len();
-        (len, Some(len))
+        self.unsize().size_hint()
     }
 
     #[inline]
-    fn fold<Acc, Fold>(mut self, init: Acc, mut fold: Fold) -> Acc
+    fn fold<Acc, Fold>(mut self, init: Acc, fold: Fold) -> Acc
     where
         Fold: FnMut(Acc, Self::Item) -> Acc,
     {
-        let data = &mut self.data;
-        iter::ByRefSized(&mut self.alive).fold(init, |acc, idx| {
-            // SAFETY: idx is obtained by folding over the `alive` range, which implies the
-            // value is currently considered alive but as the range is being consumed each value
-            // we read here will only be read once and then considered dead.
-            fold(acc, unsafe { data.get_unchecked(idx).assume_init_read() })
-        })
+        self.unsize_mut().fold(init, fold)
     }
 
+    #[inline]
+    fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Output = B>,
+    {
+        self.unsize_mut().try_fold(init, f)
+    }
+
+    #[inline]
     fn count(self) -> usize {
         self.len()
     }
 
+    #[inline]
     fn last(mut self) -> Option<Self::Item> {
         self.next_back()
     }
 
+    #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
-        // This also moves the start, which marks them as conceptually "dropped",
-        // so if anything goes bad then our drop impl won't double-free them.
-        let range_to_drop = self.alive.take_prefix(n);
-        let remaining = n - range_to_drop.len();
-
-        // SAFETY: These elements are currently initialized, so it's fine to drop them.
-        unsafe {
-            let slice = self.data.get_unchecked_mut(range_to_drop);
-            slice.assume_init_drop();
-        }
-
-        NonZero::new(remaining).map_or(Ok(()), Err)
+        self.unsize_mut().advance_by(n)
     }
 
     #[inline]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         // SAFETY: The caller must provide an idx that is in bound of the remainder.
-        unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::<T>().read() }
+        let elem_ref = unsafe { self.as_mut_slice().get_unchecked_mut(idx) };
+        // SAFETY: We only implement `TrustedRandomAccessNoCoerce` for types
+        // which are actually `Copy`, so cannot have multiple-drop issues.
+        unsafe { ptr::read(elem_ref) }
     }
 }
 
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, N> {
+    #[inline]
     fn next_back(&mut self) -> Option<Self::Item> {
-        // Get the next index from the back.
-        //
-        // Decreasing `alive.end` by 1 maintains the invariant regarding
-        // `alive`. However, due to this change, for a short time, the alive
-        // zone is not `data[alive]` anymore, but `data[alive.start..=idx]`.
-        self.alive.next_back().map(|idx| {
-            // Read the element from the array.
-            // SAFETY: `idx` is an index into the former "alive" region of the
-            // array. Reading this element means that `data[idx]` is regarded as
-            // dead now (i.e. do not touch). As `idx` was the end of the
-            // alive-zone, the alive zone is now `data[alive]` again, restoring
-            // all invariants.
-            unsafe { self.data.get_unchecked(idx).assume_init_read() }
-        })
+        self.unsize_mut().next_back()
     }
 
     #[inline]
-    fn rfold<Acc, Fold>(mut self, init: Acc, mut rfold: Fold) -> Acc
+    fn rfold<Acc, Fold>(mut self, init: Acc, rfold: Fold) -> Acc
     where
         Fold: FnMut(Acc, Self::Item) -> Acc,
     {
-        let data = &mut self.data;
-        iter::ByRefSized(&mut self.alive).rfold(init, |acc, idx| {
-            // SAFETY: idx is obtained by folding over the `alive` range, which implies the
-            // value is currently considered alive but as the range is being consumed each value
-            // we read here will only be read once and then considered dead.
-            rfold(acc, unsafe { data.get_unchecked(idx).assume_init_read() })
-        })
+        self.unsize_mut().rfold(init, rfold)
+    }
+
+    #[inline]
+    fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Output = B>,
+    {
+        self.unsize_mut().try_rfold(init, f)
     }
 
+    #[inline]
     fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
-        // This also moves the end, which marks them as conceptually "dropped",
-        // so if anything goes bad then our drop impl won't double-free them.
-        let range_to_drop = self.alive.take_suffix(n);
-        let remaining = n - range_to_drop.len();
-
-        // SAFETY: These elements are currently initialized, so it's fine to drop them.
-        unsafe {
-            let slice = self.data.get_unchecked_mut(range_to_drop);
-            slice.assume_init_drop();
-        }
-
-        NonZero::new(remaining).map_or(Ok(()), Err)
+        self.unsize_mut().advance_back_by(n)
     }
 }
 
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T, const N: usize> Drop for IntoIter<T, N> {
+    #[inline]
     fn drop(&mut self) {
-        // SAFETY: This is safe: `as_mut_slice` returns exactly the sub-slice
-        // of elements that have not been moved out yet and that remain
-        // to be dropped.
-        unsafe { ptr::drop_in_place(self.as_mut_slice()) }
+        // `inner` now handles this, but it'd technically be a breaking change
+        // to remove this `impl`, even though it's useless.
     }
 }
 
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T, const N: usize> ExactSizeIterator for IntoIter<T, N> {
+    #[inline]
     fn len(&self) -> usize {
-        self.alive.len()
+        self.inner.len()
     }
+    #[inline]
     fn is_empty(&self) -> bool {
-        self.alive.is_empty()
+        self.inner.len() == 0
     }
 }
 
@@ -396,32 +362,9 @@ where
     const MAY_HAVE_SIDE_EFFECT: bool = false;
 }
 
-#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
-impl<T: Clone, const N: usize> Clone for IntoIter<T, N> {
-    fn clone(&self) -> Self {
-        // Note, we don't really need to match the exact same alive range, so
-        // we can just clone into offset 0 regardless of where `self` is.
-        let mut new =
-            Self { data: [const { MaybeUninit::uninit() }; N], alive: IndexRange::zero_to(0) };
-
-        // Clone all alive elements.
-        for (src, dst) in iter::zip(self.as_slice(), &mut new.data) {
-            // Write a clone into the new array, then update its alive range.
-            // If cloning panics, we'll correctly drop the previous items.
-            dst.write(src.clone());
-            // This addition cannot overflow as we're iterating a slice
-            new.alive = IndexRange::zero_to(new.alive.end() + 1);
-        }
-
-        new
-    }
-}
-
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T: fmt::Debug, const N: usize> fmt::Debug for IntoIter<T, N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        // Only print the elements that were not yielded yet: we cannot
-        // access the yielded elements anymore.
-        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
+        self.unsize().fmt(f)
     }
 }

library/core/src/array/iter/iter_inner.rs (new file, +281)

@@ -0,0 +1,281 @@
+//! Defines the `IntoIter` owned iterator for arrays.
+
+use crate::mem::MaybeUninit;
+use crate::num::NonZero;
+use crate::ops::{IndexRange, NeverShortCircuit, Try};
+use crate::{fmt, iter};
+
+#[allow(private_bounds)]
+trait PartialDrop {
+    /// # Safety
+    /// `self[alive]` are all initialized before the call,
+    /// then are never used (without reinitializing them) after it.
+    unsafe fn partial_drop(&mut self, alive: IndexRange);
+}
+impl<T> PartialDrop for [MaybeUninit<T>] {
+    unsafe fn partial_drop(&mut self, alive: IndexRange) {
+        // SAFETY: We know that all elements within `alive` are properly initialized.
+        unsafe { self.get_unchecked_mut(alive).assume_init_drop() }
+    }
+}
+impl<T, const N: usize> PartialDrop for [MaybeUninit<T>; N] {
+    unsafe fn partial_drop(&mut self, alive: IndexRange) {
+        let slice: &mut [MaybeUninit<T>] = self;
+        // SAFETY: Initialized elements in the array are also initialized in the slice.
+        unsafe { slice.partial_drop(alive) }
+    }
+}
+
+/// The internals of a by-value array iterator.
+///
+/// The real `array::IntoIter<T, N>` stores a `PolymorphicIter<[MaybeUninit<T>; N]>`
+/// which it unsizes to `PolymorphicIter<[MaybeUninit<T>]>` to iterate.
+#[allow(private_bounds)]
+pub(super) struct PolymorphicIter<TAIL: ?Sized>
+where
+    TAIL: PartialDrop,
+{
+    /// The elements in `data` that have not been yielded yet.
+    ///
+    /// Invariants:
+    /// - `alive.end <= N`
+    ///
+    /// (And the `IndexRange` type requires `alive.start <= alive.end`.)
+    alive: IndexRange,
+
+    /// This is the array we are iterating over.
+    ///
+    /// Elements with index `i` where `alive.start <= i < alive.end` have not
+    /// been yielded yet and are valid array entries. Elements with indices `i
+    /// < alive.start` or `i >= alive.end` have been yielded already and must
+    /// not be accessed anymore! Those dead elements might even be in a
+    /// completely uninitialized state!
+    ///
+    /// So the invariants are:
+    /// - `data[alive]` is alive (i.e. contains valid elements)
+    /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
+    ///   elements were already read and must not be touched anymore!)
+    data: TAIL,
+}
+
+#[allow(private_bounds)]
+impl<TAIL: ?Sized> PolymorphicIter<TAIL>
+where
+    TAIL: PartialDrop,
+{
+    #[inline]
+    pub(super) const fn len(&self) -> usize {
+        self.alive.len()
+    }
+}
+
+#[allow(private_bounds)]
+impl<TAIL: ?Sized> Drop for PolymorphicIter<TAIL>
+where
+    TAIL: PartialDrop,
+{
+    #[inline]
+    fn drop(&mut self) {
+        // SAFETY: by our type invariant `self.alive` is exactly the initialized
+        // items, and this is drop so nothing can use the items afterwards.
+        unsafe { self.data.partial_drop(self.alive.clone()) }
+    }
+}
+
+impl<T, const N: usize> PolymorphicIter<[MaybeUninit<T>; N]> {
+    #[inline]
+    pub(super) const fn empty() -> Self {
+        Self { alive: IndexRange::zero_to(0), data: [const { MaybeUninit::uninit() }; N] }
+    }
+
+    /// # Safety
+    /// `data[alive]` are all initialized.
+    #[inline]
+    pub(super) const unsafe fn new_unchecked(alive: IndexRange, data: [MaybeUninit<T>; N]) -> Self {
+        Self { alive, data }
+    }
+}
+
+impl<T: Clone, const N: usize> Clone for PolymorphicIter<[MaybeUninit<T>; N]> {
+    #[inline]
+    fn clone(&self) -> Self {
+        // Note, we don't really need to match the exact same alive range, so
+        // we can just clone into offset 0 regardless of where `self` is.
+        let mut new = Self::empty();
+
+        fn clone_into_new<U: Clone>(
+            source: &PolymorphicIter<[MaybeUninit<U>]>,
+            target: &mut PolymorphicIter<[MaybeUninit<U>]>,
+        ) {
+            // Clone all alive elements.
+            for (src, dst) in iter::zip(source.as_slice(), &mut target.data) {
+                // Write a clone into the new array, then update its alive range.
+                // If cloning panics, we'll correctly drop the previous items.
+                dst.write(src.clone());
+                // This addition cannot overflow as we're iterating a slice,
+                // the length of which always fits in usize.
+                target.alive = IndexRange::zero_to(target.alive.end() + 1);
+            }
+        }
+
+        clone_into_new(self, &mut new);
+        new
+    }
+}
+
+impl<T> PolymorphicIter<[MaybeUninit<T>]> {
+    #[inline]
+    pub(super) fn as_slice(&self) -> &[T] {
+        // SAFETY: We know that all elements within `alive` are properly initialized.
+        unsafe {
+            let slice = self.data.get_unchecked(self.alive.clone());
+            slice.assume_init_ref()
+        }
+    }
+
+    #[inline]
+    pub(super) fn as_mut_slice(&mut self) -> &mut [T] {
+        // SAFETY: We know that all elements within `alive` are properly initialized.
+        unsafe {
+            let slice = self.data.get_unchecked_mut(self.alive.clone());
+            slice.assume_init_mut()
+        }
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for PolymorphicIter<[MaybeUninit<T>]> {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Only print the elements that were not yielded yet: we cannot
+        // access the yielded elements anymore.
+        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
+    }
+}
+
+/// Iterator-equivalent methods.
+///
+/// We don't implement the actual iterator traits because we want to implement
+/// things like `try_fold` that require `Self: Sized` (which we're not).
+impl<T> PolymorphicIter<[MaybeUninit<T>]> {
+    #[inline]
+    pub(super) fn next(&mut self) -> Option<T> {
+        // Get the next index from the front.
+        //
+        // Increasing `alive.start` by 1 maintains the invariant regarding
+        // `alive`. However, due to this change, for a short time, the alive
+        // zone is not `data[alive]` anymore, but `data[idx..alive.end]`.
+        self.alive.next().map(|idx| {
+            // Read the element from the array.
+            // SAFETY: `idx` is an index into the former "alive" region of the
+            // array. Reading this element means that `data[idx]` is regarded as
+            // dead now (i.e. do not touch). As `idx` was the start of the
+            // alive-zone, the alive zone is now `data[alive]` again, restoring
+            // all invariants.
+            unsafe { self.data.get_unchecked(idx).assume_init_read() }
+        })
+    }
+
+    #[inline]
+    pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
+        let len = self.len();
+        (len, Some(len))
+    }
+
+    #[inline]
+    pub(super) fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
+        // This also moves the start, which marks them as conceptually "dropped",
+        // so if anything goes bad then our drop impl won't double-free them.
+        let range_to_drop = self.alive.take_prefix(n);
+        let remaining = n - range_to_drop.len();
+
+        // SAFETY: These elements are currently initialized, so it's fine to drop them.
+        unsafe {
+            let slice = self.data.get_unchecked_mut(range_to_drop);
+            slice.assume_init_drop();
+        }
+
+        NonZero::new(remaining).map_or(Ok(()), Err)
+    }
+
+    #[inline]
+    pub(super) fn fold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
+        self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
+    }
+
+    #[inline]
+    pub(super) fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
+    where
+        F: FnMut(B, T) -> R,
+        R: Try<Output = B>,
+    {
+        // `alive` is an `IndexRange`, not an arbitrary iterator, so we can
+        // trust that its `try_fold` isn't going to do something weird like
+        // call the fold-er multiple times for the same index.
+        let data = &mut self.data;
+        self.alive.try_fold(init, move |accum, idx| {
+            // SAFETY: `idx` has been removed from the alive range, so we're not
+            // going to drop it (even if `f` panics) and thus it's ok to give
+            // out ownership of that item to `f` to handle.
+            let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
+            f(accum, elem)
+        })
+    }
+
+    #[inline]
+    pub(super) fn next_back(&mut self) -> Option<T> {
+        // Get the next index from the back.
+        //
+        // Decreasing `alive.end` by 1 maintains the invariant regarding
+        // `alive`. However, due to this change, for a short time, the alive
+        // zone is not `data[alive]` anymore, but `data[alive.start..=idx]`.
+        self.alive.next_back().map(|idx| {
+            // Read the element from the array.
+            // SAFETY: `idx` is an index into the former "alive" region of the
+            // array. Reading this element means that `data[idx]` is regarded as
+            // dead now (i.e. do not touch). As `idx` was the end of the
+            // alive-zone, the alive zone is now `data[alive]` again, restoring
+            // all invariants.
+            unsafe { self.data.get_unchecked(idx).assume_init_read() }
+        })
+    }
+
+    #[inline]
+    pub(super) fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
+        // This also moves the end, which marks them as conceptually "dropped",
+        // so if anything goes bad then our drop impl won't double-free them.
+        let range_to_drop = self.alive.take_suffix(n);
+        let remaining = n - range_to_drop.len();
+
+        // SAFETY: These elements are currently initialized, so it's fine to drop them.
+        unsafe {
+            let slice = self.data.get_unchecked_mut(range_to_drop);
+            slice.assume_init_drop();
+        }
+
+        NonZero::new(remaining).map_or(Ok(()), Err)
+    }
+
+    #[inline]
+    pub(super) fn rfold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
+        self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0
+    }
+
+    #[inline]
+    pub(super) fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
+    where
+        F: FnMut(B, T) -> R,
+        R: Try<Output = B>,
+    {
+        // `alive` is an `IndexRange`, not an arbitrary iterator, so we can
+        // trust that its `try_rfold` isn't going to do something weird like
+        // call the fold-er multiple times for the same index.
+        let data = &mut self.data;
+        self.alive.try_rfold(init, move |accum, idx| {
+            // SAFETY: `idx` has been removed from the alive range, so we're not
+            // going to drop it (even if `f` panics) and thus it's ok to give
+            // out ownership of that item to `f` to handle.
+            let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
+            f(accum, elem)
+        })
+    }
+}
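
`fold` and `rfold` above are built on `try_fold`/`try_rfold` via the std-internal `NeverShortCircuit` adapter, which packages an infallible closure as a `Try` type whose failure path is uninhabited. A rough stable-Rust equivalent of that trick, using `Result<B, Infallible>` in place of the internal adapter (`fold_via_try_fold` is a hypothetical name):

use std::convert::Infallible;

fn fold_via_try_fold<I, B, F>(iter: &mut I, init: B, mut f: F) -> B
where
    I: Iterator,
    F: FnMut(B, I::Item) -> B,
{
    // The error type is uninhabited, so the short-circuit branch is dead
    // code and this compiles down to a plain fold loop.
    iter.try_fold(init, |acc, x| Ok::<B, Infallible>(f(acc, x)))
        .unwrap_or_else(|never| match never {})
}

fn main() {
    let mut it = [1, 2, 3].into_iter();
    assert_eq!(fold_via_try_fold(&mut it, 0, |acc, x| acc + x), 6);
}

Writing only the short-circuiting version by hand and deriving the infallible one keeps a single loop body to maintain and optimize.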

library/core/src/ops/index_range.rs (+55)

@@ -1,5 +1,6 @@
 use crate::iter::{FusedIterator, TrustedLen};
 use crate::num::NonZero;
+use crate::ops::{NeverShortCircuit, Try};
 use crate::ub_checks;
 
 /// Like a `Range<usize>`, but with a safety invariant that `start <= end`.
@@ -112,6 +113,12 @@ impl IndexRange {
         self.end = mid;
         suffix
     }
+
+    #[inline]
+    fn assume_range(&self) {
+        // SAFETY: This is the type invariant
+        unsafe { crate::hint::assert_unchecked(self.start <= self.end) }
+    }
 }
 
 impl Iterator for IndexRange {
@@ -138,6 +145,30 @@ impl Iterator for IndexRange {
         let taken = self.take_prefix(n);
         NonZero::new(n - taken.len()).map_or(Ok(()), Err)
     }
+
+    #[inline]
+    fn fold<B, F: FnMut(B, usize) -> B>(mut self, init: B, f: F) -> B {
+        self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
+    }
+
+    #[inline]
+    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Output = B>,
+    {
+        // `Range` needs to check `start < end`, but thanks to our type invariant
+        // we can loop on the stricter `start != end`.
+
+        self.assume_range();
+        while self.start != self.end {
+            // SAFETY: We just checked that the range is non-empty
+            let i = unsafe { self.next_unchecked() };
+            accum = f(accum, i)?;
+        }
+        try { accum }
+    }
 }
 
 impl DoubleEndedIterator for IndexRange {
@@ -156,6 +187,30 @@ impl DoubleEndedIterator for IndexRange {
         let taken = self.take_suffix(n);
         NonZero::new(n - taken.len()).map_or(Ok(()), Err)
     }
+
+    #[inline]
+    fn rfold<B, F: FnMut(B, usize) -> B>(mut self, init: B, f: F) -> B {
+        self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0
+    }
+
+    #[inline]
+    fn try_rfold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Output = B>,
+    {
+        // `Range` needs to check `start < end`, but thanks to our type invariant
+        // we can loop on the stricter `start != end`.
+
+        self.assume_range();
+        while self.start != self.end {
+            // SAFETY: We just checked that the range is non-empty
+            let i = unsafe { self.next_back_unchecked() };
+            accum = f(accum, i)?;
+        }
+        try { accum }
+    }
 }
 
 impl ExactSizeIterator for IndexRange {
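
The new `assume_range` helper hands the `start <= end` type invariant to the optimizer through `hint::assert_unchecked`, which is what makes the `start != end` loop condition above safe: if the invariant could be violated, the loop would run away past `end`. A self-contained sketch of the same pattern (`Bounds` is a hypothetical stand-in for the std-internal `IndexRange`):

use std::hint;

/// Invariant: `start <= end`, upheld by every constructor.
struct Bounds {
    start: usize,
    end: usize,
}

fn sum(mut b: Bounds) -> usize {
    // SAFETY: the type invariant guarantees `start <= end`, so the
    // `start != end` loop below can never step past `end`.
    unsafe { hint::assert_unchecked(b.start <= b.end) };
    let mut acc = 0;
    while b.start != b.end {
        acc += b.start;
        b.start += 1;
    }
    acc
}

fn main() {
    assert_eq!(sum(Bounds { start: 2, end: 5 }), 2 + 3 + 4);
}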

tests/codegen/issues/issue-101082.rs (+24 -1)

@@ -11,11 +11,34 @@
 #[no_mangle]
 pub fn test() -> usize {
     // CHECK-LABEL: @test(
-    // CHECK: ret {{i64|i32}} 165
+    // host: ret {{i64|i32}} 165
+
+    // FIXME: Now that this autovectorizes via a masked load, it doesn't actually
+    // const-fold for certain widths. The `test_eight` case below shows that, yes,
+    // what we're emitting *can* be const-folded, except that the way LLVM does it
+    // for certain widths doesn't today. We should be able to put this back to
+    // the same check after <https://github.com/llvm/llvm-project/issues/134513>
+    // x86-64-v3: <i64 23, i64 16, i64 54, i64 3>
+    // x86-64-v3: llvm.masked.load
+    // x86-64-v3: %[[R:.+]] = {{.+}}llvm.vector.reduce.add.v4i64
+    // x86-64-v3: ret i64 %[[R]]
+
     let values = [23, 16, 54, 3, 60, 9];
     let mut acc = 0;
     for item in values {
         acc += item;
     }
     acc
 }
+
+#[no_mangle]
+pub fn test_eight() -> usize {
+    // CHECK-LABEL: @test_eight(
+    // CHECK: ret {{i64|i32}} 220
+    let values = [23, 16, 54, 3, 60, 9, 13, 42];
+    let mut acc = 0;
+    for item in values {
+        acc += item;
+    }
+    acc
+}
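
For reference, the expected `ret` constants are just the array sums: 23 + 16 + 54 + 3 + 60 + 9 = 165 for `test`, and with 13 + 42 added on, 220 for `test_eight`.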
