diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index b417513aaa23f..887e27a5c6446 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -146,6 +146,7 @@ #![feature(receiver_trait)] #![feature(set_ptr_value)] #![feature(sized_type_properties)] +#![feature(slice_drain_raw_iter)] #![feature(slice_from_ptr_range)] #![feature(slice_index_methods)] #![feature(slice_ptr_get)] diff --git a/library/alloc/src/vec/drain.rs b/library/alloc/src/vec/drain.rs index f0b63759ac70f..8630eef690c1d 100644 --- a/library/alloc/src/vec/drain.rs +++ b/library/alloc/src/vec/drain.rs @@ -1,7 +1,8 @@ use crate::alloc::{Allocator, Global}; use core::fmt; use core::iter::{FusedIterator, TrustedLen}; -use core::mem::{self, ManuallyDrop, SizedTypeProperties}; +use core::marker::PhantomData; +use core::mem::{ManuallyDrop, SizedTypeProperties}; use core::ptr::{self, NonNull}; use core::slice::{self}; @@ -29,14 +30,15 @@ pub struct Drain< /// Length of tail pub(super) tail_len: usize, /// Current remaining range to remove - pub(super) iter: slice::Iter<'a, T>, + pub(super) iter: slice::DrainRaw<T>, pub(super) vec: NonNull<Vec<T, A>>, + pub(super) phantom: PhantomData<&'a [T]>, } #[stable(feature = "collection_debug", since = "1.17.0")] impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() + f.debug_tuple("Drain").field(&self.as_slice()).finish() } } @@ -55,7 +57,9 @@ impl<'a, T, A: Allocator> Drain<'a, T, A> { #[must_use] #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] pub fn as_slice(&self) -> &[T] { - self.iter.as_slice() + // SAFETY: Restricting the lifetime of the returned slice to the self + // borrow keeps anything else from dropping the elements. + unsafe { self.iter.as_nonnull_slice().as_ref() } } /// Returns a reference to the underlying allocator. @@ -108,8 +112,9 @@ impl<'a, T, A: Allocator> Drain<'a, T, A> { let start = source_vec.len(); let tail = this.tail_start; - let unyielded_len = this.iter.len(); - let unyielded_ptr = this.iter.as_slice().as_ptr(); + let keep_items = this.iter.forget_remaining(); + let unyielded_len = keep_items.len(); + let unyielded_ptr = keep_items.as_mut_ptr(); // ZSTs have no identity, so we don't need to move them around. if !T::IS_ZST { @@ -154,7 +159,7 @@ impl<T, A: Allocator> Iterator for Drain<'_, T, A> { #[inline] fn next(&mut self) -> Option<T> { - self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) }) + self.iter.next() } fn size_hint(&self) -> (usize, Option<usize>) { @@ -166,7 +171,7 @@ impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> { #[inline] fn next_back(&mut self) -> Option<T> { - self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) + self.iter.next_back() } } @@ -195,16 +200,13 @@ impl<T, A: Allocator> Drop for Drain<'_, T, A> { } } - let iter = mem::take(&mut self.iter); - let drop_len = iter.len(); - - let mut vec = self.vec; - if T::IS_ZST { + let drop_len = self.iter.forget_remaining().len(); + // ZSTs have no identity, so we don't need to move them around; we only need to drop the correct amount. // this can be achieved by manipulating the Vec length instead of moving values out from `iter`.
unsafe { - let vec = vec.as_mut(); + let vec = self.vec.as_mut(); let old_len = vec.len(); vec.set_len(old_len + drop_len + self.tail_len); vec.truncate(old_len + self.tail_len); @@ -214,28 +216,9 @@ impl Drop for Drain<'_, T, A> { } // ensure elements are moved back into their appropriate places, even when drop_in_place panics - let _guard = DropGuard(self); + let guard = DropGuard(self); - if drop_len == 0 { - return; - } - - // as_slice() must only be called when iter.len() is > 0 because - // it also gets touched by vec::Splice which may turn it into a dangling pointer - // which would make it and the vec pointer point to different allocations which would - // lead to invalid pointer arithmetic below. - let drop_ptr = iter.as_slice().as_ptr(); - - unsafe { - // drop_ptr comes from a slice::Iter which only gives us a &[T] but for drop_in_place - // a pointer with mutable provenance is necessary. Therefore we must reconstruct - // it from the original vec but also avoid creating a &mut to the front since that could - // invalidate raw pointers to it which some unsafe code might rely on. - let vec_ptr = vec.as_mut().as_mut_ptr(); - let drop_offset = drop_ptr.sub_ptr(vec_ptr); - let to_drop = ptr::slice_from_raw_parts_mut(vec_ptr.add(drop_offset), drop_len); - ptr::drop_in_place(to_drop); - } + guard.0.iter.drop_remaining(); } } diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs index 88aa1b1b0e081..2f33d54311264 100644 --- a/library/alloc/src/vec/in_place_collect.rs +++ b/library/alloc/src/vec/in_place_collect.rs @@ -253,12 +253,14 @@ where { let (src_buf, src_ptr, src_cap, mut dst_buf, dst_end, dst_cap) = unsafe { let inner = iterator.as_inner().as_into_iter(); + let inner_ptr = inner.ptr(); + let inner_end = inner_ptr.add(inner.len()); ( inner.buf, - inner.ptr, + inner_ptr, inner.cap, inner.buf.cast::(), - inner.end as *const T, + inner_end.as_ptr() as *const T, inner.cap * mem::size_of::() / mem::size_of::(), ) }; @@ -275,9 +277,9 @@ where // check InPlaceIterable contract. This is only possible if the iterator advanced the // source pointer at all. 
If it uses unchecked access via TrustedRandomAccess // then the source pointer will stay in its initial position and we can't use it as reference - if src.ptr != src_ptr { + if src.ptr() != src_ptr { debug_assert!( - unsafe { dst_buf.add(len).cast() } <= src.ptr, + unsafe { dst_buf.add(len).cast() } <= src.ptr(), "InPlaceIterable contract violation, write pointer advanced beyond read pointer" ); } diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs index b0226c848332c..8840bd8b352fa 100644 --- a/library/alloc/src/vec/into_iter.rs +++ b/library/alloc/src/vec/into_iter.rs @@ -11,23 +11,14 @@ use core::iter::{ TrustedRandomAccessNoCoerce, }; use core::marker::PhantomData; -use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties}; +use core::mem::ManuallyDrop; +#[cfg(not(no_global_oom_handling))] +use core::mem::SizedTypeProperties; use core::num::NonZero; #[cfg(not(no_global_oom_handling))] use core::ops::Deref; -use core::ptr::{self, NonNull}; -use core::slice::{self}; - -macro non_null { - (mut $place:expr, $t:ident) => {{ - #![allow(unused_unsafe)] // we're sometimes used within an unsafe block - unsafe { &mut *(ptr::addr_of_mut!($place) as *mut NonNull<$t>) } - }}, - ($place:expr, $t:ident) => {{ - #![allow(unused_unsafe)] // we're sometimes used within an unsafe block - unsafe { *(ptr::addr_of!($place) as *const NonNull<$t>) } - }}, -} +use core::ptr::NonNull; +use core::slice::DrainRaw; /// An iterator that moves out of a vector. /// @@ -52,12 +43,7 @@ pub struct IntoIter< // the drop impl reconstructs a RawVec from buf, cap and alloc // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop pub(super) alloc: ManuallyDrop, - pub(super) ptr: NonNull, - /// If T is a ZST, this is actually ptr+len. This encoding is picked so that - /// ptr == end is a quick test for the Iterator being empty, that works - /// for both ZST and non-ZST. - /// For non-ZSTs the pointer is treated as `NonNull` - pub(super) end: *const T, + pub(super) drain: DrainRaw, } #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] @@ -81,7 +67,7 @@ impl IntoIter { /// ``` #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] pub fn as_slice(&self) -> &[T] { - unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) } + unsafe { self.drain.as_nonnull_slice().as_ref() } } /// Returns the remaining items of this iterator as a mutable slice. @@ -99,7 +85,7 @@ impl IntoIter { /// ``` #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] pub fn as_mut_slice(&mut self) -> &mut [T] { - unsafe { &mut *self.as_raw_mut_slice() } + unsafe { self.drain.as_nonnull_slice().as_mut() } } /// Returns a reference to the underlying allocator. @@ -109,10 +95,6 @@ impl IntoIter { &self.alloc } - fn as_raw_mut_slice(&mut self) -> *mut [T] { - ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len()) - } - /// Drops remaining elements and relinquishes the backing allocation. /// This method guarantees it won't panic before relinquishing /// the backing allocation. @@ -130,28 +112,27 @@ impl IntoIter { /// documentation for an overview. #[cfg(not(no_global_oom_handling))] pub(super) fn forget_allocation_drop_remaining(&mut self) { - let remaining = self.as_raw_mut_slice(); - // overwrite the individual fields instead of creating a new // struct and then overwriting &mut self. 
// this creates less assembly self.cap = 0; self.buf = RawVec::NEW.non_null(); - self.ptr = self.buf; - self.end = self.buf.as_ptr(); // Dropping the remaining elements can panic, so this needs to be // done only after updating the other fields. - unsafe { - ptr::drop_in_place(remaining); - } + self.drain.drop_remaining(); + } + + /// Returns a pointer to the start of the part of the buffer that has not yet been dropped. + #[inline] + pub(crate) fn ptr(&self) -> NonNull { + self.drain.as_nonnull_slice().as_non_null_ptr() } /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed. + #[cfg(not(no_global_oom_handling))] pub(crate) fn forget_remaining_elements(&mut self) { - // For the ZST case, it is crucial that we mutate `end` here, not `ptr`. - // `ptr` must stay aligned, while `end` may be unaligned. - self.end = self.ptr.as_ptr(); + self.drain.forget_remaining(); } #[cfg(not(no_global_oom_handling))] @@ -167,17 +148,19 @@ impl IntoIter { // Taking `alloc` is ok because nothing else is going to look at it, // since our `Drop` impl isn't going to run so there's no more code. unsafe { - let buf = this.buf.as_ptr(); - let initialized = if T::IS_ZST { + let buf = this.buf; + let len = this.drain.len(); + let start = if T::IS_ZST { // All the pointers are the same for ZSTs, so it's fine to // say that they're all at the beginning of the "allocation". - 0..this.len() + 0 } else { - this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf) + this.ptr().sub_ptr(buf) }; + let initialized = start..(start + len); let cap = this.cap; let alloc = ManuallyDrop::take(&mut this.alloc); - VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc) + VecDeque::from_contiguous_raw_parts_in(buf.as_ptr(), initialized, cap, alloc) } } } @@ -200,51 +183,17 @@ impl Iterator for IntoIter { #[inline] fn next(&mut self) -> Option { - let ptr = if T::IS_ZST { - if self.ptr.as_ptr() == self.end as *mut T { - return None; - } - // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by - // reducing the `end`. - self.end = self.end.wrapping_byte_sub(1); - self.ptr - } else { - if self.ptr == non_null!(self.end, T) { - return None; - } - let old = self.ptr; - self.ptr = unsafe { old.add(1) }; - old - }; - Some(unsafe { ptr.read() }) + self.drain.next() } #[inline] fn size_hint(&self) -> (usize, Option) { - let exact = if T::IS_ZST { - self.end.addr().wrapping_sub(self.ptr.as_ptr().addr()) - } else { - unsafe { non_null!(self.end, T).sub_ptr(self.ptr) } - }; - (exact, Some(exact)) + self.drain.size_hint() } #[inline] fn advance_by(&mut self, n: usize) -> Result<(), NonZero> { - let step_size = self.len().min(n); - let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size); - if T::IS_ZST { - // See `next` for why we sub `end` here. 
- self.end = self.end.wrapping_byte_sub(step_size); - } else { - // SAFETY: the min() above ensures that step_size is in bounds - self.ptr = unsafe { self.ptr.add(step_size) }; - } - // SAFETY: the min() above ensures that step_size is in bounds - unsafe { - ptr::drop_in_place(to_drop); - } - NonZero::new(n - step_size).map_or(Ok(()), Err) + self.drain.advance_by(n) } #[inline] @@ -253,46 +202,17 @@ impl Iterator for IntoIter { } #[inline] - fn next_chunk(&mut self) -> Result<[T; N], core::array::IntoIter> { - let mut raw_ary = MaybeUninit::uninit_array(); - - let len = self.len(); - - if T::IS_ZST { - if len < N { - self.forget_remaining_elements(); - // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct - return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) }); - } - - self.end = self.end.wrapping_byte_sub(N); - // Safety: ditto - return Ok(unsafe { raw_ary.transpose().assume_init() }); - } - - if len < N { - // Safety: `len` indicates that this many elements are available and we just checked that - // it fits into the array. - unsafe { - ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len); - self.forget_remaining_elements(); - return Err(array::IntoIter::new_unchecked(raw_ary, 0..len)); - } - } - - // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize - // the array. - return unsafe { - ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N); - self.ptr = self.ptr.add(N); - Ok(raw_ary.transpose().assume_init()) - }; + fn next_chunk(&mut self) -> Result<[T; N], array::IntoIter> { + self.drain.next_chunk() } unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item where Self: TrustedRandomAccessNoCoerce, { + // FIXME: for some reason, just `self.drain.__iterator_get_unchecked(i)` + // never worked for me. If you know a way to fix that, please do. + // SAFETY: the caller must guarantee that `i` is in bounds of the // `Vec`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)` // is guaranteed to pointer to an element of the `Vec` and @@ -301,7 +221,7 @@ impl Iterator for IntoIter { // Also note the implementation of `Self: TrustedRandomAccess` requires // that `T: Copy` so reading elements from the buffer doesn't invalidate // them for `Drop`. - unsafe { self.ptr.add(i).read() } + unsafe { self.ptr().add(i).read() } } } @@ -309,54 +229,22 @@ impl Iterator for IntoIter { impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { - if T::IS_ZST { - if self.ptr.as_ptr() == self.end as *mut _ { - return None; - } - // See above for why 'ptr.offset' isn't used - self.end = self.end.wrapping_byte_sub(1); - // Note that even though this is next_back() we're reading from `self.ptr`, not - // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`, - // so the end pointer may not be suitably aligned for T. 
- Some(unsafe { ptr::read(self.ptr.as_ptr()) }) - } else { - if self.ptr == non_null!(self.end, T) { - return None; - } - unsafe { - self.end = self.end.sub(1); - Some(ptr::read(self.end)) - } - } + self.drain.next_back() } #[inline] fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero> { - let step_size = self.len().min(n); - if T::IS_ZST { - // SAFETY: same as for advance_by() - self.end = self.end.wrapping_byte_sub(step_size); - } else { - // SAFETY: same as for advance_by() - self.end = unsafe { self.end.sub(step_size) }; - } - let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size); - // SAFETY: same as for advance_by() - unsafe { - ptr::drop_in_place(to_drop); - } - NonZero::new(n - step_size).map_or(Ok(()), Err) + self.drain.advance_back_by(n) } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { fn is_empty(&self) -> bool { - if T::IS_ZST { - self.ptr.as_ptr() == self.end as *mut _ - } else { - self.ptr == non_null!(self.end, T) - } + self.drain.is_empty() + } + fn len(&self) -> usize { + self.drain.len() } } @@ -440,9 +328,7 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { let guard = DropGuard(self); // destroy the remaining elements - unsafe { - ptr::drop_in_place(guard.0.as_raw_mut_slice()); - } + guard.0.drain.drop_remaining(); // now `guard` will be dropped and do the rest } } diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index b2e22d8715a8b..74ef3b9e00598 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -64,7 +64,7 @@ use core::marker::PhantomData; use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; use core::ops::{self, Index, IndexMut, Range, RangeBounds}; use core::ptr::{self, NonNull}; -use core::slice::{self, SliceIndex}; +use core::slice::{self, DrainRaw, SliceIndex}; use crate::alloc::{Allocator, Global}; use crate::borrow::{Cow, ToOwned}; @@ -1389,7 +1389,11 @@ impl Vec { pub fn as_mut_ptr(&mut self) -> *mut T { // We shadow the slice method of the same name to avoid going through // `deref_mut`, which creates an intermediate reference. - self.buf.ptr() + self.as_nonnull_ptr().as_ptr() + } + + fn as_nonnull_ptr(&mut self) -> NonNull { + self.buf.non_null() } /// Returns a reference to the underlying allocator. 
@@ -2199,12 +2203,13 @@ impl Vec { unsafe { // set self.vec length's to start, to be safe in case Drain is leaked self.set_len(start); - let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start); + let drain = DrainRaw::from_parts(self.as_nonnull_ptr().add(start), end - start); Drain { tail_start: end, tail_len: len - end, - iter: range_slice.iter(), + iter: drain, vec: NonNull::from(self), + phantom: PhantomData, } } } @@ -3000,14 +3005,10 @@ impl IntoIterator for Vec { let me = ManuallyDrop::new(self); let alloc = ManuallyDrop::new(ptr::read(me.allocator())); let buf = me.buf.non_null(); - let begin = buf.as_ptr(); - let end = if T::IS_ZST { - begin.wrapping_byte_add(me.len()) - } else { - begin.add(me.len()) as *const T - }; + let len = me.len(); let cap = me.buf.capacity(); - IntoIter { buf, phantom: PhantomData, cap, alloc, ptr: buf, end } + let drain = DrainRaw::from_parts(buf, len); + IntoIter { buf, phantom: PhantomData, cap, alloc, drain } } } } diff --git a/library/alloc/src/vec/spec_from_iter.rs b/library/alloc/src/vec/spec_from_iter.rs index 6646ae7bccb7a..80ffa1f17c235 100644 --- a/library/alloc/src/vec/spec_from_iter.rs +++ b/library/alloc/src/vec/spec_from_iter.rs @@ -44,12 +44,12 @@ impl SpecFromIter> for Vec { // than creating it through the generic FromIterator implementation would. That limitation // is not strictly necessary as Vec's allocation behavior is intentionally unspecified. // But it is a conservative choice. - let has_advanced = iterator.buf != iterator.ptr; + let has_advanced = iterator.buf != iterator.ptr(); if !has_advanced || iterator.len() >= iterator.cap / 2 { unsafe { let it = ManuallyDrop::new(iterator); if has_advanced { - ptr::copy(it.ptr.as_ptr(), it.buf.as_ptr(), it.len()); + ptr::copy(it.ptr().as_ptr(), it.buf.as_ptr(), it.len()); } return Vec::from_nonnull(it.buf, it.len(), it.cap); } diff --git a/library/alloc/src/vec/splice.rs b/library/alloc/src/vec/splice.rs index 852fdcc3f5ce7..c1686cf1807aa 100644 --- a/library/alloc/src/vec/splice.rs +++ b/library/alloc/src/vec/splice.rs @@ -59,7 +59,7 @@ impl Drop for Splice<'_, I, A> { // Which means we can replace the slice::Iter with pointers that won't point to deallocated // memory, so that Drain::drop is still allowed to call iter.len(), otherwise it would break // the ptr.sub_ptr contract. - self.drain.iter = (&[]).iter(); + self.drain.iter = Default::default(); unsafe { if self.drain.tail_len == 0 { diff --git a/library/core/src/array/drain.rs b/library/core/src/array/drain.rs index 5fadf907b6219..a21a62a1c5164 100644 --- a/library/core/src/array/drain.rs +++ b/library/core/src/array/drain.rs @@ -1,7 +1,8 @@ use crate::iter::{TrustedLen, UncheckedIterator}; +use crate::marker::PhantomData; use crate::mem::ManuallyDrop; -use crate::ptr::drop_in_place; -use crate::slice; +use crate::ptr::NonNull; +use crate::slice::{self, DrainRaw}; /// A situationally-optimized version of `array.into_iter().for_each(func)`. /// @@ -21,37 +22,29 @@ pub(crate) fn drain_array_with( func: impl for<'a> FnOnce(Drain<'a, T>) -> R, ) -> R { let mut array = ManuallyDrop::new(array); - // SAFETY: Now that the local won't drop it, it's ok to construct the `Drain` which will. - let drain = Drain(array.iter_mut()); + // SAFETY: Now that the local won't drop it, it's ok to construct the `DrainRaw` which will. + // We ensure via the lifetime that it can't be used after the function returns, + // and thus the local `array` will always exist while iterating it. 
+ let raw = unsafe { DrainRaw::from_parts(NonNull::new_unchecked(array.as_mut_ptr()), N) }; + let drain = Drain(raw, PhantomData); func(drain) } /// See [`drain_array_with`] -- this is `pub(crate)` only so it's allowed to be /// mentioned in the signature of that method. (Otherwise it hits `E0446`.) -// INVARIANT: It's ok to drop the remainder of the inner iterator. -pub(crate) struct Drain<'a, T>(slice::IterMut<'a, T>); - -impl<T> Drop for Drain<'_, T> { - fn drop(&mut self) { - // SAFETY: By the type invariant, we're allowed to drop all these. - unsafe { drop_in_place(self.0.as_mut_slice()) } - } -} +pub(crate) struct Drain<'a, T>(slice::DrainRaw<T>, PhantomData<&'a mut [T]>); impl<T> Iterator for Drain<'_, T> { type Item = T; #[inline] fn next(&mut self) -> Option<T> { - let p: *const T = self.0.next()?; - // SAFETY: The iterator was already advanced, so we won't drop this later. - Some(unsafe { p.read() }) + self.0.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { - let n = self.len(); - (n, Some(n)) + self.0.size_hint() } } @@ -69,8 +62,6 @@ impl<T> UncheckedIterator for Drain<'_, T> { unsafe fn next_unchecked(&mut self) -> T { // SAFETY: `Drain` is 1:1 with the inner iterator, so if the caller promised // that there's an element left, the inner iterator has one too. - let p: *const T = unsafe { self.0.next_unchecked() }; - // SAFETY: The iterator was already advanced, so we won't drop this later. - unsafe { p.read() } + unsafe { self.0.next_unchecked() } } } diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 6925a7d1da1fb..bd1284b4a041f 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -224,6 +224,7 @@ #![feature(doc_cfg)] #![feature(doc_cfg_hide)] #![feature(doc_notable_trait)] +#![feature(dropck_eyepatch)] #![feature(effects)] #![feature(extern_types)] #![feature(f128)] diff --git a/library/core/src/slice/drain.rs b/library/core/src/slice/drain.rs new file mode 100644 index 0000000000000..6ed78d9a50f38 --- /dev/null +++ b/library/core/src/slice/drain.rs @@ -0,0 +1,265 @@ +use crate::array; +use crate::fmt; +use crate::iter::{ + FusedIterator, TrustedFused, TrustedLen, TrustedRandomAccessNoCoerce, UncheckedIterator, +}; +use crate::mem::MaybeUninit; +use crate::num::NonZero; +use crate::ptr::NonNull; +use crate::slice::NonNullIter; + +/// An iterator which takes ownership of items out of a slice, dropping any +/// remaining items when the iterator drops. +/// +/// Note that, like a raw pointer, it's **up to you** to get the lifetime right. +/// In some ways it's actually harder to get right, as the iterator interface +/// appears safe, but as you promise when creating one of these, you must still +/// ensure that the memory it refers to is usable the whole time this lives. +/// +/// Ideally you won't be using this directly, but rather a version encapsulated +/// in a safer interface, like `vec::IntoIter`. +/// +/// This raw version may be removed in favour of a future language feature, +/// such as using `unsafe<'a> Drain<'a, T>` instead of `DrainRaw<T>`. +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +pub struct DrainRaw<T>(NonNullIter<T>); + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +// `may_dangle` is needed for compatibility with `vec::IntoIter` +unsafe impl<#[may_dangle] T> Drop for DrainRaw<T> { + fn drop(&mut self) { + // When used in things like `vec::IntoIter`, the memory over which we're + // iterating might already have been deallocated by the time this drop runs.
+ // At the time of writing, Miri doesn't like `sub_ptr` between pointers + // into a deallocated allocation. So checking empty first -- which just + // needs pointer equality -- avoids that issue. + if !self.is_empty() { + let slice = self.as_nonnull_slice(); + // SAFETY: By type invariant, we're allowed to drop the rest of the items. + unsafe { slice.drop_in_place() }; + } + } +} + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +impl<T: fmt::Debug> fmt::Debug for DrainRaw<T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("DrainRaw").field(&self.0.make_shortlived_slice()).finish() + } +} + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +impl<T> Default for DrainRaw<T> { + /// Creates an empty slice iterator. + /// + /// ``` + /// # #![feature(slice_drain_raw_iter)] + /// # use core::slice::DrainRaw; + /// let iter: DrainRaw<u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + // SAFETY: dangling is sufficiently-aligned so zero-length is always fine + unsafe { DrainRaw::from_parts(NonNull::dangling(), 0) } + } +} + +impl<T> DrainRaw<T> { + /// Creates a new iterator which moves the `len` items starting at `ptr` + /// while it's iterated, or drops them when the iterator is dropped. + /// + /// # Safety + /// + /// - `ptr` through `ptr.add(len)` must lie within a single allocated object + /// such that it's sound to `offset` through it. + /// - All those elements must be readable, including being sufficiently aligned. + /// - All those elements are valid for dropping. + #[unstable(feature = "slice_drain_raw_iter", issue = "none")] + #[inline] + pub unsafe fn from_parts(ptr: NonNull<T>, len: usize) -> Self { + // SAFETY: this function's safety conditions are stricter than NonNullIter, + // and include allowing the type to drop the items in `Drop`. + Self(unsafe { NonNullIter::from_parts(ptr, len) }) + } + + /// Returns a pointer to the remaining elements of the iterator + #[unstable(feature = "slice_drain_raw_iter", issue = "none")] + #[inline] + pub fn as_nonnull_slice(&self) -> NonNull<[T]> { + self.0.make_nonnull_slice() + } + + /// Equivalent to exhausting the iterator normally, but faster. + #[unstable(feature = "slice_drain_raw_iter", issue = "none")] + #[inline] + pub fn drop_remaining(&mut self) { + let all = self.forget_remaining(); + // SAFETY: We "forgot" these elements so our `Drop` won't drop them, + // so it's ok to drop them here without risking double-frees. + unsafe { all.drop_in_place() } + } + + /// Exhaust the iterator without actually dropping the rest of the items. + /// + /// Returns the forgotten items. + #[unstable(feature = "slice_drain_raw_iter", issue = "none")] + #[inline] + pub fn forget_remaining(&mut self) -> NonNull<[T]> { + let all = self.as_nonnull_slice(); + self.0.exhaust(); + all + } +} + +impl<T> UncheckedIterator for DrainRaw<T> { + #[inline] + unsafe fn next_unchecked(&mut self) -> T { + // SAFETY: we're a 1:1 mapping of the inner iterator, so if the caller + // proved we have another item, the inner iterator has another one too. + // Also, the `next_unchecked` means the returned item is no longer part + // of the inner iterator, and thus `read`ing it here -- and giving it + // to the caller who will (probably) drop it -- is ok.
+ unsafe { self.0.next_unchecked().read() } + } +} + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +impl<T> Iterator for DrainRaw<T> { + type Item = T; + + #[inline] + fn next(&mut self) -> Option<T> { + match self.0.next() { + // SAFETY: The `next` means the returned item is no longer part of + // the inner iterator, and thus `read`ing it here -- and giving it + // to the caller who will (probably) drop it -- is ok. + Some(ptr) => Some(unsafe { ptr.read() }), + None => None, + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option<usize>) { + self.0.size_hint() + } + + #[inline] + fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> { + let clamped = self.len().min(n); + // SAFETY: By construction, `clamped` is always in-bounds. + // The skipped elements are removed from the inner iterator so won't be + // dropped in `Drop`, so dropping them here is fine. + unsafe { + let to_drop = self.0.skip_forward_unchecked(clamped); + to_drop.drop_in_place(); + } + NonZero::new(n - clamped).map_or(Ok(()), Err) + } + + #[inline] + fn count(self) -> usize { + self.len() + } + + #[inline] + fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> { + let len = self.len(); + let clamped = len.min(N); + + // SAFETY: By construction, `clamped` is always in-bounds. + let to_copy = unsafe { self.0.skip_forward_unchecked(clamped) }; + if len >= N { + // SAFETY: If we have more elements than were requested, they can be + // read directly because arrays need no extra alignment. + Ok(unsafe { to_copy.cast::<[T; N]>().read() }) + } else { + let mut raw_ary = MaybeUninit::uninit_array(); + // SAFETY: If we don't have enough elements left, then copy all the + // ones we do have into the local array, which cannot overlap because + // new locals are always distinct storage. + Err(unsafe { + MaybeUninit::<T>::slice_as_mut_ptr(&mut raw_ary) + .copy_from_nonoverlapping(to_copy.as_mut_ptr(), len); + array::IntoIter::new_unchecked(raw_ary, 0..len) + }) + } + } + + unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item + where + Self: TrustedRandomAccessNoCoerce, + { + // SAFETY: the caller must guarantee that `i` is in bounds of the slice, + // so the `get_unchecked_mut(i)` is guaranteed to point to an element + // and thus guaranteed to be valid to dereference. + // + // Also note the implementation of `Self: TrustedRandomAccess` requires + // that `T: Copy` so reading elements from the buffer doesn't invalidate + // them for `Drop`. + unsafe { self.as_nonnull_slice().get_unchecked_mut(i).read() } + } +} + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +impl<T> DoubleEndedIterator for DrainRaw<T> { + #[inline] + fn next_back(&mut self) -> Option<T> { + match self.0.next_back() { + // SAFETY: The `next_back` means the returned item is no longer part of + // the inner iterator, and thus `read`ing it here -- and giving it + // to the caller who will (probably) drop it -- is ok. + Some(ptr) => Some(unsafe { ptr.read() }), + None => None, + } + } + + #[inline] + fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> { + let clamped = self.len().min(n); + // SAFETY: By construction, `clamped` is always in-bounds. + // The skipped elements are removed from the inner iterator so won't be + // dropped in `Drop`, so dropping them here is fine.
+ unsafe { + let to_drop = self.0.skip_backward_unchecked(clamped); + to_drop.drop_in_place(); + } + NonZero::new(n - clamped).map_or(Ok(()), Err) + } +} + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +impl<T> ExactSizeIterator for DrainRaw<T> { + fn is_empty(&self) -> bool { + self.0.is_empty() + } + fn len(&self) -> usize { + self.0.len() + } } + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +impl<T> FusedIterator for DrainRaw<T> {} + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +#[doc(hidden)] +unsafe impl<T> TrustedFused for DrainRaw<T> {} + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +unsafe impl<T> TrustedLen for DrainRaw<T> {} + +#[doc(hidden)] +#[unstable(issue = "none", feature = "std_internals")] +#[rustc_unsafe_specialization_marker] +pub trait NonDrop {} + +// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr +// and thus we can't implement drop-handling +#[unstable(issue = "none", feature = "std_internals")] +impl<T: Copy> NonDrop for T {} + +// TrustedRandomAccess (without NoCoerce) must not be implemented because +// subtypes/supertypes of `T` might not be `NonDrop` +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +unsafe impl<T: NonDrop> TrustedRandomAccessNoCoerce for DrainRaw<T> { + const MAY_HAVE_SIDE_EFFECT: bool = false; +} diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index d7d4f90c1a538..b6558b88b1f79 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -75,7 +75,7 @@ pub struct Iter<'a, T: 'a> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("Iter").field(&self.as_slice()).finish() + f.debug_tuple("Iter").field(&self.make_shortlived_slice()).finish() } } @@ -126,11 +126,32 @@ impl<'a, T> Iter<'a, T> { #[stable(feature = "iter_to_slice", since = "1.4.0")] #[inline] pub fn as_slice(&self) -> &'a [T] { - self.make_slice() + // SAFETY: the type invariant guarantees the pointer represents a valid slice + unsafe { self.make_nonnull_slice().as_ref() } + } + + #[inline] + unsafe fn non_null_to_item(p: NonNull<T>) -> <Self as Iterator>::Item { + // SAFETY: the type invariant guarantees the pointer represents a valid reference + unsafe { p.as_ref() } + } +} + +#[stable(feature = "default_iters", since = "1.70.0")] +impl<T> Default for Iter<'_, T> { + /// Creates an empty slice iterator. + /// + /// ``` + /// # use core::slice::Iter; + /// let iter: Iter<'_, u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + (&[]).into_iter() } } -iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, as_ref, { +iterator!
{struct Iter<'a, T> => *const T, &'a T, { fn is_sorted_by<F>(self, mut compare: F) -> bool where Self: Sized, @@ -198,7 +219,7 @@ pub struct IterMut<'a, T: 'a> { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("IterMut").field(&self.make_slice()).finish() + f.debug_tuple("IterMut").field(&self.make_shortlived_slice()).finish() } } @@ -304,7 +325,8 @@ impl<'a, T> IterMut<'a, T> { #[stable(feature = "slice_iter_mut_as_slice", since = "1.53.0")] #[inline] pub fn as_slice(&self) -> &[T] { - self.make_slice() + // SAFETY: the type invariant guarantees the pointer represents a valid slice + unsafe { self.make_nonnull_slice().as_ref() } } /// Views the underlying data as a mutable subslice of the original data. @@ -347,6 +369,26 @@ impl<'a, T> IterMut<'a, T> { // for `from_raw_parts_mut` are fulfilled. unsafe { from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) } } + + #[inline] + unsafe fn non_null_to_item(mut p: NonNull<T>) -> <Self as Iterator>::Item { + // SAFETY: the type invariant guarantees the pointer represents a valid item + unsafe { p.as_mut() } + } +} + +#[stable(feature = "default_iters", since = "1.70.0")] +impl<T> Default for IterMut<'_, T> { + /// Creates an empty slice iterator. + /// + /// ``` + /// # use core::slice::IterMut; + /// let iter: IterMut<'_, u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// ``` + fn default() -> Self { + (&mut []).into_iter() + } } #[stable(feature = "slice_iter_mut_as_slice", since = "1.53.0")] @@ -364,7 +406,79 @@ impl<T> AsRef<[T]> for IterMut<'_, T> { // } // } -iterator! {struct IterMut -> *mut T, &'a mut T, mut, {mut}, as_mut, {}} +iterator! {struct IterMut<'a, T> => *mut T, &'a mut T, {}} + +/// Iterator over all the `NonNull` pointers to the elements of a slice. +#[must_use = "iterators are lazy and do nothing unless consumed"] +pub struct NonNullIter<T> { + /// The pointer to the next element to return, or the past-the-end location + /// if the iterator is empty. + /// + /// This address will be used for all ZST elements, never changed. + ptr: NonNull<T>, + /// For non-ZSTs, the non-null pointer to the past-the-end element. + /// + /// For ZSTs, this is `ptr::without_provenance(len)`. + end_or_len: *const T, +} + +impl<T> fmt::Debug for NonNullIter<T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("NonNullIter").field(&self.make_shortlived_slice()).finish() + } +} + +impl<T> NonNullIter<T> { + /// Creates a new iterator over the `len` items starting at `ptr` + /// + /// # Safety + /// + /// - `ptr` through `ptr.add(len)` must lie within a single allocated object + /// such that it's sound to `offset` through it. + /// - All those elements must be readable. + /// - The caller must uphold both conditions for as long as the iterator is in use. + #[inline] + pub unsafe fn from_parts(ptr: NonNull<T>, len: usize) -> Self { + // SAFETY: There are several things here: + // + // `ptr` has been obtained by `slice.as_ptr()` where `slice` is a valid + // reference, thus it is non-null and safe to use and pass to + // `NonNull::new_unchecked`. + // + // Adding `slice.len()` to the starting pointer gives a pointer + // at the end of `slice`. `end` will never be dereferenced, only checked + // for direct pointer equality with `ptr` to check if the iterator is + // done. + // + // In the case of a ZST, the end pointer is just the length. It's never + // used as a pointer at all, and thus it's fine to have no provenance.
+ // + // See the `next_unchecked!` and `is_empty!` macros as well as the + // `post_inc_start` method for more information. + unsafe { + let end_or_len = + if T::IS_ZST { without_provenance_mut(len) } else { ptr.as_ptr().add(len) }; + + Self { ptr, end_or_len } + } + } + + #[inline] + pub fn exhaust(&mut self) { + if T::IS_ZST { + self.end_or_len = without_provenance_mut(0); + } else { + self.end_or_len = self.ptr.as_ptr(); + } + } + + #[inline] + fn non_null_to_item(p: NonNull<T>) -> <Self as Iterator>::Item { + p + } +} + +iterator! {struct NonNullIter<T> => *const T, NonNull<T>, {}} /// An internal abstraction over the splitting iterators, so that /// splitn, splitn_mut etc can be implemented once. diff --git a/library/core/src/slice/iter/macros.rs b/library/core/src/slice/iter/macros.rs index 0b8ff5cc01242..ef83ea65fb903 100644 --- a/library/core/src/slice/iter/macros.rs +++ b/library/core/src/slice/iter/macros.rs @@ -63,14 +63,12 @@ macro_rules! len { // The shared definition of the `Iter` and `IterMut` iterators macro_rules! iterator { ( - struct $name:ident -> $ptr:ty, + struct $name:ty => $ptr:ty, $elem:ty, - $raw_mut:tt, - {$( $mut_:tt )?}, - $into_ref:ident, {$($extra:tt)*} ) => { - impl<'a, T> $name<'a, T> { + #[allow(unused_lifetimes)] + impl<'a, T> $name { /// Returns the last element and moves the end of the iterator backwards by 1. /// /// # Safety /// /// The iterator must not be empty #[inline] unsafe fn next_back_unchecked(&mut self) -> $elem { // SAFETY: the caller promised it's not empty, so // the offsetting is in-bounds and there's an element to return. - unsafe { self.pre_dec_end(1).$into_ref() } + unsafe { Self::non_null_to_item(self.pre_dec_end(1)) } } // Helper function for creating a slice from the iterator. - #[inline(always)] - fn make_slice(&self) -> &'a [T] { - // SAFETY: the iterator was created from a slice with pointer - // `self.ptr` and length `len!(self)`. This guarantees that all - // the prerequisites for `from_raw_parts` are fulfilled. - unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) } + #[inline] + pub(crate) fn make_nonnull_slice(&self) -> NonNull<[T]> { + NonNull::slice_from_raw_parts(self.ptr, len!(self)) + } + + #[inline] + pub(crate) fn make_shortlived_slice<'b>(&'b self) -> &'b [T] { + // SAFETY: Everything expanded with this macro is readable while + // the iterator exists and is unchanged, so by tying this to the + // shorter-than-`'a` self borrow we can make this safe to call. + // (Elision would be fine here, but using `'b` for emphasis.) + unsafe { self.make_nonnull_slice().as_ref() } } // Helper function for moving the start of the iterator forwards by `offset` elements, @@ -131,10 +135,31 @@ macro_rules! iterator { }, ) } + + // This is not used on every type that uses this macro, but is more + // convenient to implement here so it can use `post_inc_start`. + #[allow(dead_code)] + #[inline] + pub(crate) unsafe fn skip_forward_unchecked(&mut self, offset: usize) -> NonNull<[T]> { + // SAFETY: The caller guarantees the provided offset is in-bounds. + let old_begin = unsafe { self.post_inc_start(offset) }; + NonNull::slice_from_raw_parts(old_begin, offset) + } + + // This is not used on every type that uses this macro, but is more + // convenient to implement here so it can use `pre_dec_end`. + #[allow(dead_code)] + #[inline] + pub(crate) unsafe fn skip_backward_unchecked(&mut self, offset: usize) -> NonNull<[T]> { + // SAFETY: The caller guarantees the provided offset is in-bounds.
+ let new_end = unsafe { self.pre_dec_end(offset) }; + NonNull::slice_from_raw_parts(new_end, offset) + } } + #[allow(unused_lifetimes)] #[stable(feature = "rust1", since = "1.0.0")] - impl ExactSizeIterator for $name<'_, T> { + impl<'a, T> ExactSizeIterator for $name { #[inline(always)] fn len(&self) -> usize { len!(self) @@ -146,8 +171,9 @@ macro_rules! iterator { } } + #[allow(unused_lifetimes)] #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, T> Iterator for $name<'a, T> { + impl<'a, T> Iterator for $name { type Item = $elem; #[inline] @@ -227,7 +253,7 @@ macro_rules! iterator { loop { // SAFETY: the loop iterates `i in 0..len`, which always is in bounds of // the slice allocation - acc = f(acc, unsafe { & $( $mut_ )? *self.ptr.add(i).as_ptr() }); + acc = f(acc, unsafe { Self::non_null_to_item(self.ptr.add(i)) }); // SAFETY: `i` can't overflow since it'll only reach usize::MAX if the // slice had that length, in which case we'll break out of the loop // after the increment @@ -378,14 +404,15 @@ macro_rules! iterator { // that will access this subslice are called, so it is valid // for the returned reference to be mutable in the case of // `IterMut` - unsafe { & $( $mut_ )? * self.ptr.as_ptr().add(idx) } + unsafe { Self::non_null_to_item(self.ptr.add(idx)) } } $($extra)* } + #[allow(unused_lifetimes)] #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, T> DoubleEndedIterator for $name<'a, T> { + impl<'a, T> DoubleEndedIterator for $name { #[inline] fn next_back(&mut self) -> Option<$elem> { // could be implemented with slices, but this avoids bounds checks @@ -427,35 +454,24 @@ macro_rules! iterator { } } + #[allow(unused_lifetimes)] #[stable(feature = "fused", since = "1.26.0")] - impl FusedIterator for $name<'_, T> {} + impl<'a, T> FusedIterator for $name {} + #[allow(unused_lifetimes)] #[unstable(feature = "trusted_len", issue = "37572")] - unsafe impl TrustedLen for $name<'_, T> {} + unsafe impl<'a, T> TrustedLen for $name {} - impl<'a, T> UncheckedIterator for $name<'a, T> { + #[allow(unused_lifetimes)] + impl<'a, T> UncheckedIterator for $name { #[inline] unsafe fn next_unchecked(&mut self) -> $elem { // SAFETY: The caller promised there's at least one more item. unsafe { - self.post_inc_start(1).$into_ref() + Self::non_null_to_item(self.post_inc_start(1)) } } } - - #[stable(feature = "default_iters", since = "1.70.0")] - impl Default for $name<'_, T> { - /// Creates an empty slice iterator. - /// - /// ``` - #[doc = concat!("# use core::slice::", stringify!($name), ";")] - #[doc = concat!("let iter: ", stringify!($name<'_, u8>), " = Default::default();")] - /// assert_eq!(iter.len(), 0); - /// ``` - fn default() -> Self { - (& $( $mut_ )? 
[]).into_iter() - } - } } } diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index 6e1ba74f72b33..66042f54f9730 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -35,6 +35,7 @@ pub mod sort; mod ascii; mod cmp; +mod drain; pub(crate) mod index; mod iter; mod raw; @@ -71,6 +72,11 @@ pub use iter::ArrayWindows; #[stable(feature = "slice_group_by", since = "1.77.0")] pub use iter::{ChunkBy, ChunkByMut}; +use iter::NonNullIter; + +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] +pub use drain::DrainRaw; + #[stable(feature = "split_inclusive", since = "1.51.0")] pub use iter::{SplitInclusive, SplitInclusiveMut}; diff --git a/tests/codegen/vec-iter.rs b/tests/codegen/vec-iter.rs index 310680969c4fe..750964e754aad 100644 --- a/tests/codegen/vec-iter.rs +++ b/tests/codegen/vec-iter.rs @@ -1,8 +1,11 @@ //@ compile-flags: -O +//@ min-llvm-version: 18 (which added `dead_on_unwind`) #![crate_type = "lib"] #![feature(exact_size_is_empty)] +#![feature(iter_advance_by)] +#![feature(iter_next_chunk)] -use std::vec; +use std::{array, vec}; // CHECK-LABEL: @vec_iter_len_nonnull #[no_mangle] @@ -56,3 +59,60 @@ pub fn vec_iter_next_back_nonnull(it: &mut vec::IntoIter<u8>) -> Option<u8> { // CHECK: ret it.next_back() } + +// CHECK-LABEL: @vec_iter_next_transfers_ownership #[no_mangle] +pub fn vec_iter_next_transfers_ownership(it: &mut vec::IntoIter<Box<u32>>) -> Option<Box<u32>> { + // CHECK-NOT: __rust_dealloc + it.next() +} + +// CHECK-LABEL: @vec_iter_advance_drops_item +#[no_mangle] +pub fn vec_iter_advance_drops_item(it: &mut vec::IntoIter<Box<u32>>) { + // CHECK-NOT: __rust_dealloc + // CHECK: call void @__rust_dealloc + // CHECK-SAME: noundef 4 + // CHECK-SAME: noundef 4 + // CHECK-NOT: __rust_dealloc + _ = it.advance_by(1); +} + +// CHECK-LABEL: @vec_iter_next_chunk_short +// CHECK-SAME: ptr{{.+}}%[[RET:.+]], +#[no_mangle] +pub fn vec_iter_next_chunk_short( + it: &mut vec::IntoIter<u8>, +) -> Result<[u8; 4], array::IntoIter<u8, 4>> { + // CHECK-NOT: alloca + // CHECK: %[[ACTUAL_LEN:.+]] = sub nuw + + // CHECK: %[[OUT1:.+]] = getelementptr inbounds i8, ptr %[[RET]] + // CHECK: call void @llvm.memcpy{{.+}}(ptr{{.+}}%[[OUT1]],{{.+}} %[[ACTUAL_LEN]], i1 false) + // CHECK: br + + // CHECK: %[[FULL:.+]] = load i32, + // CHECK: %[[OUT2:.+]] = getelementptr inbounds i8, ptr %[[RET]] + // CHECK: store i32 %[[FULL]], ptr %[[OUT2]] + // CHECK: br + it.next_chunk::<4>() +} + +// CHECK-LABEL: @vec_iter_next_chunk_long +// CHECK-SAME: ptr{{.+}}%[[RET:.+]], +#[no_mangle] +pub fn vec_iter_next_chunk_long( + it: &mut vec::IntoIter<u8>, +) -> Result<[u8; 123], array::IntoIter<u8, 123>> { + // CHECK-NOT: alloca + // CHECK: %[[ACTUAL_LEN:.+]] = sub nuw + + // CHECK: %[[OUT1:.+]] = getelementptr inbounds i8, ptr %[[RET]] + // CHECK: call void @llvm.memcpy{{.+}}(ptr{{.+}}%[[OUT1]],{{.+}} %[[ACTUAL_LEN]], i1 false) + // CHECK: br + + // CHECK: %[[OUT2:.+]] = getelementptr inbounds i8, ptr %[[RET]] + // CHECK: call void @llvm.memcpy{{.+}}(ptr{{.+}}%[[OUT2]],{{.+}} 123, i1 false) + // CHECK: br + it.next_chunk::<123>() } diff --git a/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir b/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir index cb29473d7627f..1033fa5398db8 100644 --- a/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir +++ b/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir @@ -6,14 +6,18 @@ fn vec_move(_1: Vec<impl Sized>) -> () { let mut _2: std::vec::IntoIter<impl Sized>; let mut _3: std::vec::IntoIter<impl Sized>; let mut _4: &mut std::vec::IntoIter<impl Sized>; - let mut _5: std::option::Option<impl Sized>;
- let mut _6: isize; - let _8: (); + let mut _6: std::option::Option<impl Sized>; + let mut _7: isize; + let _9: (); scope 1 { debug iter => _3; - let _7: impl Sized; + let _8: impl Sized; scope 2 { - debug x => _7; + debug x => _8; + } + scope 3 (inlined <std::vec::IntoIter<impl Sized> as Iterator>::next) { + debug self => _4; + let mut _5: &mut core::slice::drain::DrainRaw<impl Sized>; } } @@ -29,20 +33,21 @@ fn vec_move(_1: Vec<impl Sized>) -> () { } bb2: { - StorageLive(_5); - StorageLive(_4); + StorageLive(_6); _4 = &mut _3; - _5 = <std::vec::IntoIter<impl Sized> as Iterator>::next(move _4) -> [return: bb3, unwind: bb9]; + StorageLive(_5); + _5 = &mut (_3.4: core::slice::drain::DrainRaw<impl Sized>); + _6 = <core::slice::drain::DrainRaw<impl Sized> as Iterator>::next(move _5) -> [return: bb3, unwind: bb9]; } bb3: { - StorageDead(_4); - _6 = discriminant(_5); - switchInt(move _6) -> [0: bb4, 1: bb6, otherwise: bb8]; + StorageDead(_5); + _7 = discriminant(_6); + switchInt(move _7) -> [0: bb4, 1: bb6, otherwise: bb8]; } bb4: { - StorageDead(_5); + StorageDead(_6); drop(_3) -> [return: bb5, unwind continue]; } @@ -53,12 +58,12 @@ fn vec_move(_1: Vec<impl Sized>) -> () { } bb6: { - _7 = move ((_5 as Some).0: impl Sized); - _8 = opaque::<impl Sized>(move _7) -> [return: bb7, unwind: bb9]; + _8 = move ((_6 as Some).0: impl Sized); + _9 = opaque::<impl Sized>(move _8) -> [return: bb7, unwind: bb9]; } bb7: { - StorageDead(_5); + StorageDead(_6); goto -> bb2; }
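
Reviewer note: the heart of this PR is that `DrainRaw` centralizes the "move items out while iterating, drop whatever was never yielded" pattern that `vec::IntoIter`, `vec::Drain`, and `array::Drain` each hand-rolled before. The sketch below is a minimal, self-contained illustration of that pattern, not the implementation above: it omits the ZST handling and `NonNullIter`'s `end_or_len` encoding, uses only stable APIs, and the `MiniDrain` name plus the driver in `main` are invented for this example.

use std::marker::PhantomData;
use std::mem::ManuallyDrop;
use std::ptr::NonNull;

/// Owns `len` initialized `T`s behind `ptr`; yields them by value and
/// drops the not-yet-yielded tail when the iterator itself is dropped.
struct MiniDrain<T> {
    ptr: NonNull<T>,
    len: usize,
    _owns: PhantomData<T>,
}

impl<T> MiniDrain<T> {
    /// Safety: `ptr..ptr.add(len)` must be initialized, valid to read and
    /// drop, and must stay allocated for as long as the `MiniDrain` lives.
    unsafe fn from_parts(ptr: NonNull<T>, len: usize) -> Self {
        Self { ptr, len, _owns: PhantomData }
    }
}

impl<T> Iterator for MiniDrain<T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }
        // Move the front element out; after the `read` it belongs to the
        // caller, so the `Drop` below must never see it again.
        unsafe {
            let item = self.ptr.as_ptr().read();
            self.ptr = NonNull::new_unchecked(self.ptr.as_ptr().add(1));
            self.len -= 1;
            Some(item)
        }
    }
}

impl<T> Drop for MiniDrain<T> {
    fn drop(&mut self) {
        // Drop everything that was never yielded, in place.
        let rest = std::ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len);
        unsafe { std::ptr::drop_in_place(rest) };
    }
}

fn main() {
    // Mirrors `drain_array_with`: stop the local from dropping the array,
    // then let the drain take over responsibility for the elements.
    let mut array = ManuallyDrop::new([String::from("a"), String::from("b"), String::from("c")]);
    let ptr = NonNull::new(array.as_mut_ptr()).unwrap();
    // Safety: the array outlives the drain and won't be dropped by the local.
    let mut drain = unsafe { MiniDrain::from_parts(ptr, 3) };
    assert_eq!(drain.next().as_deref(), Some("a"));
    drop(drain); // "b" and "c" are dropped here rather than leaked
}

The real `DrainRaw` additionally exposes `forget_remaining`/`drop_remaining`, which is what lets callers like `Drain::keep_rest`, `Splice`, and `IntoIter::forget_allocation_drop_remaining` detach or bulk-drop the tail without stepping the iterator element by element.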