#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
use crate::raw_vec::RawVec;
use core::array;
use core::fmt;
use core::intrinsics::arith_offset;
use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop, MaybeUninit};
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // the drop impl reconstructs a RawVec from buf, cap and alloc
    // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
    pub(super) alloc: ManuallyDrop<A>,
    // `ptr` and `end` delimit the not-yet-yielded elements as a half-open
    // range; for zero-sized `T` the two addresses only encode the remaining
    // length, since every element has the same address.
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}
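
// The `Debug` output lists the elements that have not been yielded yet,
// e.g. `IntoIter(['a', 'b', 'c'])`.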
#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
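    ///
    /// # Examples
    ///
    /// A minimal sketch; `allocator_api` is unstable, so this runs only on
    /// nightly:
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::alloc::Global;
    ///
    /// let into_iter = Vec::<u8>::new_in(Global).into_iter();
    /// let _alloc: &Global = into_iter.allocator();
    /// ```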
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient:
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// (&mut into_iter).for_each(core::mem::drop);
    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
    /// ```
    ///
    /// This method is used by in-place iteration; refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        // SAFETY: `remaining` was derived from this iterator's own buffer and
        // the elements in it have not been dropped yet.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets the remaining elements, without dropping them, while still
    /// allowing the backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // Emptying the `ptr..end` range is enough: `Drop` only runs element
        // destructors for that range and frees the buffer from `buf`/`cap`,
        // which remain intact.
        self.ptr = self.end;
    }
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
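
// The raw pointer fields keep `Send`/`Sync` from being derived automatically,
// so they are asserted manually: the iterator uniquely owns its elements and
// allocator, so it is as thread-safe as `T` and `A` themselves.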
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr as *const _ == self.end {
            None
        } else if mem::size_of::<T>() == 0 {
            // purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.offset(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            // For ZSTs the pointers only encode a count, so the remaining
            // length is the difference of the raw addresses.
            self.end.addr().wrapping_sub(self.ptr.addr())
        } else {
            unsafe { self.end.sub_ptr(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
        if mem::size_of::<T>() == 0 {
            // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
            // effectively results in unsigned pointers representing positions 0..usize::MAX,
            // which is valid for ZSTs.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, step_size as isize) as *mut T }
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = MaybeUninit::uninit_array();

        let len = self.len();

        if mem::size_of::<T>() == 0 {
            if len < N {
                self.forget_remaining_elements();
                // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.ptr = unsafe { arith_offset(self.ptr as *const i8, N as isize) as *mut T };
            // Safety: ditto, the full ZST array can be conjured from nothing
            return Ok(unsafe { MaybeUninit::array_assume_init(raw_ary) });
        }

        if len < N {
            // Safety: `len` indicates that this many elements are available and we just checked that
            // it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // Safety: `len` is larger than the array size. Copy a fixed amount here to fully
        // initialize the array.
        unsafe {
            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(MaybeUninit::array_assume_init(raw_ary))
        }
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used
            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.offset(-1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        if mem::size_of::<T>() == 0 {
            // SAFETY: same as for advance_by()
            self.end = unsafe {
                arith_offset(self.end as *const i8, step_size.wrapping_neg() as isize) as *mut T
            }
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.offset(step_size.wrapping_neg() as isize) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }
}
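
// Illustration of the front/back contract: `next` and `next_back` consume
// opposite ends of the same `ptr..end` range, so they never yield the same
// element twice:
//
//     let mut it = vec![1, 2, 3].into_iter();
//     assert_eq!(it.next(), Some(1));
//     assert_eq!(it.next_back(), Some(3));
//     assert_eq!(it.next(), Some(2));
//     assert_eq!(it.next(), None);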

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        self.ptr == self.end
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    // Reading an element by index has no side effects here, since `T: NonDrop`
    // rules out destructors that would otherwise have to run for skipped items.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    // Cloning copies only the remaining (not-yet-yielded) elements into a
    // fresh allocation.
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // The guard frees the backing allocation in its own `Drop`, so the
        // memory is released even if dropping an element below panics.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}

// In addition to the SAFETY invariants of the following three unsafe traits
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
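
// As an illustration (see the vec::in_place_collect documentation for the
// authoritative conditions): a pipeline such as
//
//     let out: Vec<i32> = src.into_iter().map(|x| x + 1).collect();
//
// may reuse `src`'s original allocation instead of allocating a new buffer,
// because this iterator implements `InPlaceIterable` and `SourceIter`.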

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}