use crate::alloc::{Allocator, Global};
use crate::raw_vec::RawVec;
use core::fmt;
use core::intrinsics::arith_offset;
use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess};
use core::marker::PhantomData;
use core::mem::{self};
use core::ptr::{self, NonNull};
use core::slice::{self};
11 /// An iterator that moves out of a vector.
13 /// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
14 /// (provided by the [`IntoIterator`] trait).
19 /// let v = vec![0, 1, 2];
20 /// let iter: std::vec::IntoIter<_> = v.into_iter();
22 #[stable(feature = "rust1", since = "1.0.0")]
25 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
27 pub(super) buf
: NonNull
<T
>,
28 pub(super) phantom
: PhantomData
<T
>,
29 pub(super) cap
: usize,
31 pub(super) ptr
: *const T
,
32 pub(super) end
: *const T
,
35 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
36 impl<T
: fmt
::Debug
, A
: Allocator
> fmt
::Debug
for IntoIter
<T
, A
> {
37 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
38 f
.debug_tuple("IntoIter").field(&self.as_slice()).finish()
42 impl<T
, A
: Allocator
> IntoIter
<T
, A
> {
43 /// Returns the remaining items of this iterator as a slice.
48 /// let vec = vec!['a', 'b', 'c'];
49 /// let mut into_iter = vec.into_iter();
50 /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
51 /// let _ = into_iter.next().unwrap();
52 /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
54 #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
55 pub fn as_slice(&self) -> &[T
] {
56 unsafe { slice::from_raw_parts(self.ptr, self.len()) }
59 /// Returns the remaining items of this iterator as a mutable slice.
64 /// let vec = vec!['a', 'b', 'c'];
65 /// let mut into_iter = vec.into_iter();
66 /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
67 /// into_iter.as_mut_slice()[2] = 'z';
68 /// assert_eq!(into_iter.next().unwrap(), 'a');
69 /// assert_eq!(into_iter.next().unwrap(), 'b');
70 /// assert_eq!(into_iter.next().unwrap(), 'z');
72 #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
73 pub fn as_mut_slice(&mut self) -> &mut [T
] {
74 unsafe { &mut *self.as_raw_mut_slice() }
77 /// Returns a reference to the underlying allocator.
78 #[unstable(feature = "allocator_api", issue = "32838")]
80 pub fn allocator(&self) -> &A
{
84 fn as_raw_mut_slice(&mut self) -> *mut [T
] {
85 ptr
::slice_from_raw_parts_mut(self.ptr
as *mut T
, self.len())
88 /// Drops remaining elements and relinquishes the backing allocation.
90 /// This is roughly equivalent to the following, but more efficient
93 /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
94 /// (&mut into_iter).for_each(core::mem::drop);
95 /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
97 pub(super) fn forget_allocation_drop_remaining(&mut self) {
98 let remaining
= self.as_raw_mut_slice();
100 // overwrite the individual fields instead of creating a new
101 // struct and then overwriting &mut self.
102 // this creates less assembly
104 self.buf
= unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) }
;
105 self.ptr
= self.buf
.as_ptr();
106 self.end
= self.buf
.as_ptr();
109 ptr
::drop_in_place(remaining
);
114 #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
115 impl<T
, A
: Allocator
> AsRef
<[T
]> for IntoIter
<T
, A
> {
116 fn as_ref(&self) -> &[T
] {
121 #[stable(feature = "rust1", since = "1.0.0")]
122 unsafe impl<T
: Send
, A
: Allocator
+ Send
> Send
for IntoIter
<T
, A
> {}
123 #[stable(feature = "rust1", since = "1.0.0")]
124 unsafe impl<T
: Sync
, A
: Allocator
> Sync
for IntoIter
<T
, A
> {}
126 #[stable(feature = "rust1", since = "1.0.0")]
127 impl<T
, A
: Allocator
> Iterator
for IntoIter
<T
, A
> {
131 fn next(&mut self) -> Option
<T
> {
132 if self.ptr
as *const _
== self.end
{
134 } else if mem
::size_of
::<T
>() == 0 {
135 // purposefully don't use 'ptr.offset' because for
136 // vectors with 0-size elements this would return the
138 self.ptr
= unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T }
;
140 // Make up a value of this ZST.
141 Some(unsafe { mem::zeroed() }
)
144 self.ptr
= unsafe { self.ptr.offset(1) }
;
146 Some(unsafe { ptr::read(old) }
)
151 fn size_hint(&self) -> (usize, Option
<usize>) {
152 let exact
= if mem
::size_of
::<T
>() == 0 {
153 (self.end
as usize).wrapping_sub(self.ptr
as usize)
155 unsafe { self.end.offset_from(self.ptr) as usize }
161 fn count(self) -> usize {
165 unsafe fn __iterator_get_unchecked(&mut self, i
: usize) -> Self::Item
167 Self: TrustedRandomAccess
,
169 // SAFETY: the caller must guarantee that `i` is in bounds of the
170 // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
171 // is guaranteed to pointer to an element of the `Vec<T>` and
172 // thus guaranteed to be valid to dereference.
174 // Also note the implementation of `Self: TrustedRandomAccess` requires
175 // that `T: Copy` so reading elements from the buffer doesn't invalidate
178 if mem
::size_of
::<T
>() == 0 { mem::zeroed() }
else { ptr::read(self.ptr.add(i)) }
183 #[stable(feature = "rust1", since = "1.0.0")]
184 impl<T
, A
: Allocator
> DoubleEndedIterator
for IntoIter
<T
, A
> {
186 fn next_back(&mut self) -> Option
<T
> {
187 if self.end
== self.ptr
{
189 } else if mem
::size_of
::<T
>() == 0 {
190 // See above for why 'ptr.offset' isn't used
191 self.end
= unsafe { arith_offset(self.end as *const i8, -1) as *mut T }
;
193 // Make up a value of this ZST.
194 Some(unsafe { mem::zeroed() }
)
196 self.end
= unsafe { self.end.offset(-1) }
;
198 Some(unsafe { ptr::read(self.end) }
)
203 #[stable(feature = "rust1", since = "1.0.0")]
204 impl<T
, A
: Allocator
> ExactSizeIterator
for IntoIter
<T
, A
> {
205 fn is_empty(&self) -> bool
{
210 #[stable(feature = "fused", since = "1.26.0")]
211 impl<T
, A
: Allocator
> FusedIterator
for IntoIter
<T
, A
> {}
213 #[unstable(feature = "trusted_len", issue = "37572")]
214 unsafe impl<T
, A
: Allocator
> TrustedLen
for IntoIter
<T
, A
> {}
217 #[unstable(issue = "none", feature = "std_internals")]
218 // T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
219 // and thus we can't implement drop-handling
220 unsafe impl<T
, A
: Allocator
> TrustedRandomAccess
for IntoIter
<T
, A
>
224 const MAY_HAVE_SIDE_EFFECT
: bool
= false;
227 #[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
228 impl<T
: Clone
, A
: Allocator
+ Clone
> Clone
for IntoIter
<T
, A
> {
230 fn clone(&self) -> Self {
231 self.as_slice().to_vec_in(self.alloc
.clone()).into_iter()
234 fn clone(&self) -> Self {
235 crate::slice
::to_vec(self.as_slice(), self.alloc
.clone()).into_iter()
239 #[stable(feature = "rust1", since = "1.0.0")]
240 unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
242 struct DropGuard
<'a
, T
, A
: Allocator
>(&'a
mut IntoIter
<T
, A
>);
244 impl<T
, A
: Allocator
> Drop
for DropGuard
<'_
, T
, A
> {
247 // `IntoIter::alloc` is not used anymore after this
248 let alloc
= ptr
::read(&self.0.alloc
);
249 // RawVec handles deallocation
250 let _
= RawVec
::from_raw_parts_in(self.0.buf
.as_ptr(), self.0.cap
, alloc
);
255 let guard
= DropGuard(self);
256 // destroy the remaining elements
258 ptr
::drop_in_place(guard
.0.as_raw_mut_slice());
260 // now `guard` will be dropped and do the rest
264 #[unstable(issue = "none", feature = "inplace_iteration")]
265 unsafe impl<T
, A
: Allocator
> InPlaceIterable
for IntoIter
<T
, A
> {}
267 #[unstable(issue = "none", feature = "inplace_iteration")]
268 unsafe impl<T
, A
: Allocator
> SourceIter
for IntoIter
<T
, A
> {
272 unsafe fn as_inner(&mut self) -> &mut Self::Source
{
277 // internal helper trait for in-place iteration specialization.
278 #[rustc_specialization_trait]
279 pub(crate) trait AsIntoIter
{
281 fn as_into_iter(&mut self) -> &mut IntoIter
<Self::Item
>;
284 impl<T
> AsIntoIter
for IntoIter
<T
> {
287 fn as_into_iter(&mut self) -> &mut IntoIter
<Self::Item
> {