// library/alloc/src/vec/into_iter.rs
// (upstream rustc 1.56.0-beta sources, via the Proxmox rustc.git mirror)
1use crate::alloc::{Allocator, Global};
2use crate::raw_vec::RawVec;
3use core::fmt;
4use core::intrinsics::arith_offset;
94222f64
XL
5use core::iter::{
6 FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
7};
5869c6ff
XL
8use core::marker::PhantomData;
9use core::mem::{self};
10use core::ptr::{self, NonNull};
11use core::slice::{self};
12
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    /// Start of the backing allocation; kept (together with `cap`) so that
    /// `Drop` can hand the buffer back to `RawVec` for deallocation.
    pub(super) buf: NonNull<T>,
    /// Declares ownership of `T` for drop-check purposes; the raw pointers
    /// alone would not tell the compiler this iterator owns its elements.
    pub(super) phantom: PhantomData<T>,
    /// Capacity of the backing allocation, in elements.
    pub(super) cap: usize,
    /// The allocator the buffer was obtained from; required to free it.
    pub(super) alloc: A,
    /// First not-yet-yielded element. For zero-sized `T` this acts as a
    /// counter disguised as a pointer (advanced byte-wise via `arith_offset`
    /// in the `Iterator` impl).
    pub(super) ptr: *const T,
    /// One past the last not-yet-yielded element; `ptr == end` means the
    /// iterator is exhausted.
    pub(super) end: *const T,
}
37
38#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
39impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
40 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
41 f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
42 }
43}
44
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr` points at the first not-yet-yielded element, and the
        // `len()` elements from there are initialized and owned by `self`.
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: same validity argument as `as_slice`; `&mut self` grants
        // exclusive access to the remaining elements.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    /// Raw-slice view of the not-yet-yielded elements; shared by
    /// `as_mut_slice` and by the `Drop` impl (which must not create a `&mut`
    /// reference before dropping in place).
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// (&mut into_iter).for_each(core::mem::drop);
    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
    /// ```
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        // Snapshot the live elements before resetting the fields below.
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        //
        // Resetting to the empty, dangling state *before* running element
        // destructors also means that if one of them panics, our `Drop`
        // impl sees `cap == 0` and will not touch the (now relinquished)
        // allocation or re-drop elements.
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        unsafe {
            // SAFETY: `remaining` covers exactly the elements that were live
            // before the reset; each is dropped exactly once.
            ptr::drop_in_place(remaining);
        }
    }
}
117
#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    /// Views the remaining elements; delegates to [`IntoIter::as_slice`].
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
124
125#[stable(feature = "rust1", since = "1.0.0")]
126unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
127#[stable(feature = "rust1", since = "1.0.0")]
128unsafe impl<T: Sync, A: Allocator> Sync for IntoIter<T, A> {}
129
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr as *const _ == self.end {
            // `ptr` meeting `end` is the exhaustion condition for both the
            // sized and the zero-sized case.
            None
        } else if mem::size_of::<T>() == 0 {
            // purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.offset(1) };

            // SAFETY: `old` is in bounds and the iterator has advanced past
            // it, so the element is read (moved out) exactly once.
            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            // For ZSTs the pointers are really byte counters (see `next`),
            // so their difference is the exact remaining length.
            (self.end as usize).wrapping_sub(self.ptr as usize)
        } else {
            // SAFETY: both pointers are derived from the same allocation.
            unsafe { self.end.offset_from(self.ptr) as usize }
        };
        // The bounds are equal: the remaining length is known exactly.
        (exact, Some(exact))
    }

    #[inline]
    fn count(self) -> usize {
        // O(1) override: no need to walk (and drop) each element to count.
        self.len()
    }

    #[doc(hidden)]
    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
        // is guaranteed to pointer to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}
187
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used
            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            // Move `end` back first, then read the element it now points at;
            // that element is thereby excluded from any further iteration.
            self.end = unsafe { self.end.offset(-1) };

            // SAFETY: `end` is in bounds after the decrement and the element
            // will not be yielded again.
            Some(unsafe { ptr::read(self.end) })
        }
    }
}
207
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        // Cheaper equivalent of `self.len() == 0`: pointer (or, for ZSTs,
        // counter) equality is the exhaustion condition used by `next`.
        self.ptr == self.end
    }
}
214
// Once `ptr == end`, `next`/`next_back` return `None` without mutating any
// state, so the iterator stays exhausted — i.e. it is trivially fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
217
// SAFETY: `size_hint` reports the exact remaining length with equal lower
// and upper bounds (see the `Iterator` impl), which is the `TrustedLen`
// contract.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
220
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
//
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `Copy`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: Copy,
{
    // Reading an element via `__iterator_get_unchecked` is a plain `ptr::read`
    // of a `Copy` value: no observable side effect, so specializations may
    // skip or reorder accesses.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
234
17df50a5 235#[cfg(not(no_global_oom_handling))]
5869c6ff
XL
236#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
237impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
238 #[cfg(not(test))]
239 fn clone(&self) -> Self {
240 self.as_slice().to_vec_in(self.alloc.clone()).into_iter()
241 }
242 #[cfg(test)]
243 fn clone(&self) -> Self {
244 crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter()
245 }
246}
247
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Guard that returns the backing buffer to the allocator when it is
        // dropped — including when dropping an element below panics, so the
        // allocation is never leaked.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this
                    let alloc = ptr::read(&self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
272
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
// Marker for the in-place collect specialization: this iterator owns its
// backing allocation, so (per the trait's contract) collecting back into a
// `Vec` may reuse that buffer instead of allocating a new one.
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
276
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    // This iterator is its own source: there is no adapter wrapped around an
    // inner iterator to unwrap, so `as_inner` just returns `self`.
    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}
287
// internal helper trait for in-place iteration specialization.
//
// Lets specialized code recover the concrete `IntoIter` (and thus its buffer
// pointers) from a generic source-iterator type.
#[rustc_specialization_trait]
pub(crate) trait AsIntoIter {
    // Element type of the underlying `IntoIter`.
    type Item;
    // Returns the underlying `IntoIter` (with the default `Global` allocator).
    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item>;
}
294
// The base case: an `IntoIter` (with the default allocator) is itself the
// iterator the specialization is looking for.
impl<T> AsIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}