]> git.proxmox.com Git - rustc.git/blame - library/alloc/src/vec/into_iter.rs
New upstream version 1.55.0+dfsg1
[rustc.git] / library / alloc / src / vec / into_iter.rs
CommitLineData
5869c6ff
XL
1use crate::alloc::{Allocator, Global};
2use crate::raw_vec::RawVec;
3use core::fmt;
4use core::intrinsics::arith_offset;
5use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess};
6use core::marker::PhantomData;
7use core::mem::{self};
8use core::ptr::{self, NonNull};
9use core::slice::{self};
10
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Start of the backing allocation; kept so `Drop` can hand the buffer
    // back to `RawVec` for deallocation.
    pub(super) buf: NonNull<T>,
    // Marks logical ownership of `T` values for the drop checker, since the
    // buffer is held through raw pointers.
    pub(super) phantom: PhantomData<T>,
    // Capacity of the backing allocation, in elements (not remaining length).
    pub(super) cap: usize,
    // The allocator the buffer was allocated with and must be freed with.
    pub(super) alloc: A,
    // Next element to yield from the front; advanced by `next`.
    pub(super) ptr: *const T,
    // One past the last remaining element. For zero-sized `T` the pointers
    // are used as a plain counter: `end as usize - ptr as usize` is the
    // remaining length (see `size_hint` / `next`).
    pub(super) end: *const T,
}
34
35#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
36impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
37 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
38 f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
39 }
40}
41
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr` points at the first of the `len()` remaining
        // initialized elements, all inside the same allocation.
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: the raw slice covers exactly the remaining elements, and
        // `&mut self` guarantees exclusive access to them.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    // Raw (unchecked) slice over the remaining elements; shared by
    // `as_mut_slice`, `forget_allocation_drop_remaining` and `Drop`.
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// (&mut into_iter).for_each(core::mem::drop);
    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
    /// ```
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        // Capture the raw slice of remaining elements *before* the pointer
        // fields below are reset.
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        self.cap = 0;
        // SAFETY(review): presumably `RawVec::NEW.ptr()` is a dangling but
        // non-null pointer (as for any empty RawVec), which is what
        // `new_unchecked` requires — confirm against `raw_vec.rs`.
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        // Drop the elements only after the fields were reset: with `cap = 0`
        // and a fresh `buf`, `Drop for IntoIter` will not deallocate the old
        // (forgotten) buffer even if an element's destructor panics here.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }
}
114
115#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
116impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
117 fn as_ref(&self) -> &[T] {
118 self.as_slice()
119 }
120}
121
122#[stable(feature = "rust1", since = "1.0.0")]
123unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
124#[stable(feature = "rust1", since = "1.0.0")]
125unsafe impl<T: Sync, A: Allocator> Sync for IntoIter<T, A> {}
126
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr as *const _ == self.end {
            // Exhausted; neither pointer moves past this point.
            None
        } else if mem::size_of::<T>() == 0 {
            // purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            // SAFETY: `ptr != end`, so at least one element remains and
            // `ptr + 1` stays within (or one past the end of) the allocation.
            self.ptr = unsafe { self.ptr.offset(1) };

            // SAFETY: `old` points at an initialized element; it is moved out
            // here and the cursor has advanced past it, so it is never read
            // or dropped again.
            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            // For ZSTs the pointers act as a plain counter, so the remaining
            // length is the raw address difference.
            (self.end as usize).wrapping_sub(self.ptr as usize)
        } else {
            // SAFETY: both pointers derive from the same allocation with
            // `ptr <= end`.
            unsafe { self.end.offset_from(self.ptr) as usize }
        };
        // Lower and upper bound coincide: the length is known exactly
        // (this is what the `TrustedLen` impl below relies on).
        (exact, Some(exact))
    }

    #[inline]
    fn count(self) -> usize {
        // Consuming `self` drops the remaining elements via `Drop`.
        self.len()
    }

    #[doc(hidden)]
    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccess,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
        // is guaranteed to pointer to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}
184
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used
            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            // SAFETY: `end != ptr`, so stepping `end` back one element stays
            // within the allocation; the element it then points at is
            // initialized and is moved out below, never to be touched again.
            self.end = unsafe { self.end.offset(-1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }
}
204
205#[stable(feature = "rust1", since = "1.0.0")]
206impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
207 fn is_empty(&self) -> bool {
208 self.ptr == self.end
209 }
210}
211
#[stable(feature = "fused", since = "1.26.0")]
// Once `ptr == end`, `next`/`next_back` return `None` without moving either
// pointer, so the iterator stays exhausted — the `FusedIterator` contract.
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
214
#[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: `size_hint` (see the `Iterator` impl) reports the exact remaining
// length as both lower and upper bound, which is the `TrustedLen` contract.
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
217
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
//
// `MAY_HAVE_SIDE_EFFECT = false` matches `__iterator_get_unchecked` above:
// it only reads `Copy` data out of the buffer and mutates no iterator state.
unsafe impl<T, A: Allocator> TrustedRandomAccess for IntoIter<T, A>
where
    T: Copy,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
228
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    /// Clones only the *remaining* elements into a fresh buffer allocated
    /// with a clone of the allocator; already-yielded elements are not kept.
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.clone()).into_iter()
    }
    // Test builds route through the `crate::slice::to_vec` free function
    // instead of the inherent method — presumably because the inherent
    // `to_vec_in` is unavailable in the test crate setup; behavior is the
    // same. TODO confirm.
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter()
    }
}
241
#[stable(feature = "rust1", since = "1.0.0")]
// `#[may_dangle]` promises the drop checker that this impl only drops `T`
// values and never otherwise inspects them, so `T` may contain references
// that have already expired when the iterator is dropped.
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Guard that frees the backing allocation when it is dropped — this
        // runs even if dropping one of the remaining elements panics below,
        // so the buffer is never leaked.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this
                    let alloc = ptr::read(&self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
266
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
// NOTE(review): marks this iterator as usable for in-place collection
// (reusing its own buffer for the result). Presumably sound because
// `IntoIter` owns its allocation and yields by moving elements out —
// confirm against the `InPlaceIterable` contract in `core::iter`.
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
270
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    // The iterator is its own source: in-place iteration pipelines
    // bottom out here.
    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}
281
// internal helper trait for in-place iteration specialization.
#[rustc_specialization_trait]
pub(crate) trait AsIntoIter {
    type Item;
    // Returns the underlying `IntoIter` (with the default `Global`
    // allocator) so specialized code can access its raw buffer fields.
    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item>;
}
288
// `IntoIter` is trivially its own underlying `IntoIter`.
impl<T> AsIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}