// NOTE(review): the following header is residue from a git-blame/table export
// ("Commit | Line | Data"); it is not part of the source and has been
// neutralized into this comment.
1 | use crate::alloc::{Allocator, Global}; |
2 | use crate::raw_vec::RawVec; | |
3 | use core::fmt; | |
4 | use core::intrinsics::arith_offset; | |
5 | use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess}; | |
6 | use core::marker::PhantomData; | |
7 | use core::mem::{self}; | |
8 | use core::ptr::{self, NonNull}; | |
9 | use core::slice::{self}; | |
10 | ||
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Start of the backing allocation; kept (separately from `ptr`, which
    // advances) so `Drop` can rebuild a `RawVec` and free the buffer.
    pub(super) buf: NonNull<T>,
    // Expresses logical ownership of `T` values for drop-check purposes.
    pub(super) phantom: PhantomData<T>,
    // Capacity of the backing allocation, in elements.
    pub(super) cap: usize,
    // The allocator the buffer came from; read again on deallocation in `Drop`.
    pub(super) alloc: A,
    // Head of iteration: the next element `next` will yield.
    pub(super) ptr: *const T,
    // One past the last element to yield. For zero-sized `T` this field
    // instead encodes the remaining element count — see `next`/`size_hint`.
    pub(super) end: *const T,
}
34 | ||
35 | #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] | |
36 | impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> { | |
37 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | |
38 | f.debug_tuple("IntoIter").field(&self.as_slice()).finish() | |
39 | } | |
40 | } | |
41 | ||
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr..ptr + len()` is exactly the initialized,
        // not-yet-yielded region of the buffer.
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: same region as `as_slice`; `&mut self` guarantees
        // exclusive access for the lifetime of the returned slice.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    // Raw (unborrowed) view of the remaining elements. Factored out so both
    // `as_mut_slice` and `Drop`/`forget_allocation_drop_remaining` can use it
    // without holding a `&mut [T]` borrow across other field updates.
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// (&mut into_iter).for_each(core::mem::drop);
    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
    /// ```
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        // Capture the remaining elements before the fields are reset below.
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        //
        // The fields are reset to the dangling/empty state *before* dropping
        // the elements, so a panic while dropping cannot cause a double free
        // of the (relinquished) allocation.
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        // SAFETY: `remaining` was derived from `ptr`/`len()` while they still
        // described the not-yet-yielded elements; those elements are owned by
        // this iterator and dropped exactly once here.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }
}
113 | ||
114 | #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] | |
115 | impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> { | |
116 | fn as_ref(&self) -> &[T] { | |
117 | self.as_slice() | |
118 | } | |
119 | } | |
120 | ||
// SAFETY: `IntoIter` owns its elements, so sending/sharing it is equivalent
// to sending/sharing the `T`s (and the allocator) themselves.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
// NOTE(review): unlike the `Send` impl above, `A` is not bounded by `Sync`
// here even though `allocator()` hands out `&A` — confirm this asymmetry is
// intentional rather than a missing `A: Sync` bound.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator> Sync for IntoIter<T, A> {}
125 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    /// Moves the next element out of the buffer, advancing `ptr`
    /// (or, for zero-sized `T`, decrementing the count encoded in the
    /// `ptr`/`end` distance).
    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr as *const _ == self.end {
            None
        } else if mem::size_of::<T>() == 0 {
            // purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            // SAFETY: `ptr != end`, so at least one element remains and the
            // offset stays inside the allocation.
            self.ptr = unsafe { self.ptr.offset(1) };

            // SAFETY: `old` points at a not-yet-yielded element, which is
            // moved out exactly once here.
            Some(unsafe { ptr::read(old) })
        }
    }

    /// Exact remaining length; lower and upper bound always coincide.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            // For ZSTs the `ptr`/`end` address difference *is* the count.
            (self.end as usize).wrapping_sub(self.ptr as usize)
        } else {
            // SAFETY: both pointers derive from the same allocation and
            // `end >= ptr`.
            unsafe { self.end.offset_from(self.ptr) as usize }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn count(self) -> usize {
        // Exact size is known, so counting need not walk the elements.
        self.len()
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccess,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}
182 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    /// Moves the last not-yet-yielded element out, retreating `end`
    /// (mirror image of `next`).
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used
            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            // SAFETY: `end != ptr`, so stepping back stays inside the
            // initialized region; `end` then points at the element yielded.
            self.end = unsafe { self.end.offset(-1) };

            // SAFETY: the element at the (new) `end` has not been yielded
            // from either direction and is moved out exactly once here.
            Some(unsafe { ptr::read(self.end) })
        }
    }
}
202 | ||
203 | #[stable(feature = "rust1", since = "1.0.0")] | |
204 | impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> { | |
205 | fn is_empty(&self) -> bool { | |
206 | self.ptr == self.end | |
207 | } | |
208 | } | |
209 | ||
// Once `next` returns `None`, `ptr == end` remains true, so the iterator
// keeps returning `None`: it is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
212 | ||
// SAFETY: `size_hint` reports the exact remaining length with an upper bound
// that is always `Some`, which is the `TrustedLen` contract.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
215 | ||
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
//
// SAFETY: reads via `__iterator_get_unchecked` have no side effects and,
// because `T: Copy`, leave the buffer's elements valid for later drops.
unsafe impl<T, A: Allocator> TrustedRandomAccess for IntoIter<T, A>
where
    T: Copy,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
226 | ||
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    // Cloning copies only the remaining (not-yet-yielded) elements into a
    // freshly allocated vector using a clone of the allocator.
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.clone()).into_iter()
    }
    // NOTE(review): test builds route through the crate-local `to_vec`
    // helper instead of the inherent method — presumably because cfg(test)
    // compilation shadows `Vec`; confirm against the crate's test setup.
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter()
    }
}
238 | ||
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Guard whose own `Drop` frees the backing allocation; constructing
        // it *before* dropping the elements ensures the buffer is released
        // even if one of those element drops panics.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this
                    let alloc = ptr::read(&self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        // SAFETY: `as_raw_mut_slice` covers exactly the not-yet-yielded
        // elements, each owned by this iterator and dropped exactly once.
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
263 | ||
// SAFETY: `IntoIter` owns its backing buffer (see `buf`/`Drop`), so in-place
// iteration may reuse that allocation for the collected result.
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
266 | ||
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    // `IntoIter` is itself the innermost source of any adapter pipeline
    // built on top of it.
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}
276 | ||
// internal helper trait for in-place iteration specialization.
// Lets specialized code recover the concrete `IntoIter` (and thus its
// buffer pointers) from behind a generic source-iterator bound.
#[rustc_specialization_trait]
pub(crate) trait AsIntoIter {
    // Element type of the underlying `IntoIter`.
    type Item;
    // Returns the underlying `IntoIter` (global-allocator flavor only).
    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item>;
}
283 | ||
// The trivial impl: an `IntoIter<T>` (with the default `Global` allocator)
// is its own underlying into-iterator.
impl<T> AsIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}
290 | } |