//! Vec's owning iterator, `IntoIter` (blame-view/table artifacts removed from header).
5e7ed085 FG |
1 | #[cfg(not(no_global_oom_handling))] |
2 | use super::AsVecIntoIter; | |
5869c6ff XL |
3 | use crate::alloc::{Allocator, Global}; |
4 | use crate::raw_vec::RawVec; | |
064997fb | 5 | use core::array; |
5869c6ff XL |
6 | use core::fmt; |
7 | use core::intrinsics::arith_offset; | |
94222f64 XL |
8 | use core::iter::{ |
9 | FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce, | |
10 | }; | |
5869c6ff | 11 | use core::marker::PhantomData; |
064997fb FG |
12 | use core::mem::{self, ManuallyDrop, MaybeUninit}; |
13 | #[cfg(not(no_global_oom_handling))] | |
5e7ed085 | 14 | use core::ops::Deref; |
5869c6ff XL |
15 | use core::ptr::{self, NonNull}; |
16 | use core::slice::{self}; | |
17 | ||
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Start of the backing allocation; Drop rebuilds a RawVec from
    // (buf, cap, alloc) to deallocate it.
    pub(super) buf: NonNull<T>,
    // Declares logical ownership of `T` values (drop-check / variance).
    pub(super) phantom: PhantomData<T>,
    // Capacity of the backing allocation, in elements.
    pub(super) cap: usize,
    // the drop impl reconstructs a RawVec from buf, cap and alloc
    // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
    pub(super) alloc: ManuallyDrop<A>,
    // Front cursor: next element yielded by `next`.
    pub(super) ptr: *const T,
    // Back cursor (one past the last remaining element). For ZSTs the
    // cursors act as counters: the address difference `end - ptr` is the
    // remaining length (see `size_hint`).
    pub(super) end: *const T,
}
44 | ||
45 | #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] | |
46 | impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> { | |
47 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | |
48 | f.debug_tuple("IntoIter").field(&self.as_slice()).finish() | |
49 | } | |
50 | } | |
51 | ||
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr` points at `len()` initialized, not-yet-yielded
        // elements, which are valid for shared reads.
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: same elements as `as_slice`, and `&mut self` guarantees
        // exclusive access.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    // Raw view of the remaining elements; shared by `as_mut_slice`,
    // `forget_allocation_drop_remaining` and `Drop`.
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// (&mut into_iter).for_each(core::mem::drop);
    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
    /// ```
    ///
    /// This method is used by in-place iteration, refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        //
        // Note the ordering: the iterator is emptied (cap = 0, dangling buf)
        // *before* the remaining elements are dropped, so a panicking element
        // destructor cannot cause this iterator's Drop to touch the buffer
        // that has been handed over to the in-place collect.
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        // SAFETY: `remaining` was captured before the cursors were reset and
        // covers exactly the not-yet-yielded, initialized elements.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // Advancing `ptr` to `end` makes `len() == 0`, so Drop only frees the
        // allocation and never runs the element destructors.
        self.ptr = self.end;
    }
}
132 | ||
133 | #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] | |
134 | impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> { | |
135 | fn as_ref(&self) -> &[T] { | |
136 | self.as_slice() | |
137 | } | |
138 | } | |
139 | ||
// SAFETY: `IntoIter` exclusively owns its elements and allocation (the raw
// pointers are never shared externally), so it can cross threads exactly when
// `T` and `A` can.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}
5869c6ff XL |
144 | |
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr as *const _ == self.end {
            None
        } else if mem::size_of::<T>() == 0 {
            // purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            // SAFETY: not at `end` (checked above), so there is at least one
            // element left in the allocation to step over and read out.
            self.ptr = unsafe { self.ptr.offset(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            // For ZSTs the cursors are counters, not addresses; their
            // wrapping address difference is the remaining length.
            self.end.addr().wrapping_sub(self.ptr.addr())
        } else {
            // SAFETY: `ptr` and `end` point into the same allocation and
            // `ptr <= end`, so the unsigned distance is well-defined.
            unsafe { self.end.sub_ptr(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
        if mem::size_of::<T>() == 0 {
            // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
            // effectively results in unsigned pointers representing positions 0..usize::MAX,
            // which is valid for ZSTs.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, step_size as isize) as *mut T }
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }

    #[inline]
    fn count(self) -> usize {
        // Exact length is known, so no need to walk the elements;
        // dropping `self` still destroys the skipped elements.
        self.len()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = MaybeUninit::uninit_array();

        let len = self.len();

        if mem::size_of::<T>() == 0 {
            if len < N {
                self.forget_remaining_elements();
                // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.ptr = unsafe { arith_offset(self.ptr as *const i8, N as isize) as *mut T };
            // Safety: ditto
            return Ok(unsafe { MaybeUninit::array_assume_init(raw_ary) });
        }

        if len < N {
            // Safety: `len` indicates that this many elements are available and we just checked that
            // it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
        // the array.
        return unsafe {
            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(MaybeUninit::array_assume_init(raw_ary))
        };
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
        // is guaranteed to pointer to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}
261 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used
            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            // SAFETY: not empty (checked above), so stepping `end` back one
            // lands on the last remaining initialized element.
            self.end = unsafe { self.end.offset(-1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        if mem::size_of::<T>() == 0 {
            // SAFETY: same as for advance_by()
            self.end = unsafe {
                arith_offset(self.end as *const i8, step_size.wrapping_neg() as isize) as *mut T
            }
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.offset(step_size.wrapping_neg() as isize) };
        }
        // The skipped elements now live at [end, end + step_size) and must
        // still be destroyed.
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }
}
304 | ||
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        // Cheaper than `len() == 0`: empty exactly when the cursors meet
        // (for ZSTs the cursors are counters, so this still holds).
        self.ptr == self.end
    }
}
311 | ||
// Once `ptr == end`, `next` keeps returning `None`, so the iterator is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
314 | ||
// SAFETY: `size_hint` reports the exact remaining length (lower == upper).
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
317 | ||
// Marker trait restricting which item types may use the unchecked
// random-access path below.
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}
322 | ||
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}
327 | ||
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    // Reads via `__iterator_get_unchecked` are plain `ptr::read`s / ZST
    // conjuring with no observable side effects.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
338 | ||
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    // Clones only the *remaining* elements into a fresh Vec (allocated with a
    // clone of this iterator's allocator) and returns its iterator.
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
    // NOTE(review): test builds route through the free function
    // `crate::slice::to_vec` instead of the inherent method — presumably
    // because the inherent path is unavailable under `cfg(test)`; confirm
    // against the crate's other `cfg(test)` impls.
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter()
    }
}
351 | ||
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Guard that frees the backing allocation when it is dropped; placing
        // the deallocation in a guard means it happens even if dropping one of
        // the remaining elements below panics.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
376 | ||
// In addition to the SAFETY invariants of the following three unsafe traits
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
382 | ||
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    // The iterator is its own source: it sits at the bottom of any
    // in-place-collectible adapter pipeline.
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}
393 | ||
// Lets in-place collection recover the concrete `vec::IntoIter` (with the
// default Global allocator) from a generic source iterator.
#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}