1 use core
::iter
::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce}
;
2 use core
::mem
::{self, ManuallyDrop}
;
5 use super::{AsIntoIter, InPlaceDrop, SpecFromIter, SpecFromIterNested, Vec}
;
/// Specialization marker for collecting an iterator pipeline into a Vec while reusing the
/// source allocation, i.e. executing the pipeline in place.
///
/// NOTE(review): this trait has no methods — it exists purely so the specialized
/// `SpecFromIter` impl below can use it as an (unsafe) specialization bound; the blanket
/// impl that follows ties it to `core::iter::InPlaceIterable`.
#[rustc_unsafe_specialization_marker]
pub(super) trait InPlaceIterableMarker {}
// Blanket impl: every iterator that is `InPlaceIterable` automatically carries the
// specialization marker above.
impl<T> InPlaceIterableMarker for T where T: InPlaceIterable {}
impl<T, I> SpecFromIter<T, I> for Vec<T>
where
    I: Iterator<Item = T> + SourceIter<Source: AsIntoIter> + InPlaceIterableMarker,
{
    /// Collects the iterator by writing its items back into the allocation of the
    /// pipeline's own source `Vec` (obtained via `SourceIter`/`AsIntoIter`), avoiding a
    /// fresh allocation when the element layouts are compatible.
    default fn from_iter(mut iterator: I) -> Self {
        // Additional requirements which cannot expressed via trait bounds. We rely on const eval
        // instead:
        // a) no ZSTs as there would be no allocation to reuse and pointer arithmetic would panic
        // b) size match as required by Alloc contract
        // c) alignments match as required by Alloc contract
        if mem::size_of::<T>() == 0
            || mem::size_of::<T>()
                != mem::size_of::<<<I as SourceIter>::Source as AsIntoIter>::Item>()
            || mem::align_of::<T>()
                != mem::align_of::<<<I as SourceIter>::Source as AsIntoIter>::Item>()
        {
            // fallback to more generic implementations
            return SpecFromIterNested::from_iter(iterator);
        }

        // Snapshot the raw parts of the source `IntoIter` before iteration starts.
        // `dst_buf` aliases the source buffer: collection happens in place, writes trail reads.
        let (src_buf, src_ptr, dst_buf, dst_end, cap) = unsafe {
            let inner = iterator.as_inner().as_into_iter();
            (
                inner.buf.as_ptr(),
                inner.ptr,
                inner.buf.as_ptr() as *mut T,
                inner.end as *const T,
                inner.cap,
            )
        };

        let len = SpecInPlaceCollect::collect_in_place(&mut iterator, dst_buf, dst_end);

        let src = unsafe { iterator.as_inner().as_into_iter() };
        // check if SourceIter contract was upheld
        // caveat: if they weren't we might not even make it to this point
        debug_assert_eq!(src_buf, src.buf.as_ptr());
        // check InPlaceIterable contract. This is only possible if the iterator advanced the
        // source pointer at all. If it uses unchecked access via TrustedRandomAccess
        // then the source pointer will stay in its initial position and we can't use it as reference
        if src.ptr != src_ptr {
            debug_assert!(
                unsafe { dst_buf.add(len) as *const _ } <= src.ptr,
                "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
            );
        }

        // drop any remaining values at the tail of the source
        // but prevent drop of the allocation itself once IntoIter goes out of scope
        // if the drop panics then we also leak any elements collected into dst_buf
        //
        // FIXME: Since `SpecInPlaceCollect::collect_in_place` above might use
        // `__iterator_get_unchecked` internally, this call might be operating on
        // a `vec::IntoIter` with incorrect internal state regarding which elements
        // have already been "consumed". However, the `TrustedRandomIteratorNoCoerce`
        // implementation of `vec::IntoIter` is only present if the `Vec` elements
        // don't have a destructor, so it doesn't matter if elements are "dropped multiple times"
        // in that case.
        //
        // This argument technically currently lacks justification from the `# Safety` docs for
        // `SourceIter`/`InPlaceIterable` and/or `TrustedRandomAccess`, so it might be possible that
        // someone could inadvertently create new library unsoundness
        // involving this `.forget_allocation_drop_remaining()` call.
        src.forget_allocation_drop_remaining();

        // Reassemble a Vec over the (reused) buffer with the collected length and the
        // original capacity taken from the source allocation.
        let vec = unsafe { Vec::from_raw_parts(dst_buf, len, cap) };

        vec
    }
}
/// Builds the sink closure used by the `try_fold`-based in-place collection loop.
///
/// The closure writes each `item` through the sink's current write pointer and then bumps
/// the pointer by one element. It never produces `Err` (the error type is `!`); threading
/// a `Result` through `try_fold` merely lets the `InPlaceDrop` sink (and its write
/// pointer) travel through the fold state while keeping panic-safety via the sink's drop.
fn write_in_place_with_drop<T>(
    src_end: *const T,
) -> impl FnMut(InPlaceDrop<T>, T) -> Result<InPlaceDrop<T>, !> {
    move |mut sink, item| {
        unsafe {
            // the InPlaceIterable contract cannot be verified precisely here since
            // try_fold has an exclusive reference to the source pointer
            // all we can do is check if it's still in range
            debug_assert!(sink.dst as *const _ <= src_end, "InPlaceIterable contract violation");
            ptr::write(sink.dst, item);
            // Since this executes user code which can panic we have to bump the pointer
            // after each step.
            sink.dst = sink.dst.add(1);
        }
        Ok(sink)
    }
}
/// Helper trait to hold specialized implementations of the in-place iterate-collect loop
trait SpecInPlaceCollect<T, I>: Iterator<Item = T> {
    /// Collects an iterator (`self`) into the destination buffer (`dst`) and returns the number of items
    /// collected. `end` is the last writable element of the allocation and used for bounds checks.
    ///
    /// This method is specialized and one of its implementations makes use of
    /// `Iterator::__iterator_get_unchecked` calls with a `TrustedRandomAccessNoCoerce` bound
    /// on `I` which means the caller of this method must take the safety conditions
    /// of that trait into consideration.
    fn collect_in_place(&mut self, dst: *mut T, end: *const T) -> usize;
}
impl<T, I> SpecInPlaceCollect<T, I> for I
where
    I: Iterator<Item = T>,
{
    /// Generic fallback: drives the iterator with `try_fold`, threading an `InPlaceDrop`
    /// sink through it so partially collected elements are dropped if iteration panics.
    default fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
        // use try-fold since
        // - it vectorizes better for some iterator adapters
        // - unlike most internal iteration methods, it only takes a &mut self
        // - it lets us thread the write pointer through its innards and get it back in the end
        let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf };
        let sink =
            self.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(end)).unwrap();
        // iteration succeeded, don't drop head
        // (ManuallyDrop disarms the sink; the pointer distance is the element count written)
        unsafe { ManuallyDrop::new(sink).dst.offset_from(dst_buf) as usize }
    }
}
impl<T, I> SpecInPlaceCollect<T, I> for I
where
    I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
{
    /// Specialized path: with `TrustedRandomAccessNoCoerce` the element count is known up
    /// front (`self.size()`), so a counted loop with unchecked element access is used
    /// instead of `try_fold`. `drop_guard` drops already-written elements on panic.
    fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
        let len = self.size();
        let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
        for i in 0..len {
            // Safety: InplaceIterable contract guarantees that for every element we read
            // one slot in the underlying storage will have been freed up and we can immediately
            // write back the result.
            unsafe {
                let dst = dst_buf.offset(i as isize);
                debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
                ptr::write(dst, self.__iterator_get_unchecked(i));
                // Since this executes user code which can panic we have to bump the pointer
                // after each step.
                drop_guard.dst = dst.add(1);
            }
        }
        // All elements written successfully: disarm the guard and report the count.
        mem::forget(drop_guard);
        len
    }
}