1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A growable list type with heap-allocated contents, written `Vec<T>` but
12 //! pronounced 'vector.'
14 //! Vectors have `O(1)` indexing, amortized `O(1)` push (to the end) and
15 //! `O(1)` pop (from the end).
19 //! You can explicitly create a `Vec<T>` with `new()`:
22 //! let v: Vec<i32> = Vec::new();
25 //! ...or by using the `vec!` macro:
28 //! let v: Vec<i32> = vec![];
30 //! let v = vec![1, 2, 3, 4, 5];
32 //! let v = vec![0; 10]; // ten zeroes
35 //! You can `push` values onto the end of a vector (which will grow the vector as needed):
38 //! let mut v = vec![1, 2];
43 //! Popping values works in much the same way:
46 //! let mut v = vec![1, 2];
48 //! let two = v.pop();
51 //! Vectors also support indexing (through the `Index` and `IndexMut` traits):
54 //! let mut v = vec![1, 2, 3];
59 #![stable(feature = "rust1", since = "1.0.0")]
63 use alloc
::boxed
::Box
;
64 use alloc
::heap
::{EMPTY, allocate, reallocate, deallocate}
;
66 use core
::cmp
::Ordering
;
68 use core
::hash
::{self, Hash}
;
69 use core
::intrinsics
::{arith_offset, assume}
;
70 use core
::iter
::{repeat, FromIterator}
;
71 use core
::marker
::PhantomData
;
73 use core
::ops
::{Index, IndexMut, Deref}
;
76 use core
::ptr
::Unique
;
81 use borrow
::{Cow, IntoCow}
;
83 use super::range
::RangeArgument
;
85 // FIXME: fix places which assume the maximum vector allowed can use usize::MAX bytes of memory.
86 const MAX_MEMORY_SIZE
: usize = isize::MAX
as usize;
88 /// A growable list type, written `Vec<T>` but pronounced 'vector.'
93 /// let mut vec = Vec::new();
97 /// assert_eq!(vec.len(), 2);
98 /// assert_eq!(vec[0], 1);
100 /// assert_eq!(vec.pop(), Some(2));
101 /// assert_eq!(vec.len(), 1);
104 /// assert_eq!(vec[0], 7);
106 /// vec.extend([1, 2, 3].iter().cloned());
109 /// println!("{}", x);
111 /// assert_eq!(vec, [7, 1, 2, 3]);
114 /// The `vec!` macro is provided to make initialization more convenient:
117 /// let mut vec = vec![1, 2, 3];
119 /// assert_eq!(vec, [1, 2, 3, 4]);
122 /// Use a `Vec<T>` as an efficient stack:
125 /// let mut stack = Vec::new();
131 /// while let Some(top) = stack.pop() {
132 /// // Prints 3, 2, 1
133 /// println!("{}", top);
137 /// # Capacity and reallocation
139 /// The capacity of a vector is the amount of space allocated for any future
140 /// elements that will be added onto the vector. This is not to be confused with
141 /// the *length* of a vector, which specifies the number of actual elements
142 /// within the vector. If a vector's length exceeds its capacity, its capacity
143 /// will automatically be increased, but its elements will have to be
146 /// For example, a vector with capacity 10 and length 0 would be an empty vector
147 /// with space for 10 more elements. Pushing 10 or fewer elements onto the
148 /// vector will not change its capacity or cause reallocation to occur. However,
149 /// if the vector's length is increased to 11, it will have to reallocate, which
150 /// can be slow. For this reason, it is recommended to use `Vec::with_capacity`
151 /// whenever possible to specify how big the vector is expected to get.
152 #[unsafe_no_drop_flag]
153 #[stable(feature = "rust1", since = "1.0.0")]
// SAFETY: Vec<T> uniquely owns its heap buffer and elements, so moving the
// whole vector to another thread is sound exactly when T itself is Send.
160 unsafe impl<T
: Send
> Send
for Vec
<T
> { }
// SAFETY: a shared &Vec<T> only hands out shared access to the elements, so
// sharing the vector across threads is sound exactly when T is Sync.
161 unsafe impl<T
: Sync
> Sync
for Vec
<T
> { }
163 ////////////////////////////////////////////////////////////////////////////////
165 ////////////////////////////////////////////////////////////////////////////////
168 /// Constructs a new, empty `Vec<T>`.
170 /// The vector will not allocate until elements are pushed onto it.
175 /// let mut vec: Vec<i32> = Vec::new();
178 #[stable(feature = "rust1", since = "1.0.0")]
179 pub fn new() -> Vec
<T
> {
180 // We want ptr to never be NULL so instead we set it to some arbitrary
181 // non-null value which is fine since we never call deallocate on the ptr
182 // if cap is 0. The reason for this is because the pointer of a slice
183 // being NULL would break the null pointer optimization for enums.
184 unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
187 /// Constructs a new, empty `Vec<T>` with the specified capacity.
189 /// The vector will be able to hold exactly `capacity` elements without reallocating. If
190 /// `capacity` is 0, the vector will not allocate.
192 /// It is important to note that this function does not specify the *length* of the returned
193 /// vector, but only the *capacity*. (For an explanation of the difference between length and
194 /// capacity, see the main `Vec<T>` docs above, 'Capacity and reallocation'.)
199 /// let mut vec = Vec::with_capacity(10);
201 /// // The vector contains no items, even though it has capacity for more
202 /// assert_eq!(vec.len(), 0);
204 /// // These are all done without reallocating...
209 /// // ...but this may make the vector reallocate
213 #[stable(feature = "rust1", since = "1.0.0")]
214 pub fn with_capacity(capacity
: usize) -> Vec
<T
> {
// Zero-sized element types occupy no memory, so no allocation is ever
// needed; report an effectively unbounded capacity with the sentinel ptr.
215 if mem
::size_of
::<T
>() == 0 {
216 unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
217 } else if capacity
== 0 {
// NOTE(review): original lines 218-219 — the body of the
// `capacity == 0` arm and the following `else {` — are missing from
// this extract; the code below is the non-zero-capacity path.
// checked_mul guards the byte-size computation against overflow and
// panics with "capacity overflow" instead of silently wrapping.
220 let size
= capacity
.checked_mul(mem
::size_of
::<T
>())
221 .expect("capacity overflow");
222 let ptr
= unsafe { allocate(size, mem::align_of::<T>()) }
;
// A null return from the allocator signals out-of-memory; abort via oom().
223 if ptr
.is_null() { ::alloc::oom() }
// Length starts at 0: the buffer is reserved but contains no elements yet.
224 unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
228 /// Creates a `Vec<T>` directly from the raw components of another vector.
230 /// This is highly unsafe, due to the number of invariants that aren't checked.
239 /// let mut v = vec![1, 2, 3];
241 /// // Pull out the various important pieces of information about `v`
242 /// let p = v.as_mut_ptr();
243 /// let len = v.len();
244 /// let cap = v.capacity();
247 /// // Cast `v` into the void: no destructor run, so we are in
248 /// // complete control of the allocation to which `p` points.
251 /// // Overwrite memory with 4, 5, 6
252 /// for i in 0..len as isize {
253 /// ptr::write(p.offset(i), 4 + i);
256 /// // Put everything back together into a Vec
257 /// let rebuilt = Vec::from_raw_parts(p, len, cap);
258 /// assert_eq!(rebuilt, [4, 5, 6]);
262 #[stable(feature = "rust1", since = "1.0.0")]
263 pub unsafe fn from_raw_parts(ptr
: *mut T
, length
: usize,
264 capacity
: usize) -> Vec
<T
> {
266 ptr
: Unique
::new(ptr
),
272 /// Creates a vector by copying the elements from a raw pointer.
274 /// This function will copy `elts` contiguous elements starting at `ptr`
275 /// into a new allocation owned by the returned `Vec<T>`. The elements of
276 /// the buffer are copied into the vector without cloning, as if
277 /// `ptr::read()` were called on them.
279 #[unstable(feature = "vec_from_raw_buf",
280 reason
= "may be better expressed via composition")]
281 #[deprecated(since = "1.2.0",
282 reason
= "use slice::from_raw_parts + .to_vec() instead")]
283 pub unsafe fn from_raw_buf(ptr
: *const T
, elts
: usize) -> Vec
<T
> {
284 let mut dst
= Vec
::with_capacity(elts
);
286 ptr
::copy_nonoverlapping(ptr
, dst
.as_mut_ptr(), elts
);
290 /// Returns the number of elements the vector can hold without
296 /// let vec: Vec<i32> = Vec::with_capacity(10);
297 /// assert_eq!(vec.capacity(), 10);
300 #[stable(feature = "rust1", since = "1.0.0")]
301 pub fn capacity(&self) -> usize {
305 /// Reserves capacity for at least `additional` more elements to be inserted
306 /// in the given `Vec<T>`. The collection may reserve more space to avoid
307 /// frequent reallocations.
311 /// Panics if the new capacity overflows `usize`.
316 /// let mut vec = vec![1];
318 /// assert!(vec.capacity() >= 11);
320 #[stable(feature = "rust1", since = "1.0.0")]
321 pub fn reserve(&mut self, additional
: usize) {
322 if self.cap
- self.len
< additional
{
323 const ERR_MSG
: &'
static str = "Vec::reserve: `isize` overflow";
325 let new_min_cap
= self.len
.checked_add(additional
).expect(ERR_MSG
);
326 if new_min_cap
> MAX_MEMORY_SIZE { panic!(ERR_MSG) }
327 self.grow_capacity(match new_min_cap
.checked_next_power_of_two() {
328 Some(x
) if x
> MAX_MEMORY_SIZE
=> MAX_MEMORY_SIZE
,
329 None
=> MAX_MEMORY_SIZE
,
335 /// Reserves the minimum capacity for exactly `additional` more elements to
336 /// be inserted in the given `Vec<T>`. Does nothing if the capacity is already
339 /// Note that the allocator may give the collection more space than it
340 /// requests. Therefore capacity can not be relied upon to be precisely
341 /// minimal. Prefer `reserve` if future insertions are expected.
345 /// Panics if the new capacity overflows `usize`.
350 /// let mut vec = vec![1];
351 /// vec.reserve_exact(10);
352 /// assert!(vec.capacity() >= 11);
354 #[stable(feature = "rust1", since = "1.0.0")]
355 pub fn reserve_exact(&mut self, additional
: usize) {
356 if self.cap
- self.len
< additional
{
357 match self.len
.checked_add(additional
) {
358 None
=> panic
!("Vec::reserve: `usize` overflow"),
359 Some(new_cap
) => self.grow_capacity(new_cap
)
364 /// Shrinks the capacity of the vector as much as possible.
366 /// It will drop down as close as possible to the length but the allocator
367 /// may still inform the vector that there is space for a few more elements.
372 /// let mut vec = Vec::with_capacity(10);
373 /// vec.extend([1, 2, 3].iter().cloned());
374 /// assert_eq!(vec.capacity(), 10);
375 /// vec.shrink_to_fit();
376 /// assert!(vec.capacity() >= 3);
378 #[stable(feature = "rust1", since = "1.0.0")]
379 pub fn shrink_to_fit(&mut self) {
380 if mem
::size_of
::<T
>() == 0 { return }
385 dealloc(*self.ptr
, self.cap
)
389 } else if self.cap
!= self.len
{
391 // Overflow check is unnecessary as the vector is already at
393 let ptr
= reallocate(*self.ptr
as *mut u8,
394 self.cap
* mem
::size_of
::<T
>(),
395 self.len
* mem
::size_of
::<T
>(),
396 mem
::align_of
::<T
>()) as *mut T
;
397 if ptr
.is_null() { ::alloc::oom() }
398 self.ptr
= Unique
::new(ptr
);
404 /// Converts the vector into Box<[T]>.
406 /// Note that this will drop any excess capacity. Calling this and
407 /// converting back to a vector with `into_vec()` is equivalent to calling
408 /// `shrink_to_fit()`.
409 #[stable(feature = "rust1", since = "1.0.0")]
410 pub fn into_boxed_slice(mut self) -> Box
<[T
]> {
411 self.shrink_to_fit();
413 let xs
: Box
<[T
]> = Box
::from_raw(&mut *self);
419 /// Shorten a vector, dropping excess elements.
421 /// If `len` is greater than the vector's current length, this has no
427 /// let mut vec = vec![1, 2, 3, 4];
429 /// assert_eq!(vec, [1, 2]);
431 #[stable(feature = "rust1", since = "1.0.0")]
432 pub fn truncate(&mut self, len
: usize) {
434 // drop any extra elements
435 while len
< self.len
{
436 // decrement len before the read(), so a panic on Drop doesn't
437 // re-drop the just-failed value.
439 ptr
::read(self.get_unchecked(self.len
));
444 /// Extracts a slice containing the entire vector.
446 /// Equivalent to `&s[..]`.
448 #[unstable(feature = "convert",
449 reason
= "waiting on RFC revision")]
450 pub fn as_slice(&self) -> &[T
] {
454 /// Extracts a mutable slice of the entire vector.
456 /// Equivalent to `&mut s[..]`.
458 #[unstable(feature = "convert",
459 reason
= "waiting on RFC revision")]
460 pub fn as_mut_slice(&mut self) -> &mut [T
] {
464 /// Sets the length of a vector.
466 /// This will explicitly set the size of the vector, without actually
467 /// modifying its buffers, so it is up to the caller to ensure that the
468 /// vector is actually the specified size.
473 /// let mut v = vec![1, 2, 3, 4];
479 #[stable(feature = "rust1", since = "1.0.0")]
480 pub unsafe fn set_len(&mut self, len
: usize) {
484 /// Removes an element from anywhere in the vector and return it, replacing
485 /// it with the last element.
487 /// This does not preserve ordering, but is O(1).
491 /// Panics if `index` is out of bounds.
496 /// let mut v = vec!["foo", "bar", "baz", "qux"];
498 /// assert_eq!(v.swap_remove(1), "bar");
499 /// assert_eq!(v, ["foo", "qux", "baz"]);
501 /// assert_eq!(v.swap_remove(0), "foo");
502 /// assert_eq!(v, ["baz", "qux"]);
505 #[stable(feature = "rust1", since = "1.0.0")]
506 pub fn swap_remove(&mut self, index
: usize) -> T
{
507 let length
= self.len();
508 self.swap(index
, length
- 1);
512 /// Inserts an element at position `index` within the vector, shifting all
513 /// elements after position `i` one position to the right.
517 /// Panics if `index` is greater than the vector's length.
522 /// let mut vec = vec![1, 2, 3];
523 /// vec.insert(1, 4);
524 /// assert_eq!(vec, [1, 4, 2, 3]);
525 /// vec.insert(4, 5);
526 /// assert_eq!(vec, [1, 4, 2, 3, 5]);
528 #[stable(feature = "rust1", since = "1.0.0")]
529 pub fn insert(&mut self, index
: usize, element
: T
) {
530 let len
= self.len();
531 assert
!(index
<= len
);
532 // space for the new element
535 unsafe { // infallible
536 // The spot to put the new value
538 let p
= self.as_mut_ptr().offset(index
as isize);
539 // Shift everything over to make space. (Duplicating the
540 // `index`th element into two consecutive places.)
541 ptr
::copy(&*p
, p
.offset(1), len
- index
);
542 // Write it in, overwriting the first copy of the `index`th
544 ptr
::write(&mut *p
, element
);
546 self.set_len(len
+ 1);
550 /// Removes and returns the element at position `index` within the vector,
551 /// shifting all elements after position `index` one position to the left.
555 /// Panics if `index` is out of bounds.
560 /// let mut v = vec![1, 2, 3];
561 /// assert_eq!(v.remove(1), 2);
562 /// assert_eq!(v, [1, 3]);
564 #[stable(feature = "rust1", since = "1.0.0")]
565 pub fn remove(&mut self, index
: usize) -> T
{
566 let len
= self.len();
567 assert
!(index
< len
);
568 unsafe { // infallible
571 // the place we are taking from.
572 let ptr
= self.as_mut_ptr().offset(index
as isize);
573 // copy it out, unsafely having a copy of the value on
574 // the stack and in the vector at the same time.
575 ret
= ptr
::read(ptr
);
577 // Shift everything down to fill in that spot.
578 ptr
::copy(&*ptr
.offset(1), ptr
, len
- index
- 1);
580 self.set_len(len
- 1);
585 /// Retains only the elements specified by the predicate.
587 /// In other words, remove all elements `e` such that `f(&e)` returns false.
588 /// This method operates in place and preserves the order of the retained
594 /// let mut vec = vec![1, 2, 3, 4];
595 /// vec.retain(|&x| x%2 == 0);
596 /// assert_eq!(vec, [2, 4]);
598 #[stable(feature = "rust1", since = "1.0.0")]
599 pub fn retain
<F
>(&mut self, mut f
: F
) where F
: FnMut(&T
) -> bool
{
600 let len
= self.len();
614 self.truncate(len
- del
);
618 /// Appends an element to the back of a collection.
622 /// Panics if the number of elements in the vector overflows a `usize`.
627 /// let mut vec = vec!(1, 2);
629 /// assert_eq!(vec, [1, 2, 3]);
632 #[stable(feature = "rust1", since = "1.0.0")]
633 pub fn push(&mut self, value
: T
) {
636 fn resize
<T
>(vec
: &mut Vec
<T
>) {
637 let old_size
= vec
.cap
* mem
::size_of
::<T
>();
638 if old_size
>= MAX_MEMORY_SIZE { panic!("capacity overflow") }
639 let mut size
= max(old_size
, 2 * mem
::size_of
::<T
>()) * 2;
640 if old_size
> size
|| size
> MAX_MEMORY_SIZE
{
641 size
= MAX_MEMORY_SIZE
;
644 let ptr
= alloc_or_realloc(*vec
.ptr
, old_size
, size
);
645 if ptr
.is_null() { ::alloc::oom() }
646 vec
.ptr
= Unique
::new(ptr
);
648 vec
.cap
= max(vec
.cap
, 2) * 2;
651 if mem
::size_of
::<T
>() == 0 {
652 // zero-size types consume no memory, so we can't rely on the
653 // address space running out
654 self.len
= self.len
.checked_add(1).expect("length overflow");
659 if self.len
== self.cap
{
664 let end
= (*self.ptr
).offset(self.len
as isize);
665 ptr
::write(&mut *end
, value
);
670 /// Removes the last element from a vector and returns it, or `None` if it is empty.
675 /// let mut vec = vec![1, 2, 3];
676 /// assert_eq!(vec.pop(), Some(3));
677 /// assert_eq!(vec, [1, 2]);
680 #[stable(feature = "rust1", since = "1.0.0")]
681 pub fn pop(&mut self) -> Option
<T
> {
687 Some(ptr
::read(self.get_unchecked(self.len())))
692 /// Moves all the elements of `other` into `Self`, leaving `other` empty.
696 /// Panics if the number of elements in the vector overflows a `usize`.
701 /// # #![feature(append)]
702 /// let mut vec = vec![1, 2, 3];
703 /// let mut vec2 = vec![4, 5, 6];
704 /// vec.append(&mut vec2);
705 /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
706 /// assert_eq!(vec2, []);
709 #[unstable(feature = "append",
710 reason
= "new API, waiting for dust to settle")]
711 pub fn append(&mut self, other
: &mut Self) {
712 if mem
::size_of
::<T
>() == 0 {
713 // zero-size types consume no memory, so we can't rely on the
714 // address space running out
715 self.len
= self.len
.checked_add(other
.len()).expect("length overflow");
716 unsafe { other.set_len(0) }
719 self.reserve(other
.len());
720 let len
= self.len();
722 ptr
::copy_nonoverlapping(
724 self.get_unchecked_mut(len
),
728 self.len
+= other
.len();
729 unsafe { other.set_len(0); }
732 /// Create a draining iterator that removes the specified range in the vector
733 /// and yields the removed items from start to end. The element range is
734 /// removed even if the iterator is not consumed until the end.
736 /// Note: It is unspecified how many elements are removed from the vector,
737 /// if the `Drain` value is leaked.
741 /// Panics if the starting point is greater than the end point or if
742 /// the end point is greater than the length of the vector.
747 /// # #![feature(drain)]
749 /// // Draining using `..` clears the whole vector.
750 /// let mut v = vec![1, 2, 3];
751 /// let u: Vec<_> = v.drain(..).collect();
752 /// assert_eq!(v, &[]);
753 /// assert_eq!(u, &[1, 2, 3]);
755 #[unstable(feature = "drain",
756 reason
= "recently added, matches RFC")]
757 pub fn drain
<R
>(&mut self, range
: R
) -> Drain
<T
> where R
: RangeArgument
<usize> {
760 // When the Drain is first created, it shortens the length of
761 // the source vector to make sure no uninitialized or moved-from elements
762 // are accessible at all if the Drain's destructor never gets to run.
764 // Drain will ptr::read out the values to remove.
765 // When finished, remaining tail of the vec is copied back to cover
766 // the hole, and the vector length is restored to the new length.
768 let len
= self.len();
769 let start
= *range
.start().unwrap_or(&0);
770 let end
= *range
.end().unwrap_or(&len
);
771 assert
!(start
<= end
);
775 // set self.vec length's to start, to be safe in case Drain is leaked
777 // Use the borrow in the IterMut to indicate borrowing behavior of the
778 // whole Drain iterator (like &mut T).
779 let range_slice
= slice
::from_raw_parts_mut(
780 self.as_mut_ptr().offset(start
as isize),
785 iter
: range_slice
.iter_mut(),
791 /// Clears the vector, removing all values.
796 /// let mut v = vec![1, 2, 3];
800 /// assert!(v.is_empty());
803 #[stable(feature = "rust1", since = "1.0.0")]
804 pub fn clear(&mut self) {
808 /// Returns the number of elements in the vector.
813 /// let a = vec![1, 2, 3];
814 /// assert_eq!(a.len(), 3);
817 #[stable(feature = "rust1", since = "1.0.0")]
// O(1): the length is stored directly in the `len` field, never derived.
818 pub fn len(&self) -> usize { self.len }
820 /// Returns `true` if the vector contains no elements.
825 /// let mut v = Vec::new();
826 /// assert!(v.is_empty());
829 /// assert!(!v.is_empty());
831 #[stable(feature = "rust1", since = "1.0.0")]
// Defined in terms of len() so the two can never disagree.
832 pub fn is_empty(&self) -> bool { self.len() == 0 }
834 /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
835 /// size and in case they are not zero-sized the same minimal alignment.
839 /// Panics if `T` and `U` have differing sizes or are not zero-sized and
840 /// have differing minimal alignments.
845 /// # #![feature(map_in_place)]
846 /// let v = vec![0, 1, 2];
847 /// let w = v.map_in_place(|i| i + 3);
848 /// assert_eq!(&w[..], &[3, 4, 5]);
850 /// #[derive(PartialEq, Debug)]
851 /// struct Newtype(u8);
852 /// let bytes = vec![0x11, 0x22];
853 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
854 /// assert_eq!(&newtyped_bytes[..], &[Newtype(0x11), Newtype(0x22)]);
856 #[unstable(feature = "map_in_place",
857 reason
= "API may change to provide stronger guarantees")]
858 pub fn map_in_place
<U
, F
>(self, mut f
: F
) -> Vec
<U
> where F
: FnMut(T
) -> U
{
859 // FIXME: Assert statically that the types `T` and `U` have the same
861 assert
!(mem
::size_of
::<T
>() == mem
::size_of
::<U
>());
865 if mem
::size_of
::<T
>() != 0 {
866 // FIXME: Assert statically that the types `T` and `U` have the
867 // same minimal alignment in case they are not zero-sized.
869 // These asserts are necessary because the `align_of` of the
870 // types are passed to the allocator by `Vec`.
871 assert
!(mem
::align_of
::<T
>() == mem
::align_of
::<U
>());
873 // This `as isize` cast is safe, because the size of the elements of the
874 // vector is not 0, and:
876 // 1) If the size of the elements in the vector is 1, the `isize` may
877 // overflow, but it has the correct bit pattern so that the
878 // `.offset()` function will work.
881 // Address space 0x0-0xF.
882 // `u8` array at: 0x1.
883 // Size of `u8` array: 0x8.
884 // Calculated `offset`: -0x8.
885 // After `array.offset(offset)`: 0x9.
886 // (0x1 + 0x8 = 0x1 - 0x8)
888 // 2) If the size of the elements in the vector is >1, the `usize` ->
889 // `isize` conversion can't overflow.
890 let offset
= vec
.len() as isize;
891 let start
= vec
.as_mut_ptr();
893 let mut pv
= PartialVecNonZeroSized
{
897 // This points inside the vector, as the vector has length
899 end_t
: unsafe { start.offset(offset) }
,
900 start_u
: start
as *mut U
,
901 end_u
: start
as *mut U
,
903 _marker
: PhantomData
,
914 while pv
.end_u
as *mut T
!= pv
.end_t
{
918 // +-+-+-+-+-+-+-+-+-+
919 // |U|...|U|T|T|...|T|
920 // +-+-+-+-+-+-+-+-+-+
924 let t
= ptr
::read(pv
.start_t
);
927 // +-+-+-+-+-+-+-+-+-+
928 // |U|...|U|X|T|...|T|
929 // +-+-+-+-+-+-+-+-+-+
932 // We must not panic here, one cell is marked as `T`
933 // although it is not `T`.
935 pv
.start_t
= pv
.start_t
.offset(1);
938 // +-+-+-+-+-+-+-+-+-+
939 // |U|...|U|X|T|...|T|
940 // +-+-+-+-+-+-+-+-+-+
943 // We may panic again.
945 // The function given by the user might panic.
948 ptr
::write(pv
.end_u
, u
);
951 // +-+-+-+-+-+-+-+-+-+
952 // |U|...|U|U|T|...|T|
953 // +-+-+-+-+-+-+-+-+-+
956 // We should not panic here, because that would leak the `U`
957 // pointed to by `end_u`.
959 pv
.end_u
= pv
.end_u
.offset(1);
962 // +-+-+-+-+-+-+-+-+-+
963 // |U|...|U|U|T|...|T|
964 // +-+-+-+-+-+-+-+-+-+
967 // We may panic again.
979 // Extract `vec` and prevent the destructor of
980 // `PartialVecNonZeroSized` from running. Note that none of the
981 // function calls can panic, thus no resources can be leaked (as the
982 // `vec` member of `PartialVec` is the only one which holds
983 // allocations -- and it is returned from this function. None of
986 let vec_len
= pv
.vec
.len();
987 let vec_cap
= pv
.vec
.capacity();
988 let vec_ptr
= pv
.vec
.as_mut_ptr() as *mut U
;
990 Vec
::from_raw_parts(vec_ptr
, vec_len
, vec_cap
)
993 // Put the `Vec` into the `PartialVecZeroSized` structure and
994 // prevent the destructor of the `Vec` from running. Since the
995 // `Vec` contained zero-sized objects, it did not allocate, so we
996 // are not leaking memory here.
997 let mut pv
= PartialVecZeroSized
::<T
,U
> {
1000 marker
: PhantomData
,
1004 while pv
.num_t
!= 0 {
1006 // Create a `T` out of thin air and decrement `num_t`. This
1007 // must not panic between these steps, as otherwise a
1008 // destructor of `T` which doesn't exist runs.
1009 let t
= mem
::uninitialized();
1012 // The function given by the user might panic.
1015 // Forget the `U` and increment `num_u`. This increment
1016 // cannot overflow the `usize` as we only do this for a
1017 // number of times that fits into a `usize` (and start with
1018 // `0`). Again, we should not panic between these steps.
1023 // Create a `Vec` from our `PartialVecZeroSized` and make sure the
1024 // destructor of the latter will not run. None of this can panic.
1025 let mut result
= Vec
::new();
1027 result
.set_len(pv
.num_u
);
1034 /// Splits the collection into two at the given index.
1036 /// Returns a newly allocated `Self`. `self` contains elements `[0, at)`,
1037 /// and the returned `Self` contains elements `[at, len)`.
1039 /// Note that the capacity of `self` does not change.
1043 /// Panics if `at > len`.
1048 /// # #![feature(split_off)]
1049 /// let mut vec = vec![1,2,3];
1050 /// let vec2 = vec.split_off(1);
1051 /// assert_eq!(vec, [1]);
1052 /// assert_eq!(vec2, [2, 3]);
1055 #[unstable(feature = "split_off",
1056 reason
= "new API, waiting for dust to settle")]
1057 pub fn split_off(&mut self, at
: usize) -> Self {
1058 assert
!(at
<= self.len(), "`at` out of bounds");
1060 let other_len
= self.len
- at
;
1061 let mut other
= Vec
::with_capacity(other_len
);
1063 // Unsafely `set_len` and copy items to `other`.
1066 other
.set_len(other_len
);
1068 ptr
::copy_nonoverlapping(
1069 self.as_ptr().offset(at
as isize),
1078 impl<T
: Clone
> Vec
<T
> {
1079 /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
1081 /// Calls either `extend()` or `truncate()` depending on whether `new_len`
1082 /// is larger than the current value of `len()` or not.
1087 /// # #![feature(vec_resize)]
1088 /// let mut vec = vec!["hello"];
1089 /// vec.resize(3, "world");
1090 /// assert_eq!(vec, ["hello", "world", "world"]);
1092 /// let mut vec = vec![1, 2, 3, 4];
1093 /// vec.resize(2, 0);
1094 /// assert_eq!(vec, [1, 2]);
1096 #[unstable(feature = "vec_resize",
1097 reason
= "matches collection reform specification; waiting for dust to settle")]
1098 pub fn resize(&mut self, new_len
: usize, value
: T
) {
1099 let len
= self.len();
1102 self.extend(repeat(value
).take(new_len
- len
));
1104 self.truncate(new_len
);
1108 /// Appends all elements in a slice to the `Vec`.
1110 /// Iterates over the slice `other`, clones each element, and then appends
1111 /// it to this `Vec`. The `other` vector is traversed in-order.
1116 /// # #![feature(vec_push_all)]
1117 /// let mut vec = vec![1];
1118 /// vec.push_all(&[2, 3, 4]);
1119 /// assert_eq!(vec, [1, 2, 3, 4]);
1122 #[unstable(feature = "vec_push_all",
1123 reason
= "likely to be replaced by a more optimized extend")]
1124 pub fn push_all(&mut self, other
: &[T
]) {
1125 self.reserve(other
.len());
1127 for i
in 0..other
.len() {
1128 let len
= self.len();
1130 // Unsafe code so this can be optimised to a memcpy (or something similarly
1131 // fast) when T is Copy. LLVM is easily confused, so any extra operations
1132 // during the loop can prevent this optimisation.
1135 self.get_unchecked_mut(len
),
1136 other
.get_unchecked(i
).clone());
1137 self.set_len(len
+ 1);
1143 impl<T
: PartialEq
> Vec
<T
> {
1144 /// Removes consecutive repeated elements in the vector.
1146 /// If the vector is sorted, this removes all duplicates.
1151 /// let mut vec = vec![1, 2, 2, 3, 2];
1155 /// assert_eq!(vec, [1, 2, 3, 2]);
1157 #[stable(feature = "rust1", since = "1.0.0")]
1158 pub fn dedup(&mut self) {
1160 // Although we have a mutable reference to `self`, we cannot make
1161 // *arbitrary* changes. The `PartialEq` comparisons could panic, so we
1162 // must ensure that the vector is in a valid state at all time.
1164 // The way that we handle this is by using swaps; we iterate
1165 // over all the elements, swapping as we go so that at the end
1166 // the elements we wish to keep are in the front, and those we
1167 // wish to reject are at the back. We can then truncate the
1168 // vector. This operation is still O(n).
1170 // Example: We start in this state, where `r` represents "next
1171 // read" and `w` represents "next write".
1174 // +---+---+---+---+---+---+
1175 // | 0 | 1 | 1 | 2 | 3 | 3 |
1176 // +---+---+---+---+---+---+
1179 // Comparing self[r] against self[w-1], this is not a duplicate, so
1180 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1181 // r and w, leaving us with:
1184 // +---+---+---+---+---+---+
1185 // | 0 | 1 | 1 | 2 | 3 | 3 |
1186 // +---+---+---+---+---+---+
1189 // Comparing self[r] against self[w-1], this value is a duplicate,
1190 // so we increment `r` but leave everything else unchanged:
1193 // +---+---+---+---+---+---+
1194 // | 0 | 1 | 1 | 2 | 3 | 3 |
1195 // +---+---+---+---+---+---+
1198 // Comparing self[r] against self[w-1], this is not a duplicate,
1199 // so swap self[r] and self[w] and advance r and w:
1202 // +---+---+---+---+---+---+
1203 // | 0 | 1 | 2 | 1 | 3 | 3 |
1204 // +---+---+---+---+---+---+
1207 // Not a duplicate, repeat:
1210 // +---+---+---+---+---+---+
1211 // | 0 | 1 | 2 | 3 | 1 | 3 |
1212 // +---+---+---+---+---+---+
1215 // Duplicate, advance r. End of vec. Truncate to w.
1217 let ln
= self.len();
1218 if ln
<= 1 { return; }
1220 // Avoid bounds checks by using raw pointers.
1221 let p
= self.as_mut_ptr();
1222 let mut r
: usize = 1;
1223 let mut w
: usize = 1;
1226 let p_r
= p
.offset(r
as isize);
1227 let p_wm1
= p
.offset((w
- 1) as isize);
1230 let p_w
= p_wm1
.offset(1);
1231 mem
::swap(&mut *p_r
, &mut *p_w
);
1243 ////////////////////////////////////////////////////////////////////////////////
1244 // Internal methods and functions
1245 ////////////////////////////////////////////////////////////////////////////////
1248 /// Reserves capacity for exactly `capacity` elements in the given vector.
1250 /// If the capacity for `self` is already equal to or greater than the
1251 /// requested capacity, then no action is taken.
1252 fn grow_capacity(&mut self, capacity
: usize) {
1253 if mem
::size_of
::<T
>() == 0 { return }
1255 if capacity
> self.cap
{
1256 let size
= capacity
.checked_mul(mem
::size_of
::<T
>())
1257 .expect("capacity overflow");
1259 let ptr
= alloc_or_realloc(*self.ptr
, self.cap
* mem
::size_of
::<T
>(), size
);
1260 if ptr
.is_null() { ::alloc::oom() }
1261 self.ptr
= Unique
::new(ptr
);
1263 self.cap
= capacity
;
1268 // FIXME: #13996: need a way to mark the return value as `noalias`
// Obtains a buffer of `size` bytes for elements of `T`: a fresh allocation
// when there is no existing block, otherwise a reallocation that preserves
// the first `old_size` bytes of the previous contents.
1270 unsafe fn alloc_or_realloc
<T
>(ptr
: *mut T
, old_size
: usize, size
: usize) -> *mut T
{
// NOTE(review): the branch-condition lines (original 1271 and 1273) are
// missing from this extract; presumably `old_size == 0` selects the
// `allocate` arm and everything else falls through to `reallocate` —
// confirm against the full source.
1272 allocate(size
, mem
::align_of
::<T
>()) as *mut T
1274 reallocate(ptr
as *mut u8, old_size
, size
, mem
::align_of
::<T
>()) as *mut T
1279 unsafe fn dealloc
<T
>(ptr
: *mut T
, len
: usize) {
1280 if mem
::size_of
::<T
>() != 0 {
1281 deallocate(ptr
as *mut u8,
1282 len
* mem
::size_of
::<T
>(),
1283 mem
::align_of
::<T
>())
1288 #[stable(feature = "rust1", since = "1.0.0")]
1289 pub fn from_elem
<T
: Clone
>(elem
: T
, n
: usize) -> Vec
<T
> {
1291 let mut v
= Vec
::with_capacity(n
);
1292 let mut ptr
= v
.as_mut_ptr();
1294 // Write all elements except the last one
1296 ptr
::write(ptr
, Clone
::clone(&elem
));
1297 ptr
= ptr
.offset(1);
1298 v
.set_len(i
); // Increment the length in every step in case Clone::clone() panics
1302 // We can write the last element directly without cloning needlessly
1303 ptr
::write(ptr
, elem
);
1311 ////////////////////////////////////////////////////////////////////////////////
1312 // Common trait implementations for Vec
1313 ////////////////////////////////////////////////////////////////////////////////
1315 #[stable(feature = "rust1", since = "1.0.0")]
1316 impl<T
:Clone
> Clone
for Vec
<T
> {
1318 fn clone(&self) -> Vec
<T
> { <[T]>::to_vec(&**self) }
1320 // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
1321 // required for this method definition, is not available. Instead use the
1322 // `slice::to_vec` function which is only available with cfg(test)
1323 // NB see the slice::hack module in slice.rs for more information
1325 fn clone(&self) -> Vec
<T
> {
1326 ::slice
::to_vec(&**self)
1329 fn clone_from(&mut self, other
: &Vec
<T
>) {
1330 // drop anything in self that will not be overwritten
1331 if self.len() > other
.len() {
1332 self.truncate(other
.len())
1335 // reuse the contained values' allocations/resources.
1336 for (place
, thing
) in self.iter_mut().zip(other
) {
1337 place
.clone_from(thing
)
1340 // self.len <= other.len due to the truncate above, so the
1341 // slice here is always in-bounds.
1342 let slice
= &other
[self.len()..];
1343 self.push_all(slice
);
1347 #[stable(feature = "rust1", since = "1.0.0")]
1348 impl<T
: Hash
> Hash
for Vec
<T
> {
1350 fn hash
<H
: hash
::Hasher
>(&self, state
: &mut H
) {
1351 Hash
::hash(&**self, state
)
1355 #[stable(feature = "rust1", since = "1.0.0")]
1356 impl<T
> Index
<usize> for Vec
<T
> {
1360 fn index(&self, index
: usize) -> &T
{
1361 // NB built-in indexing via `&[T]`
1366 #[stable(feature = "rust1", since = "1.0.0")]
1367 impl<T
> IndexMut
<usize> for Vec
<T
> {
1369 fn index_mut(&mut self, index
: usize) -> &mut T
{
1370 // NB built-in indexing via `&mut [T]`
1371 &mut (**self)[index
]
1376 #[stable(feature = "rust1", since = "1.0.0")]
1377 impl<T
> ops
::Index
<ops
::Range
<usize>> for Vec
<T
> {
1381 fn index(&self, index
: ops
::Range
<usize>) -> &[T
] {
1382 Index
::index(&**self, index
)
1385 #[stable(feature = "rust1", since = "1.0.0")]
1386 impl<T
> ops
::Index
<ops
::RangeTo
<usize>> for Vec
<T
> {
1390 fn index(&self, index
: ops
::RangeTo
<usize>) -> &[T
] {
1391 Index
::index(&**self, index
)
1394 #[stable(feature = "rust1", since = "1.0.0")]
1395 impl<T
> ops
::Index
<ops
::RangeFrom
<usize>> for Vec
<T
> {
1399 fn index(&self, index
: ops
::RangeFrom
<usize>) -> &[T
] {
1400 Index
::index(&**self, index
)
1403 #[stable(feature = "rust1", since = "1.0.0")]
1404 impl<T
> ops
::Index
<ops
::RangeFull
> for Vec
<T
> {
1408 fn index(&self, _index
: ops
::RangeFull
) -> &[T
] {
1413 #[stable(feature = "rust1", since = "1.0.0")]
1414 impl<T
> ops
::IndexMut
<ops
::Range
<usize>> for Vec
<T
> {
1417 fn index_mut(&mut self, index
: ops
::Range
<usize>) -> &mut [T
] {
1418 IndexMut
::index_mut(&mut **self, index
)
1421 #[stable(feature = "rust1", since = "1.0.0")]
1422 impl<T
> ops
::IndexMut
<ops
::RangeTo
<usize>> for Vec
<T
> {
1425 fn index_mut(&mut self, index
: ops
::RangeTo
<usize>) -> &mut [T
] {
1426 IndexMut
::index_mut(&mut **self, index
)
1429 #[stable(feature = "rust1", since = "1.0.0")]
1430 impl<T
> ops
::IndexMut
<ops
::RangeFrom
<usize>> for Vec
<T
> {
1433 fn index_mut(&mut self, index
: ops
::RangeFrom
<usize>) -> &mut [T
] {
1434 IndexMut
::index_mut(&mut **self, index
)
1437 #[stable(feature = "rust1", since = "1.0.0")]
1438 impl<T
> ops
::IndexMut
<ops
::RangeFull
> for Vec
<T
> {
1441 fn index_mut(&mut self, _index
: ops
::RangeFull
) -> &mut [T
] {
1446 #[stable(feature = "rust1", since = "1.0.0")]
1447 impl<T
> ops
::Deref
for Vec
<T
> {
1450 fn deref(&self) -> &[T
] {
1453 assume(p
!= 0 as *mut T
);
1454 slice
::from_raw_parts(p
, self.len
)
1459 #[stable(feature = "rust1", since = "1.0.0")]
1460 impl<T
> ops
::DerefMut
for Vec
<T
> {
1461 fn deref_mut(&mut self) -> &mut [T
] {
1463 let ptr
= *self.ptr
;
1464 assume(!ptr
.is_null());
1465 slice
::from_raw_parts_mut(ptr
, self.len
)
1470 #[stable(feature = "rust1", since = "1.0.0")]
1471 impl<T
> FromIterator
<T
> for Vec
<T
> {
1473 fn from_iter
<I
: IntoIterator
<Item
=T
>>(iterable
: I
) -> Vec
<T
> {
1474 // Unroll the first iteration, as the vector is going to be
1475 // expanded on this iteration in every case when the iterable is not
1476 // empty, but the loop in extend_desugared() is not going to see the
1477 // vector being full in the few subsequent loop iterations.
1478 // So we get better branch prediction and the possibility to
1479 // construct the vector with initial estimated capacity.
1480 let mut iterator
= iterable
.into_iter();
1481 let mut vector
= match iterator
.next() {
1482 None
=> return Vec
::new(),
1484 let (lower
, _
) = iterator
.size_hint();
1485 let mut vector
= Vec
::with_capacity(lower
.saturating_add(1));
1487 ptr
::write(vector
.get_unchecked_mut(0), element
);
1493 vector
.extend_desugared(iterator
);
1498 #[stable(feature = "rust1", since = "1.0.0")]
1499 impl<T
> IntoIterator
for Vec
<T
> {
1501 type IntoIter
= IntoIter
<T
>;
1503 /// Creates a consuming iterator, that is, one that moves each value out of
1504 /// the vector (from start to end). The vector cannot be used after calling
1510 /// let v = vec!["a".to_string(), "b".to_string()];
1511 /// for s in v.into_iter() {
1512 /// // s has type String, not &String
1513 /// println!("{}", s);
1517 fn into_iter(self) -> IntoIter
<T
> {
1519 let ptr
= *self.ptr
;
1520 assume(!ptr
.is_null());
1522 let begin
= ptr
as *const T
;
1523 let end
= if mem
::size_of
::<T
>() == 0 {
1524 arith_offset(ptr
as *const i8, self.len() as isize) as *const T
1526 ptr
.offset(self.len() as isize) as *const T
1529 IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
1534 #[stable(feature = "rust1", since = "1.0.0")]
1535 impl<'a
, T
> IntoIterator
for &'a Vec
<T
> {
1537 type IntoIter
= slice
::Iter
<'a
, T
>;
1539 fn into_iter(self) -> slice
::Iter
<'a
, T
> {
1544 #[stable(feature = "rust1", since = "1.0.0")]
1545 impl<'a
, T
> IntoIterator
for &'a
mut Vec
<T
> {
1546 type Item
= &'a
mut T
;
1547 type IntoIter
= slice
::IterMut
<'a
, T
>;
1549 fn into_iter(mut self) -> slice
::IterMut
<'a
, T
> {
1554 #[stable(feature = "rust1", since = "1.0.0")]
1555 impl<T
> Extend
<T
> for Vec
<T
> {
1557 fn extend
<I
: IntoIterator
<Item
=T
>>(&mut self, iterable
: I
) {
1558 self.extend_desugared(iterable
.into_iter())
1563 fn extend_desugared
<I
: Iterator
<Item
=T
>>(&mut self, mut iterator
: I
) {
1564 // This function should be the moral equivalent of:
1566 // for item in iterator {
1569 while let Some(element
) = iterator
.next() {
1570 let len
= self.len();
1571 if len
== self.capacity() {
1572 let (lower
, _
) = iterator
.size_hint();
1573 self.reserve(lower
.saturating_add(1));
1576 ptr
::write(self.get_unchecked_mut(len
), element
);
1577 // NB can't overflow since we would have had to alloc the address space
1578 self.set_len(len
+ 1);
1584 #[stable(feature = "extend_ref", since = "1.2.0")]
1585 impl<'a
, T
: 'a
+ Copy
> Extend
<&'a T
> for Vec
<T
> {
1586 fn extend
<I
: IntoIterator
<Item
=&'a T
>>(&mut self, iter
: I
) {
1587 self.extend(iter
.into_iter().cloned());
1591 __impl_slice_eq1
! { Vec<A>, Vec<B> }
1592 __impl_slice_eq1
! { Vec<A>, &'b [B] }
1593 __impl_slice_eq1
! { Vec<A>, &'b mut [B] }
1594 __impl_slice_eq1
! { Cow<'a, [A]>, &'b [B], Clone }
1595 __impl_slice_eq1
! { Cow<'a, [A]>, &'b mut [B], Clone }
1596 __impl_slice_eq1
! { Cow<'a, [A]>, Vec<B>, Clone }
1598 macro_rules
! array_impls
{
1601 // NOTE: some less important impls are omitted to reduce code bloat
1602 __impl_slice_eq1
! { Vec<A>, [B; $N] }
1603 __impl_slice_eq1
! { Vec<A>, &'b [B; $N] }
1604 // __impl_slice_eq1! { Vec<A>, &'b mut [B; $N] }
1605 // __impl_slice_eq1! { Cow<'a, [A]>, [B; $N], Clone }
1606 // __impl_slice_eq1! { Cow<'a, [A]>, &'b [B; $N], Clone }
1607 // __impl_slice_eq1! { Cow<'a, [A]>, &'b mut [B; $N], Clone }
1614 10 11 12 13 14 15 16 17 18 19
1615 20 21 22 23 24 25 26 27 28 29
1619 #[stable(feature = "rust1", since = "1.0.0")]
1620 impl<T
: PartialOrd
> PartialOrd
for Vec
<T
> {
1622 fn partial_cmp(&self, other
: &Vec
<T
>) -> Option
<Ordering
> {
1623 PartialOrd
::partial_cmp(&**self, &**other
)
1627 #[stable(feature = "rust1", since = "1.0.0")]
1628 impl<T
: Eq
> Eq
for Vec
<T
> {}
1630 #[stable(feature = "rust1", since = "1.0.0")]
1631 impl<T
: Ord
> Ord
for Vec
<T
> {
1633 fn cmp(&self, other
: &Vec
<T
>) -> Ordering
{
1634 Ord
::cmp(&**self, &**other
)
1638 #[stable(feature = "rust1", since = "1.0.0")]
1639 impl<T
> Drop
for Vec
<T
> {
1640 fn drop(&mut self) {
1641 // This is (and should always remain) a no-op if the fields are
1642 // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
1643 if self.cap
!= 0 && self.cap
!= mem
::POST_DROP_USIZE
{
1645 for x
in self.iter() {
1648 dealloc(*self.ptr
, self.cap
)
1654 #[stable(feature = "rust1", since = "1.0.0")]
1655 impl<T
> Default
for Vec
<T
> {
1656 #[stable(feature = "rust1", since = "1.0.0")]
1657 fn default() -> Vec
<T
> {
1662 #[stable(feature = "rust1", since = "1.0.0")]
1663 impl<T
: fmt
::Debug
> fmt
::Debug
for Vec
<T
> {
1664 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
1665 fmt
::Debug
::fmt(&**self, f
)
1669 #[stable(feature = "rust1", since = "1.0.0")]
1670 impl<T
> AsRef
<Vec
<T
>> for Vec
<T
> {
1671 fn as_ref(&self) -> &Vec
<T
> {
1676 #[stable(feature = "rust1", since = "1.0.0")]
1677 impl<T
> AsRef
<[T
]> for Vec
<T
> {
1678 fn as_ref(&self) -> &[T
] {
1683 #[stable(feature = "rust1", since = "1.0.0")]
1684 impl<'a
, T
: Clone
> From
<&'a
[T
]> for Vec
<T
> {
1686 fn from(s
: &'a
[T
]) -> Vec
<T
> {
1690 fn from(s
: &'a
[T
]) -> Vec
<T
> {
1695 #[stable(feature = "rust1", since = "1.0.0")]
1696 impl<'a
> From
<&'a
str> for Vec
<u8> {
1697 fn from(s
: &'a
str) -> Vec
<u8> {
1698 From
::from(s
.as_bytes())
1702 ////////////////////////////////////////////////////////////////////////////////
1704 ////////////////////////////////////////////////////////////////////////////////
1706 #[stable(feature = "rust1", since = "1.0.0")]
1707 impl<'a
, T
> FromIterator
<T
> for Cow
<'a
, [T
]> where T
: Clone
{
1708 fn from_iter
<I
: IntoIterator
<Item
=T
>>(it
: I
) -> Cow
<'a
, [T
]> {
1709 Cow
::Owned(FromIterator
::from_iter(it
))
1713 #[allow(deprecated)]
1714 impl<'a
, T
: 'a
> IntoCow
<'a
, [T
]> for Vec
<T
> where T
: Clone
{
1715 fn into_cow(self) -> Cow
<'a
, [T
]> {
1720 #[allow(deprecated)]
1721 impl<'a
, T
> IntoCow
<'a
, [T
]> for &'a
[T
] where T
: Clone
{
1722 fn into_cow(self) -> Cow
<'a
, [T
]> {
1727 ////////////////////////////////////////////////////////////////////////////////
1729 ////////////////////////////////////////////////////////////////////////////////
1731 /// An iterator that moves out of a vector.
1732 #[stable(feature = "rust1", since = "1.0.0")]
1733 pub struct IntoIter
<T
> {
1734 allocation
: *mut T
, // the block of memory allocated for the vector
1735 cap
: usize, // the capacity of the vector
1740 unsafe impl<T
: Send
> Send
for IntoIter
<T
> { }
1741 unsafe impl<T
: Sync
> Sync
for IntoIter
<T
> { }
1743 impl<T
> IntoIter
<T
> {
1745 /// Drops all items that have not yet been moved and returns the empty vector.
1746 #[unstable(feature = "iter_to_vec")]
1747 pub fn into_inner(mut self) -> Vec
<T
> {
1749 for _x
in self.by_ref() { }
1750 let IntoIter { allocation, cap, ptr: _ptr, end: _end }
= self;
1752 Vec
::from_raw_parts(allocation
, 0, cap
)
1757 #[stable(feature = "rust1", since = "1.0.0")]
1758 impl<T
> Iterator
for IntoIter
<T
> {
1762 fn next(&mut self) -> Option
<T
> {
1764 if self.ptr
== self.end
{
1767 if mem
::size_of
::<T
>() == 0 {
1768 // purposefully don't use 'ptr.offset' because for
1769 // vectors with 0-size elements this would return the
1771 self.ptr
= arith_offset(self.ptr
as *const i8, 1) as *const T
;
1773 // Use a non-null pointer value
1774 Some(ptr
::read(EMPTY
as *mut T
))
1777 self.ptr
= self.ptr
.offset(1);
1779 Some(ptr
::read(old
))
1786 fn size_hint(&self) -> (usize, Option
<usize>) {
1787 let diff
= (self.end
as usize) - (self.ptr
as usize);
1788 let size
= mem
::size_of
::<T
>();
1789 let exact
= diff
/ (if size
== 0 {1}
else {size}
);
1790 (exact
, Some(exact
))
1794 fn count(self) -> usize {
1799 #[stable(feature = "rust1", since = "1.0.0")]
1800 impl<T
> DoubleEndedIterator
for IntoIter
<T
> {
1802 fn next_back(&mut self) -> Option
<T
> {
1804 if self.end
== self.ptr
{
1807 if mem
::size_of
::<T
>() == 0 {
1808 // See above for why 'ptr.offset' isn't used
1809 self.end
= arith_offset(self.end
as *const i8, -1) as *const T
;
1811 // Use a non-null pointer value
1812 Some(ptr
::read(EMPTY
as *mut T
))
1814 self.end
= self.end
.offset(-1);
1816 Some(ptr
::read(mem
::transmute(self.end
)))
1823 #[stable(feature = "rust1", since = "1.0.0")]
1824 impl<T
> ExactSizeIterator
for IntoIter
<T
> {}
1826 #[stable(feature = "rust1", since = "1.0.0")]
1827 impl<T
> Drop
for IntoIter
<T
> {
1828 fn drop(&mut self) {
1829 // destroy the remaining elements
1831 for _x
in self.by_ref() {}
1833 dealloc(self.allocation
, self.cap
);
1839 /// A draining iterator for `Vec<T>`.
1840 #[unstable(feature = "drain", reason = "recently added")]
1841 pub struct Drain
<'a
, T
: 'a
> {
1842 /// Index of tail to preserve
1846 /// Current remaining range to remove
1847 iter
: slice
::IterMut
<'a
, T
>,
1851 unsafe impl<'a
, T
: Sync
> Sync
for Drain
<'a
, T
> {}
1852 unsafe impl<'a
, T
: Send
> Send
for Drain
<'a
, T
> {}
1854 #[stable(feature = "rust1", since = "1.0.0")]
1855 impl<'a
, T
> Iterator
for Drain
<'a
, T
> {
1859 fn next(&mut self) -> Option
<T
> {
1860 self.iter
.next().map(|elt
|
1862 ptr
::read(elt
as *const _
)
1867 fn size_hint(&self) -> (usize, Option
<usize>) {
1868 self.iter
.size_hint()
1872 #[stable(feature = "rust1", since = "1.0.0")]
1873 impl<'a
, T
> DoubleEndedIterator
for Drain
<'a
, T
> {
1875 fn next_back(&mut self) -> Option
<T
> {
1876 self.iter
.next_back().map(|elt
|
1878 ptr
::read(elt
as *const _
)
1884 #[stable(feature = "rust1", since = "1.0.0")]
1885 impl<'a
, T
> Drop
for Drain
<'a
, T
> {
1886 fn drop(&mut self) {
1887 // exhaust self first
1888 while let Some(_
) = self.next() { }
1890 if self.tail_len
> 0 {
1892 let source_vec
= &mut *self.vec
;
1893 // memmove back untouched tail, update to new length
1894 let start
= source_vec
.len();
1895 let tail
= self.tail_start
;
1896 let src
= source_vec
.as_ptr().offset(tail
as isize);
1897 let dst
= source_vec
.as_mut_ptr().offset(start
as isize);
1898 ptr
::copy(src
, dst
, self.tail_len
);
1899 source_vec
.set_len(start
+ self.tail_len
);
1906 #[stable(feature = "rust1", since = "1.0.0")]
1907 impl<'a
, T
> ExactSizeIterator
for Drain
<'a
, T
> {}
1909 ////////////////////////////////////////////////////////////////////////////////
1910 // Conversion from &[T] to &Vec<T>
1911 ////////////////////////////////////////////////////////////////////////////////
1913 /// Wrapper type providing a `&Vec<T>` reference via `Deref`.
1914 #[unstable(feature = "collections")]
1915 #[deprecated(since = "1.2.0",
1916 reason
= "replaced with deref coercions or Borrow")]
1917 pub struct DerefVec
<'a
, T
:'a
> {
1919 l
: PhantomData
<&'a T
>,
1922 #[unstable(feature = "collections")]
1923 #[deprecated(since = "1.2.0",
1924 reason
= "replaced with deref coercions or Borrow")]
1925 #[allow(deprecated)]
1926 impl<'a
, T
> Deref
for DerefVec
<'a
, T
> {
1927 type Target
= Vec
<T
>;
1929 fn deref
<'b
>(&'b
self) -> &'b Vec
<T
> {
1934 // Prevent the inner `Vec<T>` from attempting to deallocate memory.
1935 #[stable(feature = "rust1", since = "1.0.0")]
1936 #[deprecated(since = "1.2.0",
1937 reason
= "replaced with deref coercions or Borrow")]
1938 #[allow(deprecated)]
1939 impl<'a
, T
> Drop
for DerefVec
<'a
, T
> {
1940 fn drop(&mut self) {
1946 /// Converts a slice to a wrapper type providing a `&Vec<T>` reference.
1951 /// # #![feature(collections)]
1952 /// use std::vec::as_vec;
1954 /// // Let's pretend we have a function that requires `&Vec<i32>`
1955 /// fn vec_consumer(s: &Vec<i32>) {
1956 /// assert_eq!(s, &[1, 2, 3]);
1959 /// // Provide a `&Vec<i32>` from a `&[i32]` without allocating
1960 /// let values = [1, 2, 3];
1961 /// vec_consumer(&as_vec(&values));
1963 #[unstable(feature = "collections")]
1964 #[deprecated(since = "1.2.0",
1965 reason
= "replaced with deref coercions or Borrow")]
1966 #[allow(deprecated)]
1967 pub fn as_vec
<'a
, T
>(x
: &'a
[T
]) -> DerefVec
<'a
, T
> {
1970 x
: Vec
::from_raw_parts(x
.as_ptr() as *mut T
, x
.len(), x
.len()),
1976 ////////////////////////////////////////////////////////////////////////////////
1977 // Partial vec, used for map_in_place
1978 ////////////////////////////////////////////////////////////////////////////////
1980 /// An owned, partially type-converted vector of elements with non-zero size.
1982 /// `T` and `U` must have the same, non-zero size. They must also have the same
1985 /// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
1986 /// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
1987 /// destructed. Additionally the underlying storage of `vec` will be freed.
1988 struct PartialVecNonZeroSized
<T
,U
> {
1996 _marker
: PhantomData
<U
>,
1999 /// An owned, partially type-converted vector of zero-sized elements.
2001 /// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
2003 struct PartialVecZeroSized
<T
,U
> {
2006 marker
: PhantomData
<::core
::cell
::Cell
<(T
,U
)>>,
2009 impl<T
,U
> Drop
for PartialVecNonZeroSized
<T
,U
> {
2010 fn drop(&mut self) {
2012 // `vec` hasn't been modified until now. As it has a length
2013 // currently, this would run destructors of `T`s which might not be
2014 // there. So at first, set `vec`s length to `0`. This must be done
2015 // at first to remain memory-safe as the destructors of `U` or `T`
2016 // might cause unwinding where `vec`s destructor would be executed.
2017 self.vec
.set_len(0);
2019 // We have instances of `U`s and `T`s in `vec`. Destruct them.
2020 while self.start_u
!= self.end_u
{
2021 let _
= ptr
::read(self.start_u
); // Run a `U` destructor.
2022 self.start_u
= self.start_u
.offset(1);
2024 while self.start_t
!= self.end_t
{
2025 let _
= ptr
::read(self.start_t
); // Run a `T` destructor.
2026 self.start_t
= self.start_t
.offset(1);
2028 // After this destructor ran, the destructor of `vec` will run,
2029 // deallocating the underlying memory.
2034 impl<T
,U
> Drop
for PartialVecZeroSized
<T
,U
> {
2035 fn drop(&mut self) {
2037 // Destruct the instances of `T` and `U` this struct owns.
2038 while self.num_t
!= 0 {
2039 let _
: T
= mem
::uninitialized(); // Run a `T` destructor.
2042 while self.num_u
!= 0 {
2043 let _
: U
= mem
::uninitialized(); // Run a `U` destructor.