1 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
2 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
3 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
4 // option. This file may not be copied, modified, or distributed
5 // except according to those terms.
6
7 //! Small vectors in various sizes. These store a certain number of elements inline, and fall back
8 //! to the heap for larger allocations. This can be a useful optimization for improving cache
9 //! locality and reducing allocator traffic for workloads that fit within the inline buffer.
10 //!
11 //! ## `no_std` support
12 //!
13 //! By default, `smallvec` does not depend on `std`. However, the optional
14 //! `write` feature implements the `std::io::Write` trait for vectors of `u8`.
15 //! When this feature is enabled, `smallvec` depends on `std`.
16 //!
17 //! ## Optional features
18 //!
19 //! ### `serde`
20 //!
21 //! When this optional dependency is enabled, `SmallVec` implements the `serde::Serialize` and
22 //! `serde::Deserialize` traits.
23 //!
24 //! ### `write`
25 //!
26 //! When this feature is enabled, `SmallVec<[u8; _]>` implements the `std::io::Write` trait.
27 //! This feature is not compatible with `#![no_std]` programs.
28 //!
29 //! ### `union`
30 //!
31 //! **This feature requires Rust 1.49.**
32 //!
33 //! When the `union` feature is enabled, `smallvec` will track its state (inline or spilled)
34 //! without the use of an enum tag, reducing the size of the `SmallVec` by one machine word.
35 //! This means that there is potentially no space overhead compared to `Vec`.
36 //! Note that a `SmallVec` can still be larger than a `Vec` if the inline buffer is larger than two
37 //! machine words.
38 //!
39 //! To use this feature, add `features = ["union"]` to the `smallvec` dependency in your Cargo.toml.
40 //! (As noted above, it requires Rust 1.49.)
41 //!
42 //! Tracking issue: [rust-lang/rust#55149](https://github.com/rust-lang/rust/issues/55149)
43 //!
44 //! ### `const_generics`
45 //!
46 //! **This feature requires Rust 1.51.**
47 //!
48 //! When this feature is enabled, `SmallVec` works with arrays of any size, not just a fixed
49 //! list of sizes.
50 //!
51 //! ### `const_new`
52 //!
53 //! **This feature requires Rust 1.51.**
54 //!
55 //! This feature exposes the functions [`SmallVec::new_const`], [`SmallVec::from_const`], and [`smallvec_inline`], which enable a `SmallVec` to be initialized in a `const` context.
56 //! For details, see the
57 //! [Rust Reference](https://doc.rust-lang.org/reference/const_eval.html#const-functions).
58 //!
59 //! ### `specialization`
60 //!
61 //! **This feature is unstable and requires a nightly build of the Rust toolchain.**
62 //!
63 //! When this feature is enabled, `SmallVec::from(slice)` has improved performance for slices
64 //! of `Copy` types. (Without this feature, you can use `SmallVec::from_slice` to get optimal
65 //! performance for `Copy` types.)
66 //!
67 //! Tracking issue: [rust-lang/rust#31844](https://github.com/rust-lang/rust/issues/31844)
68 //!
69 //! ### `may_dangle`
70 //!
71 //! **This feature is unstable and requires a nightly build of the Rust toolchain.**
72 //!
73 //! This feature makes the Rust compiler less strict about use of vectors that contain borrowed
74 //! references. For details, see the
75 //! [Rustonomicon](https://doc.rust-lang.org/1.42.0/nomicon/dropck.html#an-escape-hatch).
76 //!
77 //! Tracking issue: [rust-lang/rust#34761](https://github.com/rust-lang/rust/issues/34761)
78
79 #![no_std]
80 #![cfg_attr(docsrs, feature(doc_cfg))]
81 #![cfg_attr(feature = "specialization", allow(incomplete_features))]
82 #![cfg_attr(feature = "specialization", feature(specialization))]
83 #![cfg_attr(feature = "may_dangle", feature(dropck_eyepatch))]
84 #![deny(missing_docs)]
85
86 #[doc(hidden)]
87 pub extern crate alloc;
88
89 #[cfg(any(test, feature = "write"))]
90 extern crate std;
91
92 #[cfg(test)]
93 mod tests;
94
95 #[allow(deprecated)]
96 use alloc::alloc::{Layout, LayoutErr};
97 use alloc::boxed::Box;
98 use alloc::{vec, vec::Vec};
99 use core::borrow::{Borrow, BorrowMut};
100 use core::cmp;
101 use core::fmt;
102 use core::hash::{Hash, Hasher};
103 use core::hint::unreachable_unchecked;
104 use core::iter::{repeat, FromIterator, FusedIterator, IntoIterator};
105 use core::mem;
106 use core::mem::MaybeUninit;
107 use core::ops::{self, Range, RangeBounds};
108 use core::ptr::{self, NonNull};
109 use core::slice::{self, SliceIndex};
110
111 #[cfg(feature = "serde")]
112 use serde::{
113 de::{Deserialize, Deserializer, SeqAccess, Visitor},
114 ser::{Serialize, SerializeSeq, Serializer},
115 };
116
117 #[cfg(feature = "serde")]
118 use core::marker::PhantomData;
119
120 #[cfg(feature = "write")]
121 use std::io;
122
123 /// Creates a [`SmallVec`] containing the arguments.
124 ///
125 /// `smallvec!` allows `SmallVec`s to be defined with the same syntax as array expressions.
126 /// There are two forms of this macro:
127 ///
128 /// - Create a [`SmallVec`] containing a given list of elements:
129 ///
130 /// ```
131 /// # #[macro_use] extern crate smallvec;
132 /// # use smallvec::SmallVec;
133 /// # fn main() {
134 /// let v: SmallVec<[_; 128]> = smallvec![1, 2, 3];
135 /// assert_eq!(v[0], 1);
136 /// assert_eq!(v[1], 2);
137 /// assert_eq!(v[2], 3);
138 /// # }
139 /// ```
140 ///
141 /// - Create a [`SmallVec`] from a given element and size:
142 ///
143 /// ```
144 /// # #[macro_use] extern crate smallvec;
145 /// # use smallvec::SmallVec;
146 /// # fn main() {
147 /// let v: SmallVec<[_; 0x8000]> = smallvec![1; 3];
148 /// assert_eq!(v, SmallVec::from_buf([1, 1, 1]));
149 /// # }
150 /// ```
151 ///
152 /// Note that unlike array expressions this syntax supports all elements
153 /// which implement [`Clone`] and the number of elements doesn't have to be
154 /// a constant.
155 ///
156 /// This will use `clone` to duplicate an expression, so one should be careful
157 /// using this with types having a nonstandard `Clone` implementation. For
158 /// example, `smallvec![Rc::new(1); 5]` will create a vector of five references
159 /// to the same boxed integer value, not five references pointing to independently
160 /// boxed integers.
161
162 #[macro_export]
163 macro_rules! smallvec {
164 // count helper: transform any expression into 1
165 (@one $x:expr) => (1usize);
166 ($elem:expr; $n:expr) => ({
167 $crate::SmallVec::from_elem($elem, $n)
168 });
169 ($($x:expr),*$(,)*) => ({
170 let count = 0usize $(+ $crate::smallvec!(@one $x))*;
171 #[allow(unused_mut)]
172 let mut vec = $crate::SmallVec::new();
173 if count <= vec.inline_size() {
174 $(vec.push($x);)*
175 vec
176 } else {
177 $crate::SmallVec::from_vec($crate::alloc::vec![$($x,)*])
178 }
179 });
180 }
181
182 /// Creates an inline [`SmallVec`] containing the arguments. This macro is enabled by the feature `const_new`.
183 ///
184 /// `smallvec_inline!` allows `SmallVec`s to be defined with the same syntax as array expressions in `const` contexts.
185 /// The inline storage `A` will always be an array of the size specified by the arguments.
186 /// There are two forms of this macro:
187 ///
188 /// - Create a [`SmallVec`] containing a given list of elements:
189 ///
190 /// ```
191 /// # #[macro_use] extern crate smallvec;
192 /// # use smallvec::SmallVec;
193 /// # fn main() {
194 /// const V: SmallVec<[i32; 3]> = smallvec_inline![1, 2, 3];
195 /// assert_eq!(V[0], 1);
196 /// assert_eq!(V[1], 2);
197 /// assert_eq!(V[2], 3);
198 /// # }
199 /// ```
200 ///
201 /// - Create a [`SmallVec`] from a given element and size:
202 ///
203 /// ```
204 /// # #[macro_use] extern crate smallvec;
205 /// # use smallvec::SmallVec;
206 /// # fn main() {
207 /// const V: SmallVec<[i32; 3]> = smallvec_inline![1; 3];
208 /// assert_eq!(V, SmallVec::from_buf([1, 1, 1]));
209 /// # }
210 /// ```
211 ///
212 /// Note that the behavior mimics that of array expressions, in contrast to [`smallvec`].
213 #[cfg(feature = "const_new")]
214 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
215 #[macro_export]
216 macro_rules! smallvec_inline {
217 // count helper: transform any expression into 1
218 (@one $x:expr) => (1usize);
219 ($elem:expr; $n:expr) => ({
220 $crate::SmallVec::<[_; $n]>::from_const([$elem; $n])
221 });
222 ($($x:expr),+ $(,)?) => ({
223 const N: usize = 0usize $(+ $crate::smallvec_inline!(@one $x))*;
224 $crate::SmallVec::<[_; N]>::from_const([$($x,)*])
225 });
226 }
227
228 /// `panic!()` in debug builds, optimization hint in release.
229 #[cfg(not(feature = "union"))]
230 macro_rules! debug_unreachable {
231 () => {
232 debug_unreachable!("entered unreachable code")
233 };
234 ($e:expr) => {
235 if cfg!(not(debug_assertions)) {
236 unreachable_unchecked();
237 } else {
238 panic!($e);
239 }
240 };
241 }
242
243 /// Trait to be implemented by a collection that can be extended from a slice
244 ///
245 /// ## Example
246 ///
247 /// ```rust
248 /// use smallvec::{ExtendFromSlice, SmallVec};
249 ///
250 /// fn initialize<V: ExtendFromSlice<u8>>(v: &mut V) {
251 /// v.extend_from_slice(b"Test!");
252 /// }
253 ///
254 /// let mut vec = Vec::new();
255 /// initialize(&mut vec);
256 /// assert_eq!(&vec, b"Test!");
257 ///
258 /// let mut small_vec = SmallVec::<[u8; 8]>::new();
259 /// initialize(&mut small_vec);
260 /// assert_eq!(&small_vec as &[_], b"Test!");
261 /// ```
262 #[doc(hidden)]
263 #[deprecated]
264 pub trait ExtendFromSlice<T> {
265 /// Extends a collection from a slice of its element type
266 fn extend_from_slice(&mut self, other: &[T]);
267 }
268
269 #[allow(deprecated)]
270 impl<T: Clone> ExtendFromSlice<T> for Vec<T> {
271 fn extend_from_slice(&mut self, other: &[T]) {
272 Vec::extend_from_slice(self, other)
273 }
274 }
275
276 /// Error type for APIs with fallible heap allocation
277 #[derive(Debug)]
278 pub enum CollectionAllocErr {
279 /// Overflow of `usize::MAX`, or other error during size computation
280 CapacityOverflow,
281 /// The allocator returned an error
282 AllocErr {
283 /// The layout that was passed to the allocator
284 layout: Layout,
285 },
286 }
287
288 impl fmt::Display for CollectionAllocErr {
289 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
290 write!(f, "Allocation error: {:?}", self)
291 }
292 }
293
294 #[allow(deprecated)]
295 impl From<LayoutErr> for CollectionAllocErr {
296 fn from(_: LayoutErr) -> Self {
297 CollectionAllocErr::CapacityOverflow
298 }
299 }
300
301 fn infallible<T>(result: Result<T, CollectionAllocErr>) -> T {
302 match result {
303 Ok(x) => x,
304 Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
305 Err(CollectionAllocErr::AllocErr { layout }) => alloc::alloc::handle_alloc_error(layout),
306 }
307 }
308
309 /// FIXME: use `Layout::array` when we require a Rust version where it’s stable
310 /// https://github.com/rust-lang/rust/issues/55724
311 fn layout_array<T>(n: usize) -> Result<Layout, CollectionAllocErr> {
312 let size = mem::size_of::<T>()
313 .checked_mul(n)
314 .ok_or(CollectionAllocErr::CapacityOverflow)?;
315 let align = mem::align_of::<T>();
316 Layout::from_size_align(size, align).map_err(|_| CollectionAllocErr::CapacityOverflow)
317 }
318
319 unsafe fn deallocate<T>(ptr: *mut T, capacity: usize) {
320 // This unwrap should succeed, since the same layout computation succeeded when allocating.
321 let layout = layout_array::<T>(capacity).unwrap();
322 alloc::alloc::dealloc(ptr as *mut u8, layout)
323 }
324
325 /// An iterator that removes the items from a `SmallVec` and yields them by value.
326 ///
327 /// Returned from [`SmallVec::drain`][1].
328 ///
329 /// [1]: struct.SmallVec.html#method.drain
330 pub struct Drain<'a, T: 'a + Array> {
331 tail_start: usize,
332 tail_len: usize,
333 iter: slice::Iter<'a, T::Item>,
334 vec: NonNull<SmallVec<T>>,
335 }
336
337 impl<'a, T: 'a + Array> fmt::Debug for Drain<'a, T>
338 where
339 T::Item: fmt::Debug,
340 {
341 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
342 f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
343 }
344 }
345
346 unsafe impl<'a, T: Sync + Array> Sync for Drain<'a, T> {}
347 unsafe impl<'a, T: Send + Array> Send for Drain<'a, T> {}
348
349 impl<'a, T: 'a + Array> Iterator for Drain<'a, T> {
350 type Item = T::Item;
351
352 #[inline]
353 fn next(&mut self) -> Option<T::Item> {
354 self.iter
355 .next()
356 .map(|reference| unsafe { ptr::read(reference) })
357 }
358
359 #[inline]
360 fn size_hint(&self) -> (usize, Option<usize>) {
361 self.iter.size_hint()
362 }
363 }
364
365 impl<'a, T: 'a + Array> DoubleEndedIterator for Drain<'a, T> {
366 #[inline]
367 fn next_back(&mut self) -> Option<T::Item> {
368 self.iter
369 .next_back()
370 .map(|reference| unsafe { ptr::read(reference) })
371 }
372 }
373
374 impl<'a, T: Array> ExactSizeIterator for Drain<'a, T> {
375 #[inline]
376 fn len(&self) -> usize {
377 self.iter.len()
378 }
379 }
380
381 impl<'a, T: Array> FusedIterator for Drain<'a, T> {}
382
383 impl<'a, T: 'a + Array> Drop for Drain<'a, T> {
384 fn drop(&mut self) {
385 self.for_each(drop);
386
387 if self.tail_len > 0 {
388 unsafe {
389 let source_vec = self.vec.as_mut();
390
391 // memmove back untouched tail, update to new length
392 let start = source_vec.len();
393 let tail = self.tail_start;
394 if tail != start {
395 let src = source_vec.as_ptr().add(tail);
396 let dst = source_vec.as_mut_ptr().add(start);
397 ptr::copy(src, dst, self.tail_len);
398 }
399 source_vec.set_len(start + self.tail_len);
400 }
401 }
402 }
403 }
404
405 #[cfg(feature = "union")]
406 union SmallVecData<A: Array> {
407 inline: core::mem::ManuallyDrop<MaybeUninit<A>>,
408 heap: (*mut A::Item, usize),
409 }
410
411 #[cfg(all(feature = "union", feature = "const_new"))]
412 impl<T, const N: usize> SmallVecData<[T; N]> {
413 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
414 #[inline]
415 const fn from_const(inline: MaybeUninit<[T; N]>) -> Self {
416 SmallVecData {
417 inline: core::mem::ManuallyDrop::new(inline),
418 }
419 }
420 }
421
422 #[cfg(feature = "union")]
423 impl<A: Array> SmallVecData<A> {
424 #[inline]
425 unsafe fn inline(&self) -> *const A::Item {
426 self.inline.as_ptr() as *const A::Item
427 }
428 #[inline]
429 unsafe fn inline_mut(&mut self) -> *mut A::Item {
430 self.inline.as_mut_ptr() as *mut A::Item
431 }
432 #[inline]
433 fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> {
434 SmallVecData {
435 inline: core::mem::ManuallyDrop::new(inline),
436 }
437 }
438 #[inline]
439 unsafe fn into_inline(self) -> MaybeUninit<A> {
440 core::mem::ManuallyDrop::into_inner(self.inline)
441 }
442 #[inline]
443 unsafe fn heap(&self) -> (*mut A::Item, usize) {
444 self.heap
445 }
446 #[inline]
447 unsafe fn heap_mut(&mut self) -> &mut (*mut A::Item, usize) {
448 &mut self.heap
449 }
450 #[inline]
451 fn from_heap(ptr: *mut A::Item, len: usize) -> SmallVecData<A> {
452 SmallVecData { heap: (ptr, len) }
453 }
454 }
455
456 #[cfg(not(feature = "union"))]
457 enum SmallVecData<A: Array> {
458 Inline(MaybeUninit<A>),
459 Heap((*mut A::Item, usize)),
460 }
461
462 #[cfg(all(not(feature = "union"), feature = "const_new"))]
463 impl<T, const N: usize> SmallVecData<[T; N]> {
464 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
465 #[inline]
466 const fn from_const(inline: MaybeUninit<[T; N]>) -> Self {
467 SmallVecData::Inline(inline)
468 }
469 }
470
471 #[cfg(not(feature = "union"))]
472 impl<A: Array> SmallVecData<A> {
473 #[inline]
474 unsafe fn inline(&self) -> *const A::Item {
475 match self {
476 SmallVecData::Inline(a) => a.as_ptr() as *const A::Item,
477 _ => debug_unreachable!(),
478 }
479 }
480 #[inline]
481 unsafe fn inline_mut(&mut self) -> *mut A::Item {
482 match self {
483 SmallVecData::Inline(a) => a.as_mut_ptr() as *mut A::Item,
484 _ => debug_unreachable!(),
485 }
486 }
487 #[inline]
488 fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> {
489 SmallVecData::Inline(inline)
490 }
491 #[inline]
492 unsafe fn into_inline(self) -> MaybeUninit<A> {
493 match self {
494 SmallVecData::Inline(a) => a,
495 _ => debug_unreachable!(),
496 }
497 }
498 #[inline]
499 unsafe fn heap(&self) -> (*mut A::Item, usize) {
500 match self {
501 SmallVecData::Heap(data) => *data,
502 _ => debug_unreachable!(),
503 }
504 }
505 #[inline]
506 unsafe fn heap_mut(&mut self) -> &mut (*mut A::Item, usize) {
507 match self {
508 SmallVecData::Heap(data) => data,
509 _ => debug_unreachable!(),
510 }
511 }
512 #[inline]
513 fn from_heap(ptr: *mut A::Item, len: usize) -> SmallVecData<A> {
514 SmallVecData::Heap((ptr, len))
515 }
516 }
517
518 unsafe impl<A: Array + Send> Send for SmallVecData<A> {}
519 unsafe impl<A: Array + Sync> Sync for SmallVecData<A> {}
520
521 /// A `Vec`-like container that can store a small number of elements inline.
522 ///
523 /// `SmallVec` acts like a vector, but can store a limited amount of data inline within the
524 /// `SmallVec` struct rather than in a separate allocation. If the data exceeds this limit, the
525 /// `SmallVec` will "spill" its data onto the heap, allocating a new buffer to hold it.
526 ///
527 /// The amount of data that a `SmallVec` can store inline depends on its backing store. The backing
528 /// store can be any type that implements the `Array` trait; usually it is a small fixed-size
529 /// array. For example, a `SmallVec<[u64; 8]>` can hold up to eight 64-bit integers inline.
530 ///
531 /// ## Example
532 ///
533 /// ```rust
534 /// use smallvec::SmallVec;
535 /// let mut v = SmallVec::<[u8; 4]>::new(); // initialize an empty vector
536 ///
537 /// // The vector can hold up to 4 items without spilling onto the heap.
538 /// v.extend(0..4);
539 /// assert_eq!(v.len(), 4);
540 /// assert!(!v.spilled());
541 ///
542 /// // Pushing another element will force the buffer to spill:
543 /// v.push(4);
544 /// assert_eq!(v.len(), 5);
545 /// assert!(v.spilled());
546 /// ```
547 pub struct SmallVec<A: Array> {
548 // The capacity field is used to determine which of the storage variants is active:
549 // If capacity <= Self::inline_capacity() then the inline variant is used and capacity holds the current length of the vector (number of elements actually in use).
550 // If capacity > Self::inline_capacity() then the heap variant is used and capacity holds the size of the memory allocation.
551 capacity: usize,
552 data: SmallVecData<A>,
553 }
554
555 impl<A: Array> SmallVec<A> {
556 /// Construct an empty vector
557 #[inline]
558 pub fn new() -> SmallVec<A> {
559 // Try to detect invalid custom implementations of `Array`. Hopefully,
560 // this check should be optimized away entirely for valid ones.
561 assert!(
562 mem::size_of::<A>() == A::size() * mem::size_of::<A::Item>()
563 && mem::align_of::<A>() >= mem::align_of::<A::Item>()
564 );
565 SmallVec {
566 capacity: 0,
567 data: SmallVecData::from_inline(MaybeUninit::uninit()),
568 }
569 }
570
571 /// Construct an empty vector with enough capacity pre-allocated to store at least `n`
572 /// elements.
573 ///
574 /// Will create a heap allocation only if `n` is larger than the inline capacity.
575 ///
576 /// ```
577 /// # use smallvec::SmallVec;
578 ///
579 /// let v: SmallVec<[u8; 3]> = SmallVec::with_capacity(100);
580 ///
581 /// assert!(v.is_empty());
582 /// assert!(v.capacity() >= 100);
583 /// ```
584 #[inline]
585 pub fn with_capacity(n: usize) -> Self {
586 let mut v = SmallVec::new();
587 v.reserve_exact(n);
588 v
589 }
590
591 /// Construct a new `SmallVec` from a `Vec<A::Item>`.
592 ///
593 /// Elements will be copied to the inline buffer if `vec.capacity() <= Self::inline_capacity()`.
594 ///
595 /// ```rust
596 /// use smallvec::SmallVec;
597 ///
598 /// let vec = vec![1, 2, 3, 4, 5];
599 /// let small_vec: SmallVec<[_; 3]> = SmallVec::from_vec(vec);
600 ///
601 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
602 /// ```
603 #[inline]
604 pub fn from_vec(mut vec: Vec<A::Item>) -> SmallVec<A> {
605 if vec.capacity() <= Self::inline_capacity() {
606 unsafe {
607 let mut data = SmallVecData::<A>::from_inline(MaybeUninit::uninit());
608 let len = vec.len();
609 vec.set_len(0);
610 ptr::copy_nonoverlapping(vec.as_ptr(), data.inline_mut(), len);
611
612 SmallVec {
613 capacity: len,
614 data,
615 }
616 }
617 } else {
618 let (ptr, cap, len) = (vec.as_mut_ptr(), vec.capacity(), vec.len());
619 mem::forget(vec);
620
621 SmallVec {
622 capacity: cap,
623 data: SmallVecData::from_heap(ptr, len),
624 }
625 }
626 }
627
628 /// Constructs a new `SmallVec` on the stack from an `A` without
629 /// copying elements.
630 ///
631 /// ```rust
632 /// use smallvec::SmallVec;
633 ///
634 /// let buf = [1, 2, 3, 4, 5];
635 /// let small_vec: SmallVec<_> = SmallVec::from_buf(buf);
636 ///
637 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
638 /// ```
639 #[inline]
640 pub fn from_buf(buf: A) -> SmallVec<A> {
641 SmallVec {
642 capacity: A::size(),
643 data: SmallVecData::from_inline(MaybeUninit::new(buf)),
644 }
645 }
646
647 /// Constructs a new `SmallVec` on the stack from an `A` without
648 /// copying elements. Also sets the length, which must be less than or
649 /// equal to the size of `buf`.
650 ///
651 /// ```rust
652 /// use smallvec::SmallVec;
653 ///
654 /// let buf = [1, 2, 3, 4, 5, 0, 0, 0];
655 /// let small_vec: SmallVec<_> = SmallVec::from_buf_and_len(buf, 5);
656 ///
657 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
658 /// ```
659 #[inline]
660 pub fn from_buf_and_len(buf: A, len: usize) -> SmallVec<A> {
661 assert!(len <= A::size());
662 unsafe { SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), len) }
663 }
664
665 /// Constructs a new `SmallVec` on the stack from an `A` without
666 /// copying elements. Also sets the length. The user is responsible
667 /// for ensuring that `len <= A::size()`.
668 ///
669 /// ```rust
670 /// use smallvec::SmallVec;
671 /// use std::mem::MaybeUninit;
672 ///
673 /// let buf = [1, 2, 3, 4, 5, 0, 0, 0];
674 /// let small_vec: SmallVec<_> = unsafe {
675 /// SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), 5)
676 /// };
677 ///
678 /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]);
679 /// ```
680 #[inline]
681 pub unsafe fn from_buf_and_len_unchecked(buf: MaybeUninit<A>, len: usize) -> SmallVec<A> {
682 SmallVec {
683 capacity: len,
684 data: SmallVecData::from_inline(buf),
685 }
686 }
687
688 /// Sets the length of a vector.
689 ///
690 /// This will explicitly set the size of the vector, without actually
691 /// modifying its buffers, so it is up to the caller to ensure that the
692 /// vector is actually the specified size.
693 pub unsafe fn set_len(&mut self, new_len: usize) {
694 let (_, len_ptr, _) = self.triple_mut();
695 *len_ptr = new_len;
696 }
697
698 /// The maximum number of elements this vector can hold inline
699 #[inline]
700 fn inline_capacity() -> usize {
701 if mem::size_of::<A::Item>() > 0 {
702 A::size()
703 } else {
704 // For zero-size items code like `ptr.add(offset)` always returns the same pointer.
705 // Therefore all items are at the same address,
706 // and any array size has capacity for infinitely many items.
707 // The capacity is limited by the bit width of the length field.
708 //
709 // `Vec` also does this:
710 // https://github.com/rust-lang/rust/blob/1.44.0/src/liballoc/raw_vec.rs#L186
711 //
712 // In our case, this also ensures that a smallvec of zero-size items never spills,
713 // and we never try to allocate zero bytes which `std::alloc::alloc` disallows.
714 core::usize::MAX
715 }
716 }
717
718 /// The maximum number of elements this vector can hold inline
719 #[inline]
720 pub fn inline_size(&self) -> usize {
721 Self::inline_capacity()
722 }
723
724 /// The number of elements stored in the vector
725 #[inline]
726 pub fn len(&self) -> usize {
727 self.triple().1
728 }
729
730 /// Returns `true` if the vector is empty
731 #[inline]
732 pub fn is_empty(&self) -> bool {
733 self.len() == 0
734 }
735
736 /// The number of items the vector can hold without reallocating
737 #[inline]
738 pub fn capacity(&self) -> usize {
739 self.triple().2
740 }
741
742 /// Returns a tuple with (data ptr, len, capacity)
743 /// Useful to get all SmallVec properties with a single check of the current storage variant.
744 #[inline]
745 fn triple(&self) -> (*const A::Item, usize, usize) {
746 unsafe {
747 if self.spilled() {
748 let (ptr, len) = self.data.heap();
749 (ptr, len, self.capacity)
750 } else {
751 (self.data.inline(), self.capacity, Self::inline_capacity())
752 }
753 }
754 }
755
756 /// Returns a tuple with (data ptr, len ptr, capacity)
757 #[inline]
758 fn triple_mut(&mut self) -> (*mut A::Item, &mut usize, usize) {
759 unsafe {
760 if self.spilled() {
761 let &mut (ptr, ref mut len_ptr) = self.data.heap_mut();
762 (ptr, len_ptr, self.capacity)
763 } else {
764 (
765 self.data.inline_mut(),
766 &mut self.capacity,
767 Self::inline_capacity(),
768 )
769 }
770 }
771 }
772
773 /// Returns `true` if the data has spilled into a separate heap-allocated buffer.
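///
/// # Example
///
/// A small illustration: a `SmallVec<[u8; 2]>` stays inline until it holds
/// more than two elements.
///
/// ```
/// use smallvec::SmallVec;
/// let mut v = SmallVec::<[u8; 2]>::new();
/// assert!(!v.spilled());
/// v.extend(0..4);
/// assert!(v.spilled());
/// ```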
774 #[inline]
775 pub fn spilled(&self) -> bool {
776 self.capacity > Self::inline_capacity()
777 }
778
779 /// Creates a draining iterator that removes the specified range in the vector
780 /// and yields the removed items.
781 ///
782 /// Note 1: The element range is removed even if the iterator is only
783 /// partially consumed or not consumed at all.
784 ///
785 /// Note 2: It is unspecified how many elements are removed from the vector
786 /// if the `Drain` value is leaked.
787 ///
788 /// # Panics
789 ///
790 /// Panics if the starting point is greater than the end point or if
791 /// the end point is greater than the length of the vector.
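///
/// # Example
///
/// A brief sketch: draining the middle of a vector removes that range and
/// yields its elements.
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3, 4, 5];
/// let drained: SmallVec<[i32; 8]> = v.drain(1..4).collect();
/// assert_eq!(&*drained, &[2, 3, 4]);
/// assert_eq!(&*v, &[1, 5]);
/// ```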
792 pub fn drain<R>(&mut self, range: R) -> Drain<'_, A>
793 where
794 R: RangeBounds<usize>,
795 {
796 use core::ops::Bound::*;
797
798 let len = self.len();
799 let start = match range.start_bound() {
800 Included(&n) => n,
801 Excluded(&n) => n.checked_add(1).expect("Range start out of bounds"),
802 Unbounded => 0,
803 };
804 let end = match range.end_bound() {
805 Included(&n) => n.checked_add(1).expect("Range end out of bounds"),
806 Excluded(&n) => n,
807 Unbounded => len,
808 };
809
810 assert!(start <= end);
811 assert!(end <= len);
812
813 unsafe {
814 self.set_len(start);
815
816 let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start);
817
818 Drain {
819 tail_start: end,
820 tail_len: len - end,
821 iter: range_slice.iter(),
822 vec: NonNull::from(self),
823 }
824 }
825 }
826
827 /// Append an item to the vector.
828 #[inline]
829 pub fn push(&mut self, value: A::Item) {
830 unsafe {
831 let (mut ptr, mut len, cap) = self.triple_mut();
832 if *len == cap {
833 self.reserve(1);
834 let &mut (heap_ptr, ref mut heap_len) = self.data.heap_mut();
835 ptr = heap_ptr;
836 len = heap_len;
837 }
838 ptr::write(ptr.add(*len), value);
839 *len += 1;
840 }
841 }
842
843 /// Remove an item from the end of the vector and return it, or None if empty.
844 #[inline]
845 pub fn pop(&mut self) -> Option<A::Item> {
846 unsafe {
847 let (ptr, len_ptr, _) = self.triple_mut();
848 if *len_ptr == 0 {
849 return None;
850 }
851 let last_index = *len_ptr - 1;
852 *len_ptr = last_index;
853 Some(ptr::read(ptr.add(last_index)))
854 }
855 }
856
857 /// Moves all the elements of `other` into `self`, leaving `other` empty.
858 ///
859 /// # Example
860 ///
861 /// ```
862 /// # use smallvec::{SmallVec, smallvec};
863 /// let mut v0: SmallVec<[u8; 16]> = smallvec![1, 2, 3];
864 /// let mut v1: SmallVec<[u8; 32]> = smallvec![4, 5, 6];
865 /// v0.append(&mut v1);
866 /// assert_eq!(*v0, [1, 2, 3, 4, 5, 6]);
867 /// assert_eq!(*v1, []);
868 /// ```
869 pub fn append<B>(&mut self, other: &mut SmallVec<B>)
870 where
871 B: Array<Item = A::Item>,
872 {
873 self.extend(other.drain(..))
874 }
875
876 /// Re-allocate to set the capacity to `max(new_cap, inline_size())`.
877 ///
878 /// Panics if `new_cap` is less than the vector's length
879 /// or if the capacity computation overflows `usize`.
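///
/// # Example
///
/// A minimal sketch; after growing, the capacity is at least `new_cap`
/// (or the inline size, if that is larger) and the contents are unchanged.
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[u8; 2]> = smallvec![1, 2];
/// v.grow(16);
/// assert!(v.capacity() >= 16);
/// assert_eq!(&*v, &[1, 2]);
/// ```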
880 pub fn grow(&mut self, new_cap: usize) {
881 infallible(self.try_grow(new_cap))
882 }
883
884 /// Re-allocate to set the capacity to `max(new_cap, inline_size())`.
885 ///
886 /// Panics if `new_cap` is less than the vector's length
887 pub fn try_grow(&mut self, new_cap: usize) -> Result<(), CollectionAllocErr> {
888 unsafe {
889 let (ptr, &mut len, cap) = self.triple_mut();
890 let unspilled = !self.spilled();
891 assert!(new_cap >= len);
892 if new_cap <= self.inline_size() {
893 if unspilled {
894 return Ok(());
895 }
896 self.data = SmallVecData::from_inline(MaybeUninit::uninit());
897 ptr::copy_nonoverlapping(ptr, self.data.inline_mut(), len);
898 self.capacity = len;
899 deallocate(ptr, cap);
900 } else if new_cap != cap {
901 let layout = layout_array::<A::Item>(new_cap)?;
902 debug_assert!(layout.size() > 0);
903 let new_alloc;
904 if unspilled {
905 new_alloc = NonNull::new(alloc::alloc::alloc(layout))
906 .ok_or(CollectionAllocErr::AllocErr { layout })?
907 .cast()
908 .as_ptr();
909 ptr::copy_nonoverlapping(ptr, new_alloc, len);
910 } else {
911 // This should never fail, since the same layout computation
912 // succeeded when previously allocating `ptr`.
913 let old_layout = layout_array::<A::Item>(cap)?;
914
915 let new_ptr = alloc::alloc::realloc(ptr as *mut u8, old_layout, layout.size());
916 new_alloc = NonNull::new(new_ptr)
917 .ok_or(CollectionAllocErr::AllocErr { layout })?
918 .cast()
919 .as_ptr();
920 }
921 self.data = SmallVecData::from_heap(new_alloc, len);
922 self.capacity = new_cap;
923 }
924 Ok(())
925 }
926 }
927
928 /// Reserve capacity for `additional` more elements to be inserted.
929 ///
930 /// May reserve more space to avoid frequent reallocations.
931 ///
932 /// Panics if the capacity computation overflows `usize`.
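///
/// # Example
///
/// A small sketch; note that the resulting capacity may be larger than
/// `len + additional`, since `reserve` is allowed to over-allocate.
///
/// ```
/// use smallvec::SmallVec;
/// let mut v: SmallVec<[u8; 4]> = SmallVec::new();
/// v.reserve(10);
/// assert!(v.capacity() >= 10);
/// ```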
933 #[inline]
934 pub fn reserve(&mut self, additional: usize) {
935 infallible(self.try_reserve(additional))
936 }
937
938 /// Reserve capacity for `additional` more elements to be inserted.
939 ///
940 /// May reserve more space to avoid frequent reallocations.
941 pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
942 // prefer triple_mut() even if triple() would work
943 // so that the optimizer removes duplicated calls to it
944 // from callers like insert()
945 let (_, &mut len, cap) = self.triple_mut();
946 if cap - len >= additional {
947 return Ok(());
948 }
949 let new_cap = len
950 .checked_add(additional)
951 .and_then(usize::checked_next_power_of_two)
952 .ok_or(CollectionAllocErr::CapacityOverflow)?;
953 self.try_grow(new_cap)
954 }
955
956 /// Reserve the minimum capacity for `additional` more elements to be inserted.
957 ///
958 /// Panics if the new capacity overflows `usize`.
959 pub fn reserve_exact(&mut self, additional: usize) {
960 infallible(self.try_reserve_exact(additional))
961 }
962
963 /// Reserve the minimum capacity for `additional` more elements to be inserted.
964 pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
965 let (_, &mut len, cap) = self.triple_mut();
966 if cap - len >= additional {
967 return Ok(());
968 }
969 let new_cap = len
970 .checked_add(additional)
971 .ok_or(CollectionAllocErr::CapacityOverflow)?;
972 self.try_grow(new_cap)
973 }
974
975 /// Shrink the capacity of the vector as much as possible.
976 ///
977 /// When possible, this will move data from an external heap buffer to the vector's inline
978 /// storage.
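///
/// # Example
///
/// A short sketch: a spilled vector whose contents fit inline moves back to
/// inline storage.
///
/// ```
/// use smallvec::SmallVec;
/// let mut v: SmallVec<[u8; 4]> = SmallVec::with_capacity(16);
/// v.extend(0..4);
/// assert!(v.spilled());
/// v.shrink_to_fit();
/// assert!(!v.spilled());
/// ```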
979 pub fn shrink_to_fit(&mut self) {
980 if !self.spilled() {
981 return;
982 }
983 let len = self.len();
984 if self.inline_size() >= len {
985 unsafe {
986 let (ptr, len) = self.data.heap();
987 self.data = SmallVecData::from_inline(MaybeUninit::uninit());
988 ptr::copy_nonoverlapping(ptr, self.data.inline_mut(), len);
989 deallocate(ptr, self.capacity);
990 self.capacity = len;
991 }
992 } else if self.capacity() > len {
993 self.grow(len);
994 }
995 }
996
997 /// Shorten the vector, keeping the first `len` elements and dropping the rest.
998 ///
999 /// If `len` is greater than or equal to the vector's current length, this has no
1000 /// effect.
1001 ///
1002 /// This does not re-allocate. If you want the vector's capacity to shrink, call
1003 /// `shrink_to_fit` after truncating.
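///
/// # Example
///
/// For illustration:
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3, 4, 5];
/// v.truncate(2);
/// assert_eq!(&*v, &[1, 2]);
/// ```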
1004 pub fn truncate(&mut self, len: usize) {
1005 unsafe {
1006 let (ptr, len_ptr, _) = self.triple_mut();
1007 while len < *len_ptr {
1008 let last_index = *len_ptr - 1;
1009 *len_ptr = last_index;
1010 ptr::drop_in_place(ptr.add(last_index));
1011 }
1012 }
1013 }
1014
1015 /// Extracts a slice containing the entire vector.
1016 ///
1017 /// Equivalent to `&s[..]`.
1018 pub fn as_slice(&self) -> &[A::Item] {
1019 self
1020 }
1021
1022 /// Extracts a mutable slice of the entire vector.
1023 ///
1024 /// Equivalent to `&mut s[..]`.
1025 pub fn as_mut_slice(&mut self) -> &mut [A::Item] {
1026 self
1027 }
1028
1029 /// Remove the element at position `index`, replacing it with the last element.
1030 ///
1031 /// This does not preserve ordering, but is O(1).
1032 ///
1033 /// Panics if `index` is out of bounds.
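///
/// # Example
///
/// A small sketch showing that the last element is moved into the hole left
/// by the removed one.
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3, 4];
/// assert_eq!(v.swap_remove(1), 2);
/// assert_eq!(&*v, &[1, 4, 3]);
/// ```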
1034 #[inline]
1035 pub fn swap_remove(&mut self, index: usize) -> A::Item {
1036 let len = self.len();
1037 self.swap(len - 1, index);
1038 self.pop()
1039 .unwrap_or_else(|| unsafe { unreachable_unchecked() })
1040 }
1041
1042 /// Remove all elements from the vector.
1043 #[inline]
1044 pub fn clear(&mut self) {
1045 self.truncate(0);
1046 }
1047
1048 /// Remove and return the element at position `index`, shifting all elements after it to the
1049 /// left.
1050 ///
1051 /// Panics if `index` is out of bounds.
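///
/// # Example
///
/// A brief sketch of shifting removal:
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
/// assert_eq!(v.remove(1), 2);
/// assert_eq!(&*v, &[1, 3]);
/// ```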
1052 pub fn remove(&mut self, index: usize) -> A::Item {
1053 unsafe {
1054 let (mut ptr, len_ptr, _) = self.triple_mut();
1055 let len = *len_ptr;
1056 assert!(index < len);
1057 *len_ptr = len - 1;
1058 ptr = ptr.add(index);
1059 let item = ptr::read(ptr);
1060 ptr::copy(ptr.add(1), ptr, len - index - 1);
1061 item
1062 }
1063 }
1064
1065 /// Insert an element at position `index`, shifting all elements after it to the right.
1066 ///
1067 /// Panics if `index` is out of bounds.
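///
/// # Example
///
/// A brief sketch of shifting insertion:
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 3];
/// v.insert(1, 2);
/// assert_eq!(&*v, &[1, 2, 3]);
/// ```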
1068 pub fn insert(&mut self, index: usize, element: A::Item) {
1069 self.reserve(1);
1070
1071 unsafe {
1072 let (mut ptr, len_ptr, _) = self.triple_mut();
1073 let len = *len_ptr;
1074 assert!(index <= len);
1075 *len_ptr = len + 1;
1076 ptr = ptr.add(index);
1077 ptr::copy(ptr, ptr.add(1), len - index);
1078 ptr::write(ptr, element);
1079 }
1080 }
1081
1082 /// Insert multiple elements at position `index`, shifting all following elements toward the
1083 /// back.
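///
/// # Example
///
/// A short sketch; the iterator's elements are spliced in starting at `index`.
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 5];
/// v.insert_many(1, [2, 3, 4].iter().cloned());
/// assert_eq!(&*v, &[1, 2, 3, 4, 5]);
/// ```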
1084 pub fn insert_many<I: IntoIterator<Item = A::Item>>(&mut self, index: usize, iterable: I) {
1085 let mut iter = iterable.into_iter();
1086 if index == self.len() {
1087 return self.extend(iter);
1088 }
1089
1090 let (lower_size_bound, _) = iter.size_hint();
1091 assert!(lower_size_bound <= core::isize::MAX as usize); // Ensure offset is indexable
1092 assert!(index + lower_size_bound >= index); // Protect against overflow
1093
1094 let mut num_added = 0;
1095 let old_len = self.len();
1096 assert!(index <= old_len);
1097
1098 unsafe {
1099 // Reserve space for `lower_size_bound` elements.
1100 self.reserve(lower_size_bound);
1101 let start = self.as_mut_ptr();
1102 let ptr = start.add(index);
1103
1104 // Move the trailing elements.
1105 ptr::copy(ptr, ptr.add(lower_size_bound), old_len - index);
1106
1107 // In case the iterator panics, don't double-drop the items we just copied above.
1108 self.set_len(0);
1109 let mut guard = DropOnPanic {
1110 start,
1111 skip: index..(index + lower_size_bound),
1112 len: old_len + lower_size_bound,
1113 };
1114
1115 while num_added < lower_size_bound {
1116 let element = match iter.next() {
1117 Some(x) => x,
1118 None => break,
1119 };
1120 let cur = ptr.add(num_added);
1121 ptr::write(cur, element);
1122 guard.skip.start += 1;
1123 num_added += 1;
1124 }
1125
1126 if num_added < lower_size_bound {
1127 // Iterator provided fewer elements than the hint. Move the tail backward.
1128 ptr::copy(
1129 ptr.add(lower_size_bound),
1130 ptr.add(num_added),
1131 old_len - index,
1132 );
1133 }
1134 // There are no more duplicate or uninitialized slots, so the guard is not needed.
1135 self.set_len(old_len + num_added);
1136 mem::forget(guard);
1137 }
1138
1139 // Insert any remaining elements one-by-one.
1140 for element in iter {
1141 self.insert(index + num_added, element);
1142 num_added += 1;
1143 }
1144
1145 struct DropOnPanic<T> {
1146 start: *mut T,
1147 skip: Range<usize>, // Space we copied-out-of, but haven't written-to yet.
1148 len: usize,
1149 }
1150
1151 impl<T> Drop for DropOnPanic<T> {
1152 fn drop(&mut self) {
1153 for i in 0..self.len {
1154 if !self.skip.contains(&i) {
1155 unsafe {
1156 ptr::drop_in_place(self.start.add(i));
1157 }
1158 }
1159 }
1160 }
1161 }
1162 }
1163
1164 /// Convert a SmallVec to a Vec, without reallocating if the SmallVec has already spilled onto
1165 /// the heap.
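///
/// # Example
///
/// A minimal sketch; here the vector has already spilled, so the heap buffer
/// is reused directly.
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let v: SmallVec<[i32; 2]> = smallvec![1, 2, 3];
/// let vec = v.into_vec();
/// assert_eq!(vec, vec![1, 2, 3]);
/// ```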
1166 pub fn into_vec(self) -> Vec<A::Item> {
1167 if self.spilled() {
1168 unsafe {
1169 let (ptr, len) = self.data.heap();
1170 let v = Vec::from_raw_parts(ptr, len, self.capacity);
1171 mem::forget(self);
1172 v
1173 }
1174 } else {
1175 self.into_iter().collect()
1176 }
1177 }
1178
1179 /// Converts a `SmallVec` into a `Box<[T]>` without reallocating if the `SmallVec` has already spilled
1180 /// onto the heap.
1181 ///
1182 /// Note that this will drop any excess capacity.
1183 pub fn into_boxed_slice(self) -> Box<[A::Item]> {
1184 self.into_vec().into_boxed_slice()
1185 }
1186
1187 /// Convert the SmallVec into an `A` if possible. Otherwise return `Err(Self)`.
1188 ///
1189 /// This method returns `Err(Self)` if the SmallVec is too short (and the `A` contains uninitialized elements),
1190 /// or if the SmallVec is too long (and all the elements were spilled to the heap).
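///
/// # Example
///
/// A small sketch of the success and failure cases.
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let v: SmallVec<[i32; 3]> = smallvec![1, 2, 3];
/// assert_eq!(v.into_inner(), Ok([1, 2, 3]));
///
/// let v: SmallVec<[i32; 3]> = smallvec![1, 2]; // too short
/// assert!(v.into_inner().is_err());
/// ```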
1191 pub fn into_inner(self) -> Result<A, Self> {
1192 if self.spilled() || self.len() != A::size() {
1193 // Note: A::size, not Self::inline_capacity
1194 Err(self)
1195 } else {
1196 unsafe {
1197 let data = ptr::read(&self.data);
1198 mem::forget(self);
1199 Ok(data.into_inline().assume_init())
1200 }
1201 }
1202 }
1203
1204 /// Retains only the elements specified by the predicate.
1205 ///
1206 /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
1207 /// This method operates in place and preserves the order of the retained
1208 /// elements.
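///
/// # Example
///
/// For illustration:
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3, 4];
/// v.retain(|x| *x % 2 == 0);
/// assert_eq!(&*v, &[2, 4]);
/// ```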
1209 pub fn retain<F: FnMut(&mut A::Item) -> bool>(&mut self, mut f: F) {
1210 let mut del = 0;
1211 let len = self.len();
1212 for i in 0..len {
1213 if !f(&mut self[i]) {
1214 del += 1;
1215 } else if del > 0 {
1216 self.swap(i - del, i);
1217 }
1218 }
1219 self.truncate(len - del);
1220 }
1221
1222 /// Removes consecutive duplicate elements.
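///
/// # Example
///
/// A brief sketch; only consecutive duplicates are removed.
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 8]> = smallvec![1, 1, 2, 2, 2, 3, 1];
/// v.dedup();
/// assert_eq!(&*v, &[1, 2, 3, 1]);
/// ```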
1223 pub fn dedup(&mut self)
1224 where
1225 A::Item: PartialEq<A::Item>,
1226 {
1227 self.dedup_by(|a, b| a == b);
1228 }
1229
1230 /// Removes consecutive duplicate elements using the given equality relation.
1231 pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1232 where
1233 F: FnMut(&mut A::Item, &mut A::Item) -> bool,
1234 {
1235 // See the implementation of Vec::dedup_by in the
1236 // standard library for an explanation of this algorithm.
1237 let len = self.len();
1238 if len <= 1 {
1239 return;
1240 }
1241
1242 let ptr = self.as_mut_ptr();
1243 let mut w: usize = 1;
1244
1245 unsafe {
1246 for r in 1..len {
1247 let p_r = ptr.add(r);
1248 let p_wm1 = ptr.add(w - 1);
1249 if !same_bucket(&mut *p_r, &mut *p_wm1) {
1250 if r != w {
1251 let p_w = p_wm1.add(1);
1252 mem::swap(&mut *p_r, &mut *p_w);
1253 }
1254 w += 1;
1255 }
1256 }
1257 }
1258
1259 self.truncate(w);
1260 }
1261
1262 /// Removes consecutive elements that map to the same key.
1263 pub fn dedup_by_key<F, K>(&mut self, mut key: F)
1264 where
1265 F: FnMut(&mut A::Item) -> K,
1266 K: PartialEq<K>,
1267 {
1268 self.dedup_by(|a, b| key(a) == key(b));
1269 }
1270
1271 /// Resizes the `SmallVec` in-place so that `len` is equal to `new_len`.
1272 ///
1273 /// If `new_len` is greater than `len`, the `SmallVec` is extended by the difference, with each
1274 /// additional slot filled with the result of calling the closure `f`. The return values from `f`
1275 /// will end up in the `SmallVec` in the order they have been generated.
1276 ///
1277 /// If `new_len` is less than `len`, the `SmallVec` is simply truncated.
1278 ///
1279 /// This method uses a closure to create new values on every push. If you'd rather `Clone` a given
1280 /// value, use `resize`. If you want to use the `Default` trait to generate values, you can pass
1281 /// `Default::default()` as the second argument.
1282 ///
1283 /// Added for std::vec::Vec compatibility (added in Rust 1.33.0)
1284 ///
1285 /// ```
1286 /// # use smallvec::{smallvec, SmallVec};
1287 /// let mut vec : SmallVec<[_; 4]> = smallvec![1, 2, 3];
1288 /// vec.resize_with(5, Default::default);
1289 /// assert_eq!(&*vec, &[1, 2, 3, 0, 0]);
1290 ///
1291 /// let mut vec : SmallVec<[_; 4]> = smallvec![];
1292 /// let mut p = 1;
1293 /// vec.resize_with(4, || { p *= 2; p });
1294 /// assert_eq!(&*vec, &[2, 4, 8, 16]);
1295 /// ```
1296 pub fn resize_with<F>(&mut self, new_len: usize, f: F)
1297 where
1298 F: FnMut() -> A::Item,
1299 {
1300 let old_len = self.len();
1301 if old_len < new_len {
1302 let mut f = f;
1303 let additional = new_len - old_len;
1304 self.reserve(additional);
1305 for _ in 0..additional {
1306 self.push(f());
1307 }
1308 } else if old_len > new_len {
1309 self.truncate(new_len);
1310 }
1311 }
1312
1313 /// Creates a `SmallVec` directly from the raw components of another
1314 /// `SmallVec`.
1315 ///
1316 /// # Safety
1317 ///
1318 /// This is highly unsafe, due to the number of invariants that aren't
1319 /// checked:
1320 ///
1321 /// * `ptr` needs to have been previously allocated via `SmallVec` for its
1322 /// spilled storage (at least, it's highly likely to be incorrect if it
1323 /// wasn't).
1324 /// * `ptr`'s `A::Item` type needs to be the same size and alignment that
1325 /// it was allocated with
1326 /// * `length` needs to be less than or equal to `capacity`.
1327 /// * `capacity` needs to be the capacity that the pointer was allocated
1328 /// with.
1329 ///
1330 /// Violating these may cause problems like corrupting the allocator's
1331 /// internal data structures.
1332 ///
1333 /// Additionally, `capacity` must be greater than the amount of inline
1334 /// storage `A` has; that is, the new `SmallVec` must need to spill over
1335 /// into heap allocated storage. This condition is asserted against.
1336 ///
1337 /// The ownership of `ptr` is effectively transferred to the
1338 /// `SmallVec` which may then deallocate, reallocate or change the
1339 /// contents of memory pointed to by the pointer at will. Ensure
1340 /// that nothing else uses the pointer after calling this
1341 /// function.
1342 ///
1343 /// # Examples
1344 ///
1345 /// ```
1346 /// # #[macro_use] extern crate smallvec;
1347 /// # use smallvec::SmallVec;
1348 /// use std::mem;
1349 /// use std::ptr;
1350 ///
1351 /// fn main() {
1352 /// let mut v: SmallVec<[_; 1]> = smallvec![1, 2, 3];
1353 ///
1354 /// // Pull out the important parts of `v`.
1355 /// let p = v.as_mut_ptr();
1356 /// let len = v.len();
1357 /// let cap = v.capacity();
1358 /// let spilled = v.spilled();
1359 ///
1360 /// unsafe {
1361 /// // Forget all about `v`. The heap allocation that stored the
1362 /// // three values won't be deallocated.
1363 /// mem::forget(v);
1364 ///
1365 /// // Overwrite memory with [4, 5, 6].
1366 /// //
1367 /// // This is only safe if `spilled` is true! Otherwise, we are
1368 /// // writing into the old `SmallVec`'s inline storage on the
1369 /// // stack.
1370 /// assert!(spilled);
1371 /// for i in 0..len {
1372 /// ptr::write(p.add(i), 4 + i);
1373 /// }
1374 ///
1375 /// // Put everything back together into a SmallVec with a different
1376 /// // amount of inline storage, but which is still less than `cap`.
1377 /// let rebuilt = SmallVec::<[_; 2]>::from_raw_parts(p, len, cap);
1378 /// assert_eq!(&*rebuilt, &[4, 5, 6]);
1379 /// }
1380 /// }
/// ```
1381 #[inline]
1382 pub unsafe fn from_raw_parts(ptr: *mut A::Item, length: usize, capacity: usize) -> SmallVec<A> {
1383 assert!(capacity > Self::inline_capacity());
1384 SmallVec {
1385 capacity,
1386 data: SmallVecData::from_heap(ptr, length),
1387 }
1388 }
1389
1390 /// Returns a raw pointer to the vector's buffer.
1391 pub fn as_ptr(&self) -> *const A::Item {
1392 // We shadow the slice method of the same name to avoid going through
1393 // `deref`, which creates an intermediate reference that may place
1394 // additional safety constraints on the contents of the slice.
1395 self.triple().0
1396 }
1397
1398 /// Returns a raw mutable pointer to the vector's buffer.
1399 pub fn as_mut_ptr(&mut self) -> *mut A::Item {
1400 // We shadow the slice method of the same name to avoid going through
1401 // `deref_mut`, which creates an intermediate reference that may place
1402 // additional safety constraints on the contents of the slice.
1403 self.triple_mut().0
1404 }
1405 }
1406
1407 impl<A: Array> SmallVec<A>
1408 where
1409 A::Item: Copy,
1410 {
1411 /// Copy the elements from a slice into a new `SmallVec`.
1412 ///
1413 /// For slices of `Copy` types, this is more efficient than `SmallVec::from(slice)`.
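///
/// # Example
///
/// A minimal sketch:
///
/// ```
/// use smallvec::SmallVec;
/// let v: SmallVec<[u8; 4]> = SmallVec::from_slice(&[1, 2, 3]);
/// assert_eq!(&*v, &[1, 2, 3]);
/// assert!(!v.spilled());
/// ```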
1414 pub fn from_slice(slice: &[A::Item]) -> Self {
1415 let len = slice.len();
1416 if len <= Self::inline_capacity() {
1417 SmallVec {
1418 capacity: len,
1419 data: SmallVecData::from_inline(unsafe {
1420 let mut data: MaybeUninit<A> = MaybeUninit::uninit();
1421 ptr::copy_nonoverlapping(
1422 slice.as_ptr(),
1423 data.as_mut_ptr() as *mut A::Item,
1424 len,
1425 );
1426 data
1427 }),
1428 }
1429 } else {
1430 let mut b = slice.to_vec();
1431 let (ptr, cap) = (b.as_mut_ptr(), b.capacity());
1432 mem::forget(b);
1433 SmallVec {
1434 capacity: cap,
1435 data: SmallVecData::from_heap(ptr, len),
1436 }
1437 }
1438 }
1439
1440 /// Copy elements from a slice into the vector at position `index`, shifting any following
1441 /// elements toward the back.
1442 ///
1443 /// For slices of `Copy` types, this is more efficient than `insert`.
1444 pub fn insert_from_slice(&mut self, index: usize, slice: &[A::Item]) {
1445 self.reserve(slice.len());
1446
1447 let len = self.len();
1448 assert!(index <= len);
1449
1450 unsafe {
1451 let slice_ptr = slice.as_ptr();
1452 let ptr = self.as_mut_ptr().add(index);
1453 ptr::copy(ptr, ptr.add(slice.len()), len - index);
1454 ptr::copy_nonoverlapping(slice_ptr, ptr, slice.len());
1455 self.set_len(len + slice.len());
1456 }
1457 }
1458
1459 /// Copy elements from a slice and append them to the vector.
1460 ///
1461 /// For slices of `Copy` types, this is more efficient than `extend`.
1462 #[inline]
1463 pub fn extend_from_slice(&mut self, slice: &[A::Item]) {
1464 let len = self.len();
1465 self.insert_from_slice(len, slice);
1466 }
1467 }
1468
1469 impl<A: Array> SmallVec<A>
1470 where
1471 A::Item: Clone,
1472 {
1473 /// Resizes the vector so that its length is equal to `len`.
1474 ///
1475 /// If `len` is less than the current length, the vector is simply truncated.
1476 ///
1477 /// If `len` is greater than the current length, `value` is appended to the
1478 /// vector until its length equals `len`.
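///
/// # Example
///
/// A short sketch of both the growing and the truncating case.
///
/// ```
/// use smallvec::{SmallVec, smallvec};
/// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
/// v.resize(5, 0);
/// assert_eq!(&*v, &[1, 2, 3, 0, 0]);
/// v.resize(2, 0);
/// assert_eq!(&*v, &[1, 2]);
/// ```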
1479 pub fn resize(&mut self, len: usize, value: A::Item) {
1480 let old_len = self.len();
1481
1482 if len > old_len {
1483 self.extend(repeat(value).take(len - old_len));
1484 } else {
1485 self.truncate(len);
1486 }
1487 }
1488
1489 /// Creates a `SmallVec` with `n` copies of `elem`.
1490 /// ```
1491 /// use smallvec::SmallVec;
1492 ///
1493 /// let v = SmallVec::<[char; 128]>::from_elem('d', 2);
1494 /// assert_eq!(v, SmallVec::from_buf(['d', 'd']));
1495 /// ```
1496 pub fn from_elem(elem: A::Item, n: usize) -> Self {
1497 if n > Self::inline_capacity() {
1498 vec![elem; n].into()
1499 } else {
1500 let mut v = SmallVec::<A>::new();
1501 unsafe {
1502 let (ptr, len_ptr, _) = v.triple_mut();
1503 let mut local_len = SetLenOnDrop::new(len_ptr);
1504
1505 for i in 0..n {
1506 ::core::ptr::write(ptr.add(i), elem.clone());
1507 local_len.increment_len(1);
1508 }
1509 }
1510 v
1511 }
1512 }
1513 }
1514
1515 impl<A: Array> ops::Deref for SmallVec<A> {
1516 type Target = [A::Item];
1517 #[inline]
1518 fn deref(&self) -> &[A::Item] {
1519 unsafe {
1520 let (ptr, len, _) = self.triple();
1521 slice::from_raw_parts(ptr, len)
1522 }
1523 }
1524 }
1525
1526 impl<A: Array> ops::DerefMut for SmallVec<A> {
1527 #[inline]
1528 fn deref_mut(&mut self) -> &mut [A::Item] {
1529 unsafe {
1530 let (ptr, &mut len, _) = self.triple_mut();
1531 slice::from_raw_parts_mut(ptr, len)
1532 }
1533 }
1534 }
1535
1536 impl<A: Array> AsRef<[A::Item]> for SmallVec<A> {
1537 #[inline]
1538 fn as_ref(&self) -> &[A::Item] {
1539 self
1540 }
1541 }
1542
1543 impl<A: Array> AsMut<[A::Item]> for SmallVec<A> {
1544 #[inline]
1545 fn as_mut(&mut self) -> &mut [A::Item] {
1546 self
1547 }
1548 }
1549
1550 impl<A: Array> Borrow<[A::Item]> for SmallVec<A> {
1551 #[inline]
1552 fn borrow(&self) -> &[A::Item] {
1553 self
1554 }
1555 }
1556
1557 impl<A: Array> BorrowMut<[A::Item]> for SmallVec<A> {
1558 #[inline]
1559 fn borrow_mut(&mut self) -> &mut [A::Item] {
1560 self
1561 }
1562 }
1563
1564 #[cfg(feature = "write")]
1565 #[cfg_attr(docsrs, doc(cfg(feature = "write")))]
1566 impl<A: Array<Item = u8>> io::Write for SmallVec<A> {
1567 #[inline]
1568 fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
1569 self.extend_from_slice(buf);
1570 Ok(buf.len())
1571 }
1572
1573 #[inline]
1574 fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
1575 self.extend_from_slice(buf);
1576 Ok(())
1577 }
1578
1579 #[inline]
1580 fn flush(&mut self) -> io::Result<()> {
1581 Ok(())
1582 }
1583 }
1584
1585 #[cfg(feature = "serde")]
1586 #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
1587 impl<A: Array> Serialize for SmallVec<A>
1588 where
1589 A::Item: Serialize,
1590 {
1591 fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
1592 let mut state = serializer.serialize_seq(Some(self.len()))?;
1593 for item in self {
1594 state.serialize_element(&item)?;
1595 }
1596 state.end()
1597 }
1598 }
1599
1600 #[cfg(feature = "serde")]
1601 #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
1602 impl<'de, A: Array> Deserialize<'de> for SmallVec<A>
1603 where
1604 A::Item: Deserialize<'de>,
1605 {
1606 fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
1607 deserializer.deserialize_seq(SmallVecVisitor {
1608 phantom: PhantomData,
1609 })
1610 }
1611 }
1612
1613 #[cfg(feature = "serde")]
1614 struct SmallVecVisitor<A> {
1615 phantom: PhantomData<A>,
1616 }
1617
1618 #[cfg(feature = "serde")]
1619 impl<'de, A: Array> Visitor<'de> for SmallVecVisitor<A>
1620 where
1621 A::Item: Deserialize<'de>,
1622 {
1623 type Value = SmallVec<A>;
1624
1625 fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
1626 formatter.write_str("a sequence")
1627 }
1628
1629 fn visit_seq<B>(self, mut seq: B) -> Result<Self::Value, B::Error>
1630 where
1631 B: SeqAccess<'de>,
1632 {
1633 use serde::de::Error;
1634 let len = seq.size_hint().unwrap_or(0);
1635 let mut values = SmallVec::new();
1636 values.try_reserve(len).map_err(B::Error::custom)?;
1637
1638 while let Some(value) = seq.next_element()? {
1639 values.push(value);
1640 }
1641
1642 Ok(values)
1643 }
1644 }
1645
1646 #[cfg(feature = "specialization")]
1647 trait SpecFrom<A: Array, S> {
1648 fn spec_from(slice: S) -> SmallVec<A>;
1649 }
1650
1651 #[cfg(feature = "specialization")]
1652 mod specialization;
1653
1654 #[cfg(feature = "specialization")]
1655 impl<'a, A: Array> SpecFrom<A, &'a [A::Item]> for SmallVec<A>
1656 where
1657 A::Item: Copy,
1658 {
1659 #[inline]
1660 fn spec_from(slice: &'a [A::Item]) -> SmallVec<A> {
1661 SmallVec::from_slice(slice)
1662 }
1663 }
1664
1665 impl<'a, A: Array> From<&'a [A::Item]> for SmallVec<A>
1666 where
1667 A::Item: Clone,
1668 {
1669 #[cfg(not(feature = "specialization"))]
1670 #[inline]
1671 fn from(slice: &'a [A::Item]) -> SmallVec<A> {
1672 slice.iter().cloned().collect()
1673 }
1674
1675 #[cfg(feature = "specialization")]
1676 #[inline]
1677 fn from(slice: &'a [A::Item]) -> SmallVec<A> {
1678 SmallVec::spec_from(slice)
1679 }
1680 }
1681
1682 impl<A: Array> From<Vec<A::Item>> for SmallVec<A> {
1683 #[inline]
1684 fn from(vec: Vec<A::Item>) -> SmallVec<A> {
1685 SmallVec::from_vec(vec)
1686 }
1687 }
1688
1689 impl<A: Array> From<A> for SmallVec<A> {
1690 #[inline]
1691 fn from(array: A) -> SmallVec<A> {
1692 SmallVec::from_buf(array)
1693 }
1694 }
1695
1696 impl<A: Array, I: SliceIndex<[A::Item]>> ops::Index<I> for SmallVec<A> {
1697 type Output = I::Output;
1698
1699 fn index(&self, index: I) -> &I::Output {
1700 &(**self)[index]
1701 }
1702 }
1703
1704 impl<A: Array, I: SliceIndex<[A::Item]>> ops::IndexMut<I> for SmallVec<A> {
1705 fn index_mut(&mut self, index: I) -> &mut I::Output {
1706 &mut (&mut **self)[index]
1707 }
1708 }
1709
1710 #[allow(deprecated)]
1711 impl<A: Array> ExtendFromSlice<A::Item> for SmallVec<A>
1712 where
1713 A::Item: Copy,
1714 {
1715 fn extend_from_slice(&mut self, other: &[A::Item]) {
1716 SmallVec::extend_from_slice(self, other)
1717 }
1718 }
1719
1720 impl<A: Array> FromIterator<A::Item> for SmallVec<A> {
1721 #[inline]
1722 fn from_iter<I: IntoIterator<Item = A::Item>>(iterable: I) -> SmallVec<A> {
1723 let mut v = SmallVec::new();
1724 v.extend(iterable);
1725 v
1726 }
1727 }
1728
1729 impl<A: Array> Extend<A::Item> for SmallVec<A> {
1730 fn extend<I: IntoIterator<Item = A::Item>>(&mut self, iterable: I) {
1731 let mut iter = iterable.into_iter();
1732 let (lower_size_bound, _) = iter.size_hint();
1733 self.reserve(lower_size_bound);
1734
1735 unsafe {
1736 let (ptr, len_ptr, cap) = self.triple_mut();
1737 let mut len = SetLenOnDrop::new(len_ptr);
1738 while len.get() < cap {
1739 if let Some(out) = iter.next() {
1740 ptr::write(ptr.add(len.get()), out);
1741 len.increment_len(1);
1742 } else {
1743 return;
1744 }
1745 }
1746 }
1747
1748 for elem in iter {
1749 self.push(elem);
1750 }
1751 }
1752 }
1753
1754 impl<A: Array> fmt::Debug for SmallVec<A>
1755 where
1756 A::Item: fmt::Debug,
1757 {
1758 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1759 f.debug_list().entries(self.iter()).finish()
1760 }
1761 }
1762
1763 impl<A: Array> Default for SmallVec<A> {
1764 #[inline]
1765 fn default() -> SmallVec<A> {
1766 SmallVec::new()
1767 }
1768 }
1769
1770 #[cfg(feature = "may_dangle")]
1771 unsafe impl<#[may_dangle] A: Array> Drop for SmallVec<A> {
1772 fn drop(&mut self) {
1773 unsafe {
1774 if self.spilled() {
1775 let (ptr, len) = self.data.heap();
1776 Vec::from_raw_parts(ptr, len, self.capacity);
1777 } else {
1778 ptr::drop_in_place(&mut self[..]);
1779 }
1780 }
1781 }
1782 }
1783
1784 #[cfg(not(feature = "may_dangle"))]
1785 impl<A: Array> Drop for SmallVec<A> {
1786 fn drop(&mut self) {
1787 unsafe {
1788 if self.spilled() {
1789 let (ptr, len) = self.data.heap();
1790 Vec::from_raw_parts(ptr, len, self.capacity);
1791 } else {
1792 ptr::drop_in_place(&mut self[..]);
1793 }
1794 }
1795 }
1796 }
1797
1798 impl<A: Array> Clone for SmallVec<A>
1799 where
1800 A::Item: Clone,
1801 {
1802 #[inline]
1803 fn clone(&self) -> SmallVec<A> {
1804 SmallVec::from(self.as_slice())
1805 }
1806
1807 fn clone_from(&mut self, source: &Self) {
1808 // Inspired from `impl Clone for Vec`.
1809
1810 // drop anything that will not be overwritten
1811 self.truncate(source.len());
1812
1813 // self.len <= other.len due to the truncate above, so the
1814 // slices here are always in-bounds.
1815 let (init, tail) = source.split_at(self.len());
1816
1817 // reuse the contained values' allocations/resources.
1818 self.clone_from_slice(init);
1819 self.extend(tail.iter().cloned());
1820 }
1821 }
1822
1823 impl<A: Array, B: Array> PartialEq<SmallVec<B>> for SmallVec<A>
1824 where
1825 A::Item: PartialEq<B::Item>,
1826 {
1827 #[inline]
1828 fn eq(&self, other: &SmallVec<B>) -> bool {
1829 self[..] == other[..]
1830 }
1831 }
1832
1833 impl<A: Array> Eq for SmallVec<A> where A::Item: Eq {}
1834
1835 impl<A: Array> PartialOrd for SmallVec<A>
1836 where
1837 A::Item: PartialOrd,
1838 {
1839 #[inline]
1840 fn partial_cmp(&self, other: &SmallVec<A>) -> Option<cmp::Ordering> {
1841 PartialOrd::partial_cmp(&**self, &**other)
1842 }
1843 }
1844
1845 impl<A: Array> Ord for SmallVec<A>
1846 where
1847 A::Item: Ord,
1848 {
1849 #[inline]
1850 fn cmp(&self, other: &SmallVec<A>) -> cmp::Ordering {
1851 Ord::cmp(&**self, &**other)
1852 }
1853 }
1854
1855 impl<A: Array> Hash for SmallVec<A>
1856 where
1857 A::Item: Hash,
1858 {
1859 fn hash<H: Hasher>(&self, state: &mut H) {
1860 (**self).hash(state)
1861 }
1862 }
1863
1864 unsafe impl<A: Array> Send for SmallVec<A> where A::Item: Send {}
1865
1866 /// An iterator that consumes a `SmallVec` and yields its items by value.
1867 ///
1868 /// Returned from [`SmallVec::into_iter`][1].
1869 ///
1870 /// [1]: struct.SmallVec.html#method.into_iter
1871 pub struct IntoIter<A: Array> {
1872 data: SmallVec<A>,
1873 current: usize,
1874 end: usize,
1875 }
1876
1877 impl<A: Array> fmt::Debug for IntoIter<A>
1878 where
1879 A::Item: fmt::Debug,
1880 {
1881 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1882 f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
1883 }
1884 }
1885
1886 impl<A: Array + Clone> Clone for IntoIter<A>
1887 where
1888 A::Item: Clone,
1889 {
1890 fn clone(&self) -> IntoIter<A> {
1891 SmallVec::from(self.as_slice()).into_iter()
1892 }
1893 }
1894
1895 impl<A: Array> Drop for IntoIter<A> {
1896 fn drop(&mut self) {
1897 for _ in self {}
1898 }
1899 }
1900
1901 impl<A: Array> Iterator for IntoIter<A> {
1902 type Item = A::Item;
1903
1904 #[inline]
1905 fn next(&mut self) -> Option<A::Item> {
1906 if self.current == self.end {
1907 None
1908 } else {
1909 unsafe {
1910 let current = self.current;
1911 self.current += 1;
1912 Some(ptr::read(self.data.as_ptr().add(current)))
1913 }
1914 }
1915 }
1916
1917 #[inline]
1918 fn size_hint(&self) -> (usize, Option<usize>) {
1919 let size = self.end - self.current;
1920 (size, Some(size))
1921 }
1922 }
1923
1924 impl<A: Array> DoubleEndedIterator for IntoIter<A> {
1925 #[inline]
1926 fn next_back(&mut self) -> Option<A::Item> {
1927 if self.current == self.end {
1928 None
1929 } else {
1930 unsafe {
1931 self.end -= 1;
1932 Some(ptr::read(self.data.as_ptr().add(self.end)))
1933 }
1934 }
1935 }
1936 }
1937
1938 impl<A: Array> ExactSizeIterator for IntoIter<A> {}
1939 impl<A: Array> FusedIterator for IntoIter<A> {}
1940
1941 impl<A: Array> IntoIter<A> {
1942 /// Returns the remaining items of this iterator as a slice.
1943 pub fn as_slice(&self) -> &[A::Item] {
1944 let len = self.end - self.current;
1945 unsafe { core::slice::from_raw_parts(self.data.as_ptr().add(self.current), len) }
1946 }
1947
1948 /// Returns the remaining items of this iterator as a mutable slice.
1949 pub fn as_mut_slice(&mut self) -> &mut [A::Item] {
1950 let len = self.end - self.current;
1951 unsafe { core::slice::from_raw_parts_mut(self.data.as_mut_ptr().add(self.current), len) }
1952 }
1953 }
1954
1955 impl<A: Array> IntoIterator for SmallVec<A> {
1956 type IntoIter = IntoIter<A>;
1957 type Item = A::Item;
1958 fn into_iter(mut self) -> Self::IntoIter {
1959 unsafe {
1960 // Set SmallVec len to zero as `IntoIter` drop handles dropping of the elements
1961 let len = self.len();
1962 self.set_len(0);
1963 IntoIter {
1964 data: self,
1965 current: 0,
1966 end: len,
1967 }
1968 }
1969 }
1970 }
1971
1972 impl<'a, A: Array> IntoIterator for &'a SmallVec<A> {
1973 type IntoIter = slice::Iter<'a, A::Item>;
1974 type Item = &'a A::Item;
1975 fn into_iter(self) -> Self::IntoIter {
1976 self.iter()
1977 }
1978 }
1979
1980 impl<'a, A: Array> IntoIterator for &'a mut SmallVec<A> {
1981 type IntoIter = slice::IterMut<'a, A::Item>;
1982 type Item = &'a mut A::Item;
1983 fn into_iter(self) -> Self::IntoIter {
1984 self.iter_mut()
1985 }
1986 }
1987
1988 /// Types that can be used as the backing store for a SmallVec
1989 pub unsafe trait Array {
1990 /// The type of the array's elements.
1991 type Item;
1992 /// Returns the number of items the array can hold.
1993 fn size() -> usize;
1994 }
1995
1996 /// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
1997 ///
1998 /// Copied from https://github.com/rust-lang/rust/pull/36355
1999 struct SetLenOnDrop<'a> {
2000 len: &'a mut usize,
2001 local_len: usize,
2002 }
2003
2004 impl<'a> SetLenOnDrop<'a> {
2005 #[inline]
2006 fn new(len: &'a mut usize) -> Self {
2007 SetLenOnDrop {
2008 local_len: *len,
2009 len,
2010 }
2011 }
2012
2013 #[inline]
2014 fn get(&self) -> usize {
2015 self.local_len
2016 }
2017
2018 #[inline]
2019 fn increment_len(&mut self, increment: usize) {
2020 self.local_len += increment;
2021 }
2022 }
2023
2024 impl<'a> Drop for SetLenOnDrop<'a> {
2025 #[inline]
2026 fn drop(&mut self) {
2027 *self.len = self.local_len;
2028 }
2029 }
2030
2031 #[cfg(feature = "const_new")]
2032 impl<T, const N: usize> SmallVec<[T; N]> {
2033 /// Construct an empty vector.
2034 ///
2035 /// This is a `const` version of [`SmallVec::new`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays.
2036 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
2037 #[inline]
2038 pub const fn new_const() -> Self {
2039 SmallVec {
2040 capacity: 0,
2041 data: SmallVecData::from_const(MaybeUninit::uninit()),
2042 }
2043 }
2044
2045 /// The array passed as an argument is moved to be an inline version of `SmallVec`.
2046 ///
2047 /// This is a `const` version of [`SmallVec::from_buf`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays.
2048 #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
2049 #[inline]
2050 pub const fn from_const(items: [T; N]) -> Self {
2051 SmallVec {
2052 capacity: N,
2053 data: SmallVecData::from_const(MaybeUninit::new(items)),
2054 }
2055 }
2056 }
2057
2058 #[cfg(all(feature = "const_generics", not(doc)))]
2059 #[cfg_attr(docsrs, doc(cfg(feature = "const_generics")))]
2060 unsafe impl<T, const N: usize> Array for [T; N] {
2061 type Item = T;
2062 fn size() -> usize {
2063 N
2064 }
2065 }
2066
2067 #[cfg(any(not(feature = "const_generics"), doc))]
2068 macro_rules! impl_array(
2069 ($($size:expr),+) => {
2070 $(
2071 unsafe impl<T> Array for [T; $size] {
2072 type Item = T;
2073 fn size() -> usize { $size }
2074 }
2075 )+
2076 }
2077 );
2078
2079 #[cfg(any(not(feature = "const_generics"), doc))]
2080 impl_array!(
2081 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
2082 26, 27, 28, 29, 30, 31, 32, 36, 0x40, 0x60, 0x80, 0x100, 0x200, 0x400, 0x600, 0x800, 0x1000,
2083 0x2000, 0x4000, 0x6000, 0x8000, 0x10000, 0x20000, 0x40000, 0x60000, 0x80000, 0x10_0000
2084 );
2085
2086 /// Convenience trait for constructing a `SmallVec`
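///
/// # Example
///
/// A minimal sketch of converting a slice:
///
/// ```
/// use smallvec::{SmallVec, ToSmallVec};
/// let slice: &[i32] = &[1, 2, 3];
/// let v: SmallVec<[i32; 4]> = slice.to_smallvec();
/// assert_eq!(&*v, &[1, 2, 3]);
/// ```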
2087 pub trait ToSmallVec<A: Array> {
2088 /// Construct a new `SmallVec` from a slice.
2089 fn to_smallvec(&self) -> SmallVec<A>;
2090 }
2091
2092 impl<A: Array> ToSmallVec<A> for [A::Item]
2093 where
2094 A::Item: Copy,
2095 {
2096 #[inline]
2097 fn to_smallvec(&self) -> SmallVec<A> {
2098 SmallVec::from_slice(self)
2099 }
2100 }