7 use std::ptr::{Unique, self};
10 use std::ops::{Deref, DerefMut};
11 use std::marker::PhantomData;
// !0 is usize::MAX. This branch should be stripped at compile time.
// Zero-sized types never allocate, so their "capacity" is effectively
// infinite from the start.
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };

// heap::EMPTY doubles as "unallocated" and "zero-sized allocation";
// no real heap memory is requested until the first grow.
RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap }
let elem_size = mem::size_of::<T>();

// since we set the capacity to usize::MAX when elem_size is
// 0, getting to here necessarily means the Vec is overfull.
// A zero-sized type can therefore never legitimately reach this point.
assert!(elem_size != 0, "capacity overflow");

let align = mem::align_of::<T>();

// cap == 0 means "never allocated": do a fresh single-element allocate;
// otherwise double the capacity and reallocate the existing block.
let (new_cap, ptr) = if self.cap == 0 {
let ptr = heap::allocate(elem_size, align);
// (growth branch) NOTE(review): `2 * self.cap` is assumed not to
// overflow -- holds while cap * elem_size stays <= isize::MAX; confirm
// against the elided lines of this function.
let new_cap = 2 * self.cap;
let ptr = heap::reallocate(*self.ptr as *mut _,

// If allocate or reallocate fail, we'll get `null` back
if ptr.is_null() { oom() }

self.ptr = Unique::new(ptr as *mut _);
// Frees the backing buffer. Skipped entirely for zero-sized types
// (nothing was ever allocated) and for cap == 0 (still unallocated,
// ptr is just heap::EMPTY).
impl<T> Drop for RawVec<T> {
let elem_size = mem::size_of::<T>();
if self.cap != 0 && elem_size != 0 {
let align = mem::align_of::<T>();

// Size of the allocation in bytes, exactly as originally requested.
let num_bytes = elem_size * self.cap;
heap::deallocate(*self.ptr as *mut _, num_bytes, align);
88 fn ptr(&self) -> *mut T { *self.buf.ptr }
90 fn cap(&self) -> usize { self.buf.cap }
/// Creates a new, empty `Vec<T>` without allocating.
pub fn new() -> Self {
    Vec { buf: RawVec::new(), len: 0 }
/// Appends `elem` to the end of the vector, growing the buffer if full.
pub fn push(&mut self, elem: T) {
    if self.len == self.cap() { self.buf.grow(); }

    // The slot at index `len` is within capacity after grow() and is
    // uninitialized, so write (not assignment, which would drop) is used.
    ptr::write(self.ptr().offset(self.len as isize), elem);

    // Can't fail, we'll OOM first.
/// Removes and returns the last element, or `None` when empty.
pub fn pop(&mut self) -> Option<T> {
    // NOTE(review): assumes `self.len` was decremented on an elided line,
    // so this reads the element just past the new length; the slot is
    // logically uninitialized afterwards.
    Some(ptr::read(self.ptr().offset(self.len as isize)))
/// Inserts `elem` at `index`, shifting every later element right by one.
///
/// Panics if `index > len`; `index == len` is allowed (equivalent to push).
pub fn insert(&mut self, index: usize, elem: T) {
    assert!(index <= self.len, "index out of bounds");
    if self.cap() == self.len { self.buf.grow(); }

    // Shift [index, len) one slot to the right; ptr::copy is the
    // memmove-style copy, so the overlapping ranges are fine.
    if index < self.len {
    ptr::copy(self.ptr().offset(index as isize),
    self.ptr().offset(index as isize + 1),

    // The gap at `index` is now logically uninitialized: write into it.
    ptr::write(self.ptr().offset(index as isize), elem);
/// Removes and returns the element at `index`, shifting the tail left.
///
/// Panics if `index >= len` (unlike insert, removing "after the end"
/// is never valid).
pub fn remove(&mut self, index: usize) -> T {
    assert!(index < self.len, "index out of bounds");

    // Move the value out first; its slot is then logically dead.
    let result = ptr::read(self.ptr().offset(index as isize));
    // Shift [index + 1, len) one slot left over the dead slot
    // (overlap-safe memmove-style copy).
    ptr::copy(self.ptr().offset(index as isize + 1),
    self.ptr().offset(index as isize),
/// Consumes the Vec, returning an iterator over its elements by value.
pub fn into_iter(self) -> IntoIter<T> {
    let iter = RawValIter::new(&self);
    // Steal the buffer out of `self` bitwise, without running its
    // destructor; NOTE(review): assumes a `mem::forget(self)` follows
    // on an elided line so the buffer isn't freed twice.
    let buf = ptr::read(&self.buf);
/// Returns an iterator that removes and yields every element, leaving
/// the Vec empty but with its allocation intact.
pub fn drain(&mut self) -> Drain<T> {
    // this is a mem::forget safety thing. If this is forgotten, we just
    // leak the whole Vec's contents. Also we need to do this *eventually*
    // anyway, so why not do it now?
    iter: RawValIter::new(&self),
// Pops every remaining element so each destructor runs; the buffer
// itself is then freed by RawVec's own Drop impl.
impl<T> Drop for Vec<T> {
    while let Some(_) = self.pop() {}
    // allocation is handled by RawVec
// Deref to a slice gives Vec all of `[T]`'s read-only methods for free.
impl<T> Deref for Vec<T> {
    fn deref(&self) -> &[T] {
        // ptr() points at `len` contiguous initialized elements.
        ::std::slice::from_raw_parts(self.ptr(), self.len)
// Mutable counterpart: exposes `[T]`'s mutating slice methods.
impl<T> DerefMut for Vec<T> {
    fn deref_mut(&mut self) -> &mut [T] {
        // Same invariant as Deref; &mut self guarantees exclusivity.
        ::std::slice::from_raw_parts_mut(self.ptr(), self.len)
// A raw, lifetime-less iterator over the memory range [start, end).
// Shared by IntoIter and Drain; whoever owns it must keep the backing
// buffer alive for as long as the iterator is used.
struct RawValIter<T> {
impl<T> RawValIter<T> {
    // unsafe to construct because the iterator carries no lifetime:
    // the caller must guarantee `slice` outlives the RawValIter.
    unsafe fn new(slice: &[T]) -> Self {
        start: slice.as_ptr(),
        end: if mem::size_of::<T>() == 0 {
            // ZSTs: offset() never moves the pointer, so encode the
            // element count directly in the pointer's integer value.
            ((slice.as_ptr() as usize) + slice.len()) as *const _
        } else if slice.len() == 0 {
            // One-past-the-end pointer for a non-empty, sized slice.
            slice.as_ptr().offset(slice.len() as isize)
impl<T> Iterator for RawValIter<T> {
    fn next(&mut self) -> Option<T> {
        // start == end means exhausted.
        if self.start == self.end {
            // start < end here, so this slot holds a live value that
            // is moved out; start is then bumped past it.
            let result = ptr::read(self.start);
            // NOTE(review): for ZSTs, offset(1) adds 0 bytes and start
            // would never advance -- confirm a ZST branch isn't on an
            // elided line, otherwise this loops forever for ZSTs.
            self.start = self.start.offset(1);

    // Exact remaining length: byte distance divided by element size
    // (for ZSTs the "distance" already is the element count).
    fn size_hint(&self) -> (usize, Option<usize>) {
        let elem_size = mem::size_of::<T>();
        let len = (self.end as usize - self.start as usize)
                  / if elem_size == 0 { 1 } else { elem_size };
impl<T> DoubleEndedIterator for RawValIter<T> {
    fn next_back(&mut self) -> Option<T> {
        if self.start == self.end {
            // end > start here, so the slot just before `end` is live.
            // NOTE(review): offset(-1) is a no-op for ZSTs -- confirm a
            // ZST branch isn't on an elided line.
            self.end = self.end.offset(-1);
            Some(ptr::read(self.end))
// By-value iterator returned from Vec::into_iter.
pub struct IntoIter<T> {
    _buf: RawVec<T>, // we don't actually care about this. Just need it to live.
// Forward iteration simply delegates to the underlying RawValIter.
impl<T> Iterator for IntoIter<T> {
    fn next(&mut self) -> Option<T> { self.iter.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
impl<T> DoubleEndedIterator for IntoIter<T> {
    // Back-to-front iteration, also delegated to RawValIter.
    fn next_back(&mut self) -> Option<T> { self.iter.next_back() }
impl<T> Drop for IntoIter<T> {
    // Exhaust any un-yielded elements so their destructors run; the
    // buffer itself is then freed by `_buf`'s RawVec::drop.
    for _ in &mut *self {}
// Iterator returned by Vec::drain. The PhantomData records the mutable
// borrow of the Vec, so the Vec can't be touched while elements are
// being moved out.
pub struct Drain<'a, T: 'a> {
    vec: PhantomData<&'a mut Vec<T>>,
// Forward iteration delegates to the shared RawValIter, exactly like
// IntoIter's impl does. (Bug fix: `next()` previously delegated to
// `self.iter.next_back()`, so forward iteration yielded elements from
// the wrong end -- compare IntoIter::next and Drain::next_back.)
impl<'a, T> Iterator for Drain<'a, T> {
    fn next(&mut self) -> Option<T> { self.iter.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
    // Reverse iteration delegates straight through to RawValIter.
    fn next_back(&mut self) -> Option<T> { self.iter.next_back() }
impl<'a, T> Drop for Drain<'a, T> {
    // pre-drain the iter: any elements the caller didn't consume still
    // need their destructors to run before the borrow of the Vec ends.
    for _ in &mut self.iter {}
/// Abort the process, we're out of memory!
///
/// In practice this is probably dead code on most OSes.
/// NOTE(review): `exit` runs libc atexit handlers, which may themselves
/// try to allocate; `std::process::abort` matches the "abort" doc above
/// more faithfully -- confirm which is intended.
::std::process::exit(-9999);