]> git.proxmox.com Git - rustc.git/blob - src/doc/nomicon/vec-final.md
Imported Upstream version 1.3.0+dfsg1
[rustc.git] / src / doc / nomicon / vec-final.md
1 % The Final Code
2
3 ```rust
4 #![feature(unique)]
5 #![feature(heap_api)]
6
7 use std::ptr::{Unique, self};
8 use std::rt::heap;
9 use std::mem;
10 use std::ops::{Deref, DerefMut};
11 use std::marker::PhantomData;
12
13
14
15
16
/// Low-level buffer abstraction: owns the allocation (pointer + capacity)
/// but knows nothing about how many elements are initialized.
/// `Unique<T>` expresses unique ownership of `T`s and keeps the pointer
/// non-null so `Option<RawVec<T>>`-style niches work.
struct RawVec<T> {
    ptr: Unique<T>, // heap::EMPTY when unallocated or T is zero-sized (see `new`)
    cap: usize,     // capacity in elements; usize::MAX sentinel for zero-sized T
}
21
impl<T> RawVec<T> {
    /// Creates an empty buffer without allocating.
    fn new() -> Self {
        unsafe {
            // !0 is usize::MAX. This branch should be stripped at compile time.
            // Zero-sized types never need real memory, so we pretend the buffer
            // is already at maximum capacity and `grow` is never reached.
            let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };

            // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
            RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap }
        }
    }

    /// Doubles the capacity (first call allocates capacity 1).
    /// Calls `oom()` (process abort) if the allocator reports failure.
    fn grow(&mut self) {
        unsafe {
            let elem_size = mem::size_of::<T>();

            // since we set the capacity to usize::MAX when elem_size is
            // 0, getting to here necessarily means the Vec is overfull.
            assert!(elem_size != 0, "capacity overflow");

            let align = mem::align_of::<T>();

            // First growth allocates fresh; later growths reallocate in place
            // when the allocator can, copying otherwise.
            let (new_cap, ptr) = if self.cap == 0 {
                let ptr = heap::allocate(elem_size, align);
                (1, ptr)
            } else {
                let new_cap = 2 * self.cap;
                let ptr = heap::reallocate(*self.ptr as *mut _,
                                           self.cap * elem_size,
                                           new_cap * elem_size,
                                           align);
                (new_cap, ptr)
            };

            // If allocate or reallocate fail, we'll get `null` back
            if ptr.is_null() { oom() }

            self.ptr = Unique::new(ptr as *mut _);
            self.cap = new_cap;
        }
    }
}
63
impl<T> Drop for RawVec<T> {
    /// Frees the backing allocation. Element destructors are NOT run here —
    /// that is the responsibility of whoever tracks the initialized length
    /// (`Vec`/`IntoIter` drop the elements before this runs).
    fn drop(&mut self) {
        let elem_size = mem::size_of::<T>();
        // Skip deallocation when nothing was ever allocated (cap == 0) or the
        // type is zero-sized (cap is the usize::MAX sentinel, no real memory).
        if self.cap != 0 && elem_size != 0 {
            let align = mem::align_of::<T>();

            let num_bytes = elem_size * self.cap;
            unsafe {
                heap::deallocate(*self.ptr as *mut _, num_bytes, align);
            }
        }
    }
}
77
78
79
80
81
/// A simplified re-implementation of `std::vec::Vec`.
/// `buf` owns the allocation; `len` counts the initialized elements,
/// which always occupy the prefix `[0, len)` of the buffer.
pub struct Vec<T> {
    buf: RawVec<T>,
    len: usize,
}
86
impl<T> Vec<T> {
    // Raw pointer to the start of the buffer (heap::EMPTY if unallocated).
    fn ptr(&self) -> *mut T { *self.buf.ptr }

    // Current capacity in elements, delegated to the buffer.
    fn cap(&self) -> usize { self.buf.cap }

    /// Creates an empty vector without allocating.
    pub fn new() -> Self {
        Vec { buf: RawVec::new(), len: 0 }
    }
    /// Appends `elem`, growing the buffer if it is full.
    pub fn push(&mut self, elem: T) {
        if self.len == self.cap() { self.buf.grow(); }

        unsafe {
            // `write` moves elem in without reading/dropping the (uninit)
            // memory at the destination.
            ptr::write(self.ptr().offset(self.len as isize), elem);
        }

        // Can't fail, we'll OOM first.
        self.len += 1;
    }

    /// Removes and returns the last element, or `None` if empty.
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            // Decrement first so the slot is logically uninitialized before
            // we move the value out with `read`.
            self.len -= 1;
            unsafe {
                Some(ptr::read(self.ptr().offset(self.len as isize)))
            }
        }
    }

    /// Inserts `elem` at `index`, shifting later elements right.
    ///
    /// Panics if `index > len`.
    pub fn insert(&mut self, index: usize, elem: T) {
        // Note: `<=` because it's valid to insert after everything
        // which would be equivalent to push.
        assert!(index <= self.len, "index out of bounds");
        if self.cap() == self.len { self.buf.grow(); }

        unsafe {
            if index < self.len {
                // ptr::copy(src, dest, len) is overlap-safe (memmove):
                // shift [index, len) one slot to the right.
                ptr::copy(self.ptr().offset(index as isize),
                          self.ptr().offset(index as isize + 1),
                          self.len - index);
            }
            ptr::write(self.ptr().offset(index as isize), elem);
            self.len += 1;
        }
    }

    /// Removes and returns the element at `index`, shifting later
    /// elements left.
    ///
    /// Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        // Note: `<` because it's *not* valid to remove after everything.
        assert!(index < self.len, "index out of bounds");
        unsafe {
            self.len -= 1;
            let result = ptr::read(self.ptr().offset(index as isize));
            // Close the gap: shift [index+1, old_len) one slot left.
            ptr::copy(self.ptr().offset(index as isize + 1),
                      self.ptr().offset(index as isize),
                      self.len - index);
            result
        }
    }

    /// Consumes the vector into a by-value iterator.
    pub fn into_iter(self) -> IntoIter<T> {
        unsafe {
            let iter = RawValIter::new(&self);
            // Move the buffer out without running Vec's destructor, then
            // forget `self` so elements aren't dropped twice. IntoIter now
            // owns both the allocation and the remaining elements.
            let buf = ptr::read(&self.buf);
            mem::forget(self);

            IntoIter {
                iter: iter,
                _buf: buf,
            }
        }
    }

    /// Removes all elements, yielding them through the returned iterator.
    pub fn drain(&mut self) -> Drain<T> {
        // this is a mem::forget safety thing. If this is forgotten, we just
        // leak the whole Vec's contents. Also we need to do this *eventually*
        // anyway, so why not do it now?
        self.len = 0;
        unsafe {
            Drain {
                iter: RawValIter::new(&self),
                vec: PhantomData,
            }
        }
    }
}
170
171 impl<T> Drop for Vec<T> {
172 fn drop(&mut self) {
173 while let Some(_) = self.pop() {}
174 // allocation is handled by RawVec
175 }
176 }
177
impl<T> Deref for Vec<T> {
    type Target = [T];
    /// Views the initialized prefix as a slice, giving us all of
    /// `[T]`'s methods (len, iter, indexing, ...) for free.
    fn deref(&self) -> &[T] {
        unsafe {
            // Safe: `ptr()` is valid for `len` initialized elements —
            // push/pop/insert/remove maintain exactly that invariant.
            ::std::slice::from_raw_parts(self.ptr(), self.len)
        }
    }
}
186
impl<T> DerefMut for Vec<T> {
    /// Mutable counterpart of `Deref`: the initialized prefix as `&mut [T]`.
    fn deref_mut(&mut self) -> &mut [T] {
        unsafe {
            // Safe for the same reason as `deref`, and `&mut self` guarantees
            // the slice is uniquely borrowed.
            ::std::slice::from_raw_parts_mut(self.ptr(), self.len)
        }
    }
}
194
195
196
197
198
/// Shared core of IntoIter and Drain: a pair of raw cursors over a run of
/// initialized elements. Holds no lifetime/ownership info itself — the
/// wrapping type is responsible for keeping the buffer alive.
struct RawValIter<T> {
    start: *const T,
    end: *const T, // one-past-the-end; for ZSTs, `start + remaining_count` (see `new`)
}
203
impl<T> RawValIter<T> {
    /// Builds a cursor pair over `slice`.
    ///
    /// Unsafe contract: the caller must keep the underlying buffer alive for
    /// as long as the iterator is used (the iterator stores raw pointers only).
    unsafe fn new(slice: &[T]) -> Self {
        RawValIter {
            start: slice.as_ptr(),
            end: if mem::size_of::<T>() == 0 {
                // ZSTs: `offset` would be a no-op (stride 0), so encode the
                // remaining count directly into the pointer value instead.
                ((slice.as_ptr() as usize) + slice.len()) as *const _
            } else if slice.len() == 0 {
                // Avoid `offset(0)` on a possibly-unallocated pointer;
                // start == end already means "empty".
                slice.as_ptr()
            } else {
                slice.as_ptr().offset(slice.len() as isize)
            }
        }
    }
}
218
impl<T> Iterator for RawValIter<T> {
    type Item = T;
    /// Moves the front element out and advances `start`.
    fn next(&mut self) -> Option<T> {
        if self.start == self.end {
            None
        } else {
            unsafe {
                // `read` moves the value out; the slot is logically
                // uninitialized from here on. For ZSTs, offset(1) still
                // advances the pointer-as-counter by 1 (stride is 0 bytes
                // but the address arithmetic adds size_of::<T>() == ... —
                // NOTE(review): relies on this 1.3-era offset behavior;
                // modern code uses explicit counter math here).
                let result = ptr::read(self.start);
                self.start = self.start.offset(1);
                Some(result)
            }
        }
    }

    /// Exact remaining length derived from pointer distance.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let elem_size = mem::size_of::<T>();
        // Divide byte distance by element size; for ZSTs the "distance" IS
        // the count (see `new`), so divide by 1 instead of 0.
        let len = (self.end as usize - self.start as usize)
                  / if elem_size == 0 { 1 } else { elem_size };
        (len, Some(len))
    }
}
240
impl<T> DoubleEndedIterator for RawValIter<T> {
    /// Moves the back element out and retreats `end`.
    fn next_back(&mut self) -> Option<T> {
        if self.start == self.end {
            None
        } else {
            unsafe {
                // Retreat first: `end` is one-past-the-end, so the element
                // to yield lives at `end - 1`.
                self.end = self.end.offset(-1);
                Some(ptr::read(self.end))
            }
        }
    }
}
253
254
255
256
/// By-value iterator produced by `Vec::into_iter`.
/// Owns the allocation (via `_buf`) and the not-yet-yielded elements
/// (via `iter`'s cursor range).
pub struct IntoIter<T> {
    _buf: RawVec<T>, // we don't actually care about this. Just need it to live.
    iter: RawValIter<T>,
}
261
262 impl<T> Iterator for IntoIter<T> {
263 type Item = T;
264 fn next(&mut self) -> Option<T> { self.iter.next() }
265 fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
266 }
267
268 impl<T> DoubleEndedIterator for IntoIter<T> {
269 fn next_back(&mut self) -> Option<T> { self.iter.next_back() }
270 }
271
272 impl<T> Drop for IntoIter<T> {
273 fn drop(&mut self) {
274 for _ in &mut *self {}
275 }
276 }
277
278
279
280
/// Iterator produced by `Vec::drain`: yields (and removes) every element.
/// `vec` is PhantomData carrying the `&'a mut Vec<T>` borrow so the Vec
/// can't be used (or freed) while the drain is live.
pub struct Drain<'a, T: 'a> {
    vec: PhantomData<&'a mut Vec<T>>,
    iter: RawValIter<T>,
}
285
286 impl<'a, T> Iterator for Drain<'a, T> {
287 type Item = T;
288 fn next(&mut self) -> Option<T> { self.iter.next_back() }
289 fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
290 }
291
292 impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
293 fn next_back(&mut self) -> Option<T> { self.iter.next_back() }
294 }
295
296 impl<'a, T> Drop for Drain<'a, T> {
297 fn drop(&mut self) {
298 // pre-drain the iter
299 for _ in &mut self.iter {}
300 }
301 }
302
/// Abort the process, we're out of memory!
///
/// In practice this is probably dead code on most OSes
fn oom() {
    // Deliberately unusual exit code so an allocator failure is
    // distinguishable from ordinary process exits.
    ::std::process::exit(-9999);
}
309
310 # fn main() {}
311 ```