# The Final Code

```rust
use std::alloc::{self, Layout};
use std::marker::PhantomData;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::ptr::{self, NonNull};

/// A low-level growable buffer: an owning pointer plus a capacity.
/// It knows nothing about which slots are initialized — element
/// lifecycle is `Vec`'s job; this type only allocates and frees.
struct RawVec<T> {
    ptr: NonNull<T>,
    cap: usize,
    _marker: PhantomData<T>,
}

// SAFETY: RawVec owns its buffer exactly like Box<[T]> would, so it is
// Send/Sync whenever T is.
unsafe impl<T: Send> Send for RawVec<T> {}
unsafe impl<T: Sync> Sync for RawVec<T> {}

impl<T> RawVec<T> {
    /// Creates an empty RawVec without allocating.
    fn new() -> Self {
        // For zero-sized types we report usize::MAX capacity so `grow`
        // is never needed. This branch should be stripped at compile time.
        let cap = if mem::size_of::<T>() == 0 { usize::MAX } else { 0 };

        // `NonNull::dangling()` doubles as "unallocated" and "zero-sized allocation"
        RawVec {
            ptr: NonNull::dangling(),
            cap,
            _marker: PhantomData,
        }
    }

    /// Doubles the capacity (or allocates capacity 1 the first time),
    /// aborting via `handle_alloc_error` on allocation failure.
    ///
    /// # Panics
    /// Panics if `T` is zero-sized (capacity overflow) or if the new
    /// allocation would exceed `isize::MAX` bytes.
    fn grow(&mut self) {
        // since we set the capacity to usize::MAX when T has size 0,
        // getting to here necessarily means the Vec is overfull.
        assert!(mem::size_of::<T>() != 0, "capacity overflow");

        let (new_cap, new_layout) = if self.cap == 0 {
            (1, Layout::array::<T>(1).unwrap())
        } else {
            // This can't overflow because we ensure self.cap <= isize::MAX.
            let new_cap = 2 * self.cap;

            // `Layout::array` checks that the number of bytes is <= usize::MAX,
            // but this is redundant since old_layout.size() <= isize::MAX,
            // so the `unwrap` should never fail.
            let new_layout = Layout::array::<T>(new_cap).unwrap();
            (new_cap, new_layout)
        };

        // Ensure that the new allocation doesn't exceed `isize::MAX` bytes.
        assert!(
            new_layout.size() <= isize::MAX as usize,
            "Allocation too large"
        );

        let new_ptr = if self.cap == 0 {
            // SAFETY: new_layout has nonzero size (T is not a ZST, cap >= 1).
            unsafe { alloc::alloc(new_layout) }
        } else {
            let old_layout = Layout::array::<T>(self.cap).unwrap();
            let old_ptr = self.ptr.as_ptr() as *mut u8;
            // SAFETY: old_ptr was allocated by this allocator with old_layout.
            unsafe { alloc::realloc(old_ptr, old_layout, new_layout.size()) }
        };

        // If allocation fails, `new_ptr` will be null, in which case we abort.
        self.ptr = match NonNull::new(new_ptr as *mut T) {
            Some(p) => p,
            None => alloc::handle_alloc_error(new_layout),
        };
        self.cap = new_cap;
    }
}

impl<T> Drop for RawVec<T> {
    fn drop(&mut self) {
        let elem_size = mem::size_of::<T>();

        // Nothing was ever allocated if cap == 0 or T is zero-sized.
        if self.cap != 0 && elem_size != 0 {
            // SAFETY: this buffer was allocated with exactly this layout.
            unsafe {
                alloc::dealloc(
                    self.ptr.as_ptr() as *mut u8,
                    Layout::array::<T>(self.cap).unwrap(),
                );
            }
        }
    }
}

88pub struct Vec<T> {
89 buf: RawVec<T>,
90 len: usize,
91}
92
93impl<T> Vec<T> {
17df50a5
XL
94 fn ptr(&self) -> *mut T {
95 self.buf.ptr.as_ptr()
96 }
c1a9b12d 97
17df50a5
XL
98 fn cap(&self) -> usize {
99 self.buf.cap
100 }
c1a9b12d
SL
101
102 pub fn new() -> Self {
17df50a5
XL
103 Vec {
104 buf: RawVec::new(),
105 len: 0,
106 }
c1a9b12d
SL
107 }
108 pub fn push(&mut self, elem: T) {
17df50a5
XL
109 if self.len == self.cap() {
110 self.buf.grow();
111 }
c1a9b12d
SL
112
113 unsafe {
17df50a5 114 ptr::write(self.ptr().add(self.len), elem);
c1a9b12d
SL
115 }
116
17df50a5 117 // Can't overflow, we'll OOM first.
c1a9b12d
SL
118 self.len += 1;
119 }
120
121 pub fn pop(&mut self) -> Option<T> {
122 if self.len == 0 {
123 None
124 } else {
125 self.len -= 1;
17df50a5 126 unsafe { Some(ptr::read(self.ptr().add(self.len))) }
c1a9b12d
SL
127 }
128 }
129
130 pub fn insert(&mut self, index: usize, elem: T) {
131 assert!(index <= self.len, "index out of bounds");
17df50a5
XL
132 if self.cap() == self.len {
133 self.buf.grow();
134 }
c1a9b12d
SL
135
136 unsafe {
17df50a5
XL
137 ptr::copy(
138 self.ptr().add(index),
139 self.ptr().add(index + 1),
140 self.len - index,
141 );
142 ptr::write(self.ptr().add(index), elem);
c1a9b12d
SL
143 self.len += 1;
144 }
145 }
146
147 pub fn remove(&mut self, index: usize) -> T {
148 assert!(index < self.len, "index out of bounds");
149 unsafe {
150 self.len -= 1;
17df50a5
XL
151 let result = ptr::read(self.ptr().add(index));
152 ptr::copy(
153 self.ptr().add(index + 1),
154 self.ptr().add(index),
155 self.len - index,
156 );
c1a9b12d
SL
157 result
158 }
159 }
160
161 pub fn into_iter(self) -> IntoIter<T> {
162 unsafe {
163 let iter = RawValIter::new(&self);
164 let buf = ptr::read(&self.buf);
165 mem::forget(self);
166
167 IntoIter {
168 iter: iter,
169 _buf: buf,
170 }
171 }
172 }
173
174 pub fn drain(&mut self) -> Drain<T> {
c1a9b12d 175 unsafe {
e9174d1e
SL
176 let iter = RawValIter::new(&self);
177
178 // this is a mem::forget safety thing. If Drain is forgotten, we just
179 // leak the whole Vec's contents. Also we need to do this *eventually*
180 // anyway, so why not do it now?
181 self.len = 0;
182
c1a9b12d 183 Drain {
e9174d1e 184 iter: iter,
c1a9b12d
SL
185 vec: PhantomData,
186 }
187 }
188 }
189}
190
191impl<T> Drop for Vec<T> {
192 fn drop(&mut self) {
193 while let Some(_) = self.pop() {}
5869c6ff 194 // deallocation is handled by RawVec
c1a9b12d
SL
195 }
196}
197
198impl<T> Deref for Vec<T> {
199 type Target = [T];
200 fn deref(&self) -> &[T] {
17df50a5 201 unsafe { std::slice::from_raw_parts(self.ptr(), self.len) }
c1a9b12d
SL
202 }
203}
204
205impl<T> DerefMut for Vec<T> {
206 fn deref_mut(&mut self) -> &mut [T] {
17df50a5 207 unsafe { std::slice::from_raw_parts_mut(self.ptr(), self.len) }
c1a9b12d
SL
208 }
209}
210
c1a9b12d
SL
/// A lifetime-erased by-value iterator over a range of memory.
/// Shared machinery for both `IntoIter` and `Drain`.
struct RawValIter<T> {
    start: *const T,
    end: *const T,
}

impl<T> RawValIter<T> {
    /// Builds an iterator over `slice`.
    ///
    /// # Safety
    /// This type carries no lifetime, so the caller must guarantee the
    /// iterator does not outlive the memory it points into.
    unsafe fn new(slice: &[T]) -> Self {
        let start = slice.as_ptr();
        let end = if mem::size_of::<T>() == 0 {
            // ZSTs: pointer offsets are meaningless, so encode the
            // remaining count directly into the address.
            (start as usize + slice.len()) as *const _
        } else if slice.is_empty() {
            // Don't offset the (possibly dangling) pointer of an empty slice.
            start
        } else {
            start.add(slice.len())
        };
        RawValIter { start, end }
    }
}

impl<T> Iterator for RawValIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        if self.start == self.end {
            return None;
        }
        unsafe {
            let result = ptr::read(self.start);
            // For ZSTs "advancing" means bumping the encoded count in the
            // address; otherwise step forward one element.
            self.start = if mem::size_of::<T>() == 0 {
                (self.start as usize + 1) as *const _
            } else {
                self.start.offset(1)
            };
            Some(result)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let elem_size = mem::size_of::<T>();
        // Divide by 1 for ZSTs, where the address difference *is* the count.
        let divisor = if elem_size == 0 { 1 } else { elem_size };
        let len = (self.end as usize - self.start as usize) / divisor;
        (len, Some(len))
    }
}

impl<T> DoubleEndedIterator for RawValIter<T> {
    fn next_back(&mut self) -> Option<T> {
        if self.start == self.end {
            return None;
        }
        unsafe {
            // Step the end pointer back first, then read that slot.
            self.end = if mem::size_of::<T>() == 0 {
                (self.end as usize - 1) as *const _
            } else {
                self.end.offset(-1)
            };
            Some(ptr::read(self.end))
        }
    }
}

274pub struct IntoIter<T> {
275 _buf: RawVec<T>, // we don't actually care about this. Just need it to live.
276 iter: RawValIter<T>,
277}
278
279impl<T> Iterator for IntoIter<T> {
280 type Item = T;
17df50a5
XL
281 fn next(&mut self) -> Option<T> {
282 self.iter.next()
283 }
284 fn size_hint(&self) -> (usize, Option<usize>) {
285 self.iter.size_hint()
286 }
c1a9b12d
SL
287}
288
289impl<T> DoubleEndedIterator for IntoIter<T> {
17df50a5
XL
290 fn next_back(&mut self) -> Option<T> {
291 self.iter.next_back()
292 }
c1a9b12d
SL
293}
294
295impl<T> Drop for IntoIter<T> {
296 fn drop(&mut self) {
297 for _ in &mut *self {}
298 }
299}
300
c1a9b12d
SL
301pub struct Drain<'a, T: 'a> {
302 vec: PhantomData<&'a mut Vec<T>>,
303 iter: RawValIter<T>,
304}
305
306impl<'a, T> Iterator for Drain<'a, T> {
307 type Item = T;
17df50a5
XL
308 fn next(&mut self) -> Option<T> {
309 self.iter.next()
310 }
311 fn size_hint(&self) -> (usize, Option<usize>) {
312 self.iter.size_hint()
313 }
c1a9b12d
SL
314}
315
316impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
17df50a5
XL
317 fn next_back(&mut self) -> Option<T> {
318 self.iter.next_back()
319 }
c1a9b12d
SL
320}
321
322impl<'a, T> Drop for Drain<'a, T> {
323 fn drop(&mut self) {
324 // pre-drain the iter
5869c6ff 325 for _ in &mut *self {}
c1a9b12d
SL
326 }
327}
5869c6ff 328#
b7449926
XL
329# fn main() {
330# tests::create_push_pop();
331# tests::iter_test();
332# tests::test_drain();
333# tests::test_zst();
334# println!("All tests finished OK");
335# }
5869c6ff 336#
b7449926
XL
337# mod tests {
338# use super::*;
17df50a5 339#
b7449926
XL
340# pub fn create_push_pop() {
341# let mut v = Vec::new();
342# v.push(1);
343# assert_eq!(1, v.len());
344# assert_eq!(1, v[0]);
345# for i in v.iter_mut() {
346# *i += 1;
347# }
348# v.insert(0, 5);
349# let x = v.pop();
350# assert_eq!(Some(2), x);
351# assert_eq!(1, v.len());
352# v.push(10);
353# let x = v.remove(0);
354# assert_eq!(5, x);
355# assert_eq!(1, v.len());
356# }
13cf67c4 357#
b7449926
XL
358# pub fn iter_test() {
359# let mut v = Vec::new();
360# for i in 0..10 {
361# v.push(Box::new(i))
362# }
363# let mut iter = v.into_iter();
364# let first = iter.next().unwrap();
365# let last = iter.next_back().unwrap();
366# drop(iter);
367# assert_eq!(0, *first);
368# assert_eq!(9, *last);
369# }
13cf67c4 370#
b7449926
XL
371# pub fn test_drain() {
372# let mut v = Vec::new();
373# for i in 0..10 {
374# v.push(Box::new(i))
375# }
376# {
377# let mut drain = v.drain();
378# let first = drain.next().unwrap();
379# let last = drain.next_back().unwrap();
380# assert_eq!(0, *first);
381# assert_eq!(9, *last);
382# }
383# assert_eq!(0, v.len());
384# v.push(Box::new(1));
385# assert_eq!(1, *v.pop().unwrap());
386# }
13cf67c4 387#
b7449926
XL
388# pub fn test_zst() {
389# let mut v = Vec::new();
390# for _i in 0..10 {
391# v.push(())
392# }
13cf67c4 393#
b7449926 394# let mut count = 0;
13cf67c4 395#
b7449926
XL
396# for _ in v.into_iter() {
397# count += 1
398# }
13cf67c4 399#
b7449926
XL
400# assert_eq!(10, count);
401# }
402# }
```