// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy all of the objects within them
//! at once, when the arena itself is destroyed. They do not support
//! deallocation of individual objects while the arena is still alive. The
//! benefit of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements two arenas: `TypedArena`, which is simpler but can
//! only hold objects of a single type, and `Arena`, which is more complex and
//! slower but can hold objects of any type.
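//!
//! # Example
//!
//! A brief usage sketch (hypothetical consumer code; this crate is unstable
//! and gated on the `rustc_private` feature, so the example is not compiled):
//!
//! ```ignore
//! use arena::{Arena, TypedArena};
//!
//! // `TypedArena`: fast path, one type, values passed by value.
//! let typed = TypedArena::new();
//! let x: &mut i32 = typed.alloc(5);
//! *x += 1;
//! assert_eq!(*x, 6);
//!
//! // `Arena`: any type, values produced by a closure.
//! let general = Arena::new();
//! let s: &mut String = general.alloc(|| "hello".to_string());
//! s.push_str(", world");
//! ```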

// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "arena"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/")]

#![feature(alloc)]
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(heap_api)]
#![feature(oom)]
#![feature(ptr_as_ref)]
#![feature(raw)]
#![feature(staged_api)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker;
use std::mem;
use std::ptr;
use std::rc::Rc;

use alloc::heap::{allocate, deallocate};

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    data: Rc<RefCell<Vec<u8>>>,
    fill: Cell<usize>,
    is_copy: Cell<bool>,
}

impl Chunk {
    fn capacity(&self) -> usize {
        self.data.borrow().capacity()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user-provided
/// initializer function. It is important not to run the destructor on
/// uninitialized objects, but detecting them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low-order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
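///
/// # Example
///
/// A minimal usage sketch (hypothetical consumer code; the crate is unstable,
/// so the example is not compiled):
///
/// ```ignore
/// let arena = Arena::new();
/// // `Copy` data takes the fast path; data with destructors is traced and
/// // dropped when the arena is dropped.
/// let n: &mut usize = arena.alloc(|| 42);
/// let v: &mut Vec<u8> = arena.alloc(|| vec![1, 2, 3]);
/// assert_eq!(*n, 42);
/// assert_eq!(v.len(), 3);
/// ```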
pub struct Arena<'longer_than_self> {
    // The head is separated out from the list as an unbenchmarked
    // micro-optimization, to avoid needing to match on the list to access
    // the head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
    _marker: marker::PhantomData<*mut &'longer_than_self ()>,
}

impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: marker::PhantomData,
        }
    }
}

fn chunk(size: usize, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0),
        is_copy: Cell::new(is_copy),
    }
}

impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}

#[inline]
fn round_up(base: usize, align: usize) -> usize {
    // `align` must be a power of two.
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}
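
// Sanity-check sketch for `round_up`: it rounds up to the next multiple of a
// power-of-two alignment.
#[test]
fn test_round_up() {
    assert_eq!(round_up(0, 8), 0);
    assert_eq!(round_up(1, 8), 8);
    assert_eq!(round_up(16, 16), 16);
    assert_eq!(round_up(17, 16), 32);
}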

// Walk down a chunk, running the destructors for any objects stored
// in it.
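//
// Each allocation in a non-copy chunk is laid out as (an illustration of the
// scheme described in the `Arena` docs above):
//
//     [ tydesc pointer | padding to the object's alignment | object |
//       padding up to pointer alignment for the next tydesc ]
//
// The low bit of the stored tydesc pointer records whether the object was
// fully initialized.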
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data = buf.offset(idx as isize) as *const usize;
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}

// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}
#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}
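
// Sanity-check sketch for the bitpacking round trip: any real tydesc pointer
// is at least pointer-aligned, so its low bit is free. The pointer value used
// here is an arbitrary aligned address chosen for illustration.
#[test]
fn test_bitpack_tydesc_ptr_roundtrip() {
    let p = 0x1000 as *const TyDesc;
    assert_eq!(un_bitpack_tydesc_ptr(bitpack_tydesc_ptr(p, true)), (p, true));
    assert_eq!(un_bitpack_tydesc_ptr(bitpack_tydesc_ptr(p, false)), (p, false));
}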

// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
struct TyDesc {
    drop_glue: fn(*const i8),
    size: usize,
    align: usize,
}

trait AllTypes { fn dummy(&self) { } }
impl<T: ?Sized> AllTypes for T { }

unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    // Fabricate a reference at a dummy non-null address; we only need it to
    // build a trait object so we can read out the vtable pointer.
    let ptr = &*(1 as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    obj.vtable as *const TyDesc
}

impl<'longer_than_self> Arena<'longer_than_self> {
    fn chunk_size(&self) -> usize {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());

        *self.copy_head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), true);

        self.alloc_copy_inner(n_bytes, align)
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as isize)
        }
    }

    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            &mut *ptr
        }
    }

    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: usize,
                          align: usize) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());

        *self.head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), false);

        self.alloc_noncopy_inner(n_bytes, align)
    }

    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: usize,
                           align: usize) -> (*const u8, *const u8) {
        // Be careful not to keep any `head` borrows active here, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            (buf.offset(tydesc_start as isize), buf.offset(start as isize))
        }
    }

    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, false);
            // Actually initialize it
            ptr::write(&mut (*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            &mut *ptr
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
    #[inline]
    pub fn alloc<T: 'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}

#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
#[should_panic]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1, 2] });
    }
    // Now, panic while allocating
    arena.alloc::<Rc<i32>, _>(|| {
        panic!();
    });
}

/// A faster arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<*mut TypedArenaChunk<T>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: marker::PhantomData<T>,
}

struct TypedArenaChunk<T> {
    marker: marker::PhantomData<T>,

    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    capacity: usize,

    // Objects follow here, suitably aligned.
}

fn calculate_size<T>(capacity: usize) -> usize {
    let mut size = mem::size_of::<TypedArenaChunk<T>>();
    size = round_up(size, mem::align_of::<T>());
    let elem_size = mem::size_of::<T>();
    let elems_size = elem_size.checked_mul(capacity).unwrap();
    size = size.checked_add(elems_size).unwrap();
    size
}
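
// As a rough illustration of `calculate_size` (assuming a typical 64-bit
// target, where the chunk header is a pointer plus a usize = 16 bytes):
// for `T = u64` and a capacity of 8, the result is 16 + 8 * 8 = 80 bytes.
// Exact values depend on the target's pointer width and on `T`'s size and
// alignment.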

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
                  -> *mut TypedArenaChunk<T> {
        let size = calculate_size::<T>(capacity);
        let chunk = allocate(size, mem::align_of::<TypedArenaChunk<T>>())
                        as *mut TypedArenaChunk<T>;
        if chunk.is_null() { alloc::oom() }
        (*chunk).next = next;
        (*chunk).capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk and every chunk after it, freeing their
    /// memory. If `T` needs dropping, destructors are run for the first `len`
    /// objects of this chunk and for every object in the following chunks
    /// (which are always full).
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in 0..len {
                ptr::read(start as *const T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as isize)
            }
        }

        // Destroy the next chunk.
        let next = self.next;
        let size = calculate_size::<T>(self.capacity);
        let self_ptr: *mut TypedArenaChunk<T> = self;
        deallocate(self_ptr as *mut u8, size,
                   mem::align_of::<TypedArenaChunk<T>>());
        if !next.is_null() {
            let capacity = (*next).capacity;
            (*next).destroy(capacity);
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            round_up(this.offset(1) as usize, mem::align_of::<T>()) as *const u8
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
            self.start().offset(size as isize)
        }
    }
}

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for eight objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number
    /// of objects.
    #[inline]
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
            TypedArena {
                ptr: Cell::new((*chunk).start() as *const T),
                end: Cell::new((*chunk).end() as *const T),
                first: RefCell::new(chunk),
                _own: marker::PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
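    ///
    /// # Example
    ///
    /// A minimal usage sketch (hypothetical consumer code; the crate is
    /// unstable, so the example is not compiled):
    ///
    /// ```ignore
    /// let arena = TypedArena::new();
    /// let v = arena.alloc(vec![1, 2, 3]);
    /// v.push(4);
    /// assert_eq!(v.len(), 4);
    /// ```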
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        unsafe {
            let ptr: &mut T = &mut *(self.ptr.get() as *mut T);
            ptr::write(ptr, object);
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        }
    }

    /// Grows the arena by allocating a new chunk with double the capacity of
    /// the current one.
    #[inline(never)]
    fn grow(&self) {
        unsafe {
            let chunk = *self.first.borrow_mut();
            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
            self.ptr.set((*chunk).start() as *const T);
            self.end.set((*chunk).end() as *const T);
            *self.first.borrow_mut() = chunk
        }
    }
}

impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how many elements of the current (most recent) chunk
            // have been filled in; earlier chunks are always full.
            let start = self.first.borrow().as_ref().unwrap().start() as usize;
            let end = self.ptr.get() as usize;
            let diff = (end - start) / mem::size_of::<T>();

            // Pass that to the `destroy` method.
            (**self.first.borrow_mut()).destroy(diff)
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    #[allow(dead_code)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            })
        })
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Point {
                x: 1,
                y: 2,
                z: 3,
            };
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| {
                Point {
                    x: 1,
                    y: 2,
                    z: 3,
                }
            })
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            };
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }
}