// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are allocators that destroy all of the objects within them at
//! once, when the arena itself is destroyed. They do not support
//! deallocation of individual objects while the arena is still alive. The
//! benefit of an arena is very fast allocation: just a pointer bump.
//!
//! This crate implements two arenas: `TypedArena`, a simpler arena that can
//! only hold objects of a single type, and `Arena`, a more complex, slower
//! arena that can hold objects of any type.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "arena"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "http://www.rust-lang.org/favicon.ico",
       html_root_url = "http://doc.rust-lang.org/nightly/")]

#![feature(alloc)]
#![feature(box_syntax)]
#![feature(core)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker;
use std::mem;
use std::ptr;
use std::rc::Rc;
use std::rt::heap::{allocate, deallocate};

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    data: Rc<RefCell<Vec<u8>>>,
    fill: Cell<usize>,
    is_copy: Cell<bool>,
}

impl Chunk {
    fn capacity(&self) -> usize {
        self.data.borrow().capacity()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks and uses the tydesc information to trace through the objects,
/// calling their destructors. One subtle point that needs to be addressed is
/// how to handle panics while running the user-provided initializer function.
/// It is important not to run the destructor on uninitialized objects, but
/// detecting them is somewhat subtle. Since `alloc()` can be invoked
/// recursively, it is not sufficient to simply exclude the most recent
/// object. To solve this without requiring extra space, we use the low-order
/// bit of the tydesc pointer to encode whether the object it describes has
/// been fully initialized.
///
/// As an optimization, objects with destructors are stored in different
/// chunks than objects without destructors. This reduces overhead when
/// initializing plain-old-data (`Copy` types) and means we don't need to
/// waste time running their destructors.
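///
/// # Examples
///
/// A minimal usage sketch (illustrative only; the initializer closures are
/// placeholders, and the example is not run as a doctest):
///
/// ```ignore
/// use arena::Arena;
///
/// let arena = Arena::new();
/// // `i32` has no destructor, so it lands in the "copy" chunks.
/// let n: &mut i32 = arena.alloc(|| 5);
/// // `String` has a destructor, so it lands in the non-copy chunks and is
/// // dropped when `arena` is dropped.
/// let s: &mut String = arena.alloc(|| "hello".to_string());
/// assert_eq!(*n, 5);
/// assert_eq!(&s[..], "hello");
/// ```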
pub struct Arena<'longer_than_self> {
    // The head is separated out from the list as an unbenchmarked
    // micro-optimization, to avoid needing to match on the list to access
    // the head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
    _marker: marker::PhantomData<*mut &'longer_than_self()>,
}

impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: marker::PhantomData,
        }
    }
}

fn chunk(size: usize, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0),
        is_copy: Cell::new(is_copy),
    }
}

impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in &*self.chunks.borrow() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}

// Rounds `base` up to the next multiple of `align` (which must be a power of
// two), panicking on overflow.
#[inline]
fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}

// Walk down a chunk, running the destructors for any objects stored
// in it.
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data: *const usize = mem::transmute(buf.offset(idx as isize));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}

// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
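//
// A worked example (a sketch; the address below is made up, but any real
// tydesc pointer is at least word-aligned, so its low bit is always free):
//
//     bitpack_tydesc_ptr(0x1000 as *const TyDesc, true)  == 0x1001
//     bitpack_tydesc_ptr(0x1000 as *const TyDesc, false) == 0x1000
//     un_bitpack_tydesc_ptr(0x1001) == (0x1000 as *const TyDesc, true)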
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}
#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}

// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
struct TyDesc {
    drop_glue: fn(*const i8),
    size: usize,
    align: usize
}

trait AllTypes { fn dummy(&self) { } }
impl<T:?Sized> AllTypes for T { }

unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    let ptr = &*(1 as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    obj.vtable as *const TyDesc
}

impl<'longer_than_self> Arena<'longer_than_self> {
    fn chunk_size(&self) -> usize {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());

        *self.copy_head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), true);

        return self.alloc_copy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as isize)
        }
    }

    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::min_align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            return &mut *ptr;
        }
    }

    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: usize,
                          align: usize) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());

        *self.head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), false);

        return self.alloc_noncopy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: usize,
                           align: usize) -> (*const u8, *const u8) {
        // Be careful not to keep any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            return (buf.offset(tydesc_start as isize), buf.offset(start as isize));
        }
    }

    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::min_align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = mem::transmute(tydesc);
            // Actually initialize it
            ptr::write(&mut(*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            return &mut *ptr;
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
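    ///
    /// A usage sketch (illustrative only, not run as a doctest):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let v: &mut Vec<i32> = arena.alloc(|| vec![1, 2, 3]);
    /// v.push(4);
    /// assert_eq!(v.len(), 4);
    /// ```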
    #[inline]
    pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}

#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
#[should_panic]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1, 2] });
    }
    // Now, panic while allocating
    arena.alloc::<Rc<i32>, _>(|| {
        panic!();
    });
}

/// A faster arena that can hold objects of only one type.
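///
/// # Examples
///
/// A minimal usage sketch (illustrative only, not run as a doctest):
///
/// ```ignore
/// let arena: TypedArena<u32> = TypedArena::new();
/// let x = arena.alloc(1);
/// let y = arena.alloc(2);
/// assert_eq!(*x + *y, 3);
/// ```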
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<*mut TypedArenaChunk<T>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: marker::PhantomData<T>,
}

struct TypedArenaChunk<T> {
    marker: marker::PhantomData<T>,

    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    capacity: usize,

    // Objects follow here, suitably aligned.
}

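// Computes the total size in bytes of a chunk: the `TypedArenaChunk<T>` header,
// rounded up to `T`'s alignment, followed by `capacity` elements of `T`.
// Rough worked example (an assumption for illustration: a 64-bit target,
// `T = u64`, `capacity = 4`): header = 16 bytes, round_up(16, 8) = 16,
// elements = 8 * 4 = 32, so the total is 48 bytes.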
fn calculate_size<T>(capacity: usize) -> usize {
    let mut size = mem::size_of::<TypedArenaChunk<T>>();
    size = round_up(size, mem::min_align_of::<T>());
    let elem_size = mem::size_of::<T>();
    let elems_size = elem_size.checked_mul(capacity).unwrap();
    size = size.checked_add(elems_size).unwrap();
    size
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
                  -> *mut TypedArenaChunk<T> {
        let size = calculate_size::<T>(capacity);
        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
                        as *mut TypedArenaChunk<T>;
        if chunk.is_null() { alloc::oom() }
        (*chunk).next = next;
        (*chunk).capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk, running the destructors of the first `len`
    /// objects (when `T` needs drop glue) and then destroying the next chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in 0..len {
                ptr::read(start as *const T); // reading the value runs its destructor
                start = start.offset(mem::size_of::<T>() as isize)
            }
        }

        // Destroy the next chunk.
        let next = self.next;
        let size = calculate_size::<T>(self.capacity);
        let self_ptr: *mut TypedArenaChunk<T> = self;
        deallocate(self_ptr as *mut u8, size,
                   mem::min_align_of::<TypedArenaChunk<T>>());
        if !next.is_null() {
            let capacity = (*next).capacity;
            (*next).destroy(capacity);
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            mem::transmute(round_up(this.offset(1) as usize,
                                    mem::min_align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
            self.start().offset(size as isize)
        }
    }
}

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for eight objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number of
    /// objects.
    #[inline]
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
            TypedArena {
                ptr: Cell::new((*chunk).start() as *const T),
                end: Cell::new((*chunk).end() as *const T),
                first: RefCell::new(chunk),
                _own: marker::PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
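    ///
    /// A usage sketch (illustrative only, not run as a doctest):
    ///
    /// ```ignore
    /// let arena: TypedArena<String> = TypedArena::new();
    /// let s = arena.alloc("hello".to_string());
    /// s.push_str(" world");
    /// assert_eq!(&s[..], "hello world");
    /// ```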
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        let ptr: &mut T = unsafe {
            let ptr: &mut T = mem::transmute(self.ptr.clone());
            ptr::write(ptr, object);
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        };

        ptr
    }

    /// Grows the arena by allocating a new chunk with double the capacity.
    #[inline(never)]
    fn grow(&self) {
        unsafe {
            let chunk = *self.first.borrow_mut();
            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
            self.ptr.set((*chunk).start() as *const T);
            self.end.set((*chunk).end() as *const T);
            *self.first.borrow_mut() = chunk
        }
    }
}

impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let start = self.first.borrow().as_ref().unwrap().start() as usize;
            let end = self.ptr.get() as usize;
            let diff = (end - start) / mem::size_of::<T>();

            // Pass that to the `destroy` method.
            (**self.first.borrow_mut()).destroy(diff)
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    #[allow(dead_code)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            })
        })
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Point {
                x: 1,
                y: 2,
                z: 3,
            };
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| {
                Point {
                    x: 1,
                    y: 2,
                    z: 3,
                }
            })
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            };
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }
}