1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! The arena, a fast but limited type of allocator.
13 //! Arenas are a type of allocator that destroy the objects within, all at
14 //! once, once the arena itself is destroyed. They do not support deallocation
15 //! of individual objects while the arena itself is still alive. The benefit
16 //! of an arena is very fast allocation; just a pointer bump.
18 //! This crate has two arenas implemented: `TypedArena`, which is a simpler
19 //! arena but can only hold objects of a single type, and `Arena`, which is a
20 //! more complex, slower arena which can hold objects of any type.
22 // Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
23 #![cfg_attr(stage0, feature(custom_attribute))]
24 #![crate_name = "arena"]
25 #![unstable(feature = "rustc_private")]
27 #![crate_type = "rlib"]
28 #![crate_type = "dylib"]
29 #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
30 html_favicon_url
= "http://www.rust-lang.org/favicon.ico",
31 html_root_url
= "http://doc.rust-lang.org/nightly/")]
34 #![feature(box_syntax)]
36 #![feature(staged_api)]
37 #![feature(unboxed_closures)]
38 #![cfg_attr(test, feature(test))]
extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker;
use std::mem;
use std::ptr;
use std::rc::Rc;
use std::rt::heap::{allocate, deallocate};
// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    // Backing storage. Only the Vec's reserved capacity is used; its own
    // length never advances (see the comment above).
    data: Rc<RefCell<Vec<u8>>>,
    // Number of bytes of `data`'s capacity currently in use by the arena.
    fill: Cell<usize>,
    // Whether this chunk holds only destructor-free (Copy) data.
    is_copy: Cell<bool>,
}
impl Chunk {
    // Total byte capacity of this chunk (the backing Vec's reserved space).
    fn capacity(&self) -> usize {
        self.data.borrow().capacity()
    }

    // Raw pointer to the start of the chunk's storage. Unsafe because the
    // pointer is only valid while the backing Vec is neither dropped nor
    // reallocated.
    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}
/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user provided
/// initializer function. It is important to not run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
pub struct Arena<'longer_than_self> {
    // The head is separated out from the list as a unbenchmarked
    // microoptimization, to avoid needing to case on the list to access the
    // head.
    // Current chunk for destructor-carrying data.
    head: RefCell<Chunk>,
    // Current chunk for Copy (destructor-free) data.
    copy_head: RefCell<Chunk>,
    // Retired chunks of both kinds.
    chunks: RefCell<Vec<Chunk>>,
    // Invariant marker: allocated objects may not outlive 'longer_than_self.
    _marker: marker::PhantomData<*mut &'longer_than_self ()>,
}
impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            // Destructor-carrying data goes in `head`...
            head: RefCell::new(chunk(initial_size, false)),
            // ...while Copy data goes in `copy_head`.
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: marker::PhantomData,
        }
    }
}
// Creates a fresh chunk with `size` bytes of capacity reserved. `is_copy`
// records whether the chunk will hold only destructor-free (Copy) data.
fn chunk(size: usize, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        // A new chunk starts out empty.
        fill: Cell::new(0),
        is_copy: Cell::new(is_copy),
    }
}
impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            // Run destructors for everything in the non-Copy head chunk.
            destroy_chunk(&*self.head.borrow());
            // Then for every retired non-Copy chunk. Copy chunks hold no
            // destructor-carrying data, so they are skipped (their memory
            // is freed when the backing Vec is dropped).
            for chunk in &*self.chunks.borrow() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}
/// Rounds `base` up to the next multiple of `align`.
///
/// `align` must be a power of two: the result is computed by bumping past
/// the next boundary and masking off the low bits. Panics on overflow of
/// the intermediate addition.
fn round_up(base: usize, align: usize) -> usize {
    let mask = align - 1;
    let bumped = base.checked_add(mask).unwrap();
    bumped & !mask
}
// Walk down a chunk, running the destructors for any objects stored
// in it. Each record in the chunk is a bit-packed tydesc pointer followed
// by the (alignment-padded) object it describes.
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        // Read the bit-packed tydesc pointer stored at the record start.
        let tydesc_data: *const usize = mem::transmute(buf.offset(idx as isize));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        // The object itself sits after the tydesc, rounded up to its own
        // alignment.
        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        // Only run the destructor if the initializer finished: the low bit
        // of the stored tydesc pointer records this (see bitpack_tydesc_ptr),
        // which is what keeps a mid-initializer panic from dropping an
        // uninitialized object.
        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}
171 // We encode whether the object a tydesc describes has been
172 // initialized in the arena in the low bit of the tydesc pointer. This
173 // is necessary in order to properly do cleanup if a panic occurs
174 // during an initializer.
176 fn bitpack_tydesc_ptr(p
: *const TyDesc
, is_done
: bool
) -> usize {
177 p
as usize | (is_done
as usize)
180 fn un_bitpack_tydesc_ptr(p
: usize) -> (*const TyDesc
, bool
) {
181 ((p
& !1) as *const TyDesc
, p
& 1 == 1)
// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
struct TyDesc {
    // Runs the destructor for a value of the described type.
    drop_glue: fn(*const i8),
    // Size and alignment of the described type, in bytes.
    size: usize,
    align: usize,
}

// Implemented for every type so that any `&T` can be turned into a trait
// object whose vtable layout mirrors `TyDesc` above.
trait AllTypes { fn dummy(&self) { } }
impl<T:?Sized> AllTypes for T { }
// Recovers a `TyDesc` for `T` by fabricating a trait-object and reading
// its vtable pointer. The data half of the trait object is never
// dereferenced.
unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    // Dangling non-null reference; used only to build the trait object,
    // never read through.
    let ptr = &*(1 as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    obj.vtable as *const TyDesc
}
impl<'longer_than_self> Arena<'longer_than_self> {
    // Byte capacity of the current Copy-data head chunk.
    fn chunk_size(&self) -> usize {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena

    // Slow path: retire the full Copy head chunk into `chunks` and start a
    // new, larger (power-of-two) one, then retry the allocation.
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());

        *self.copy_head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), true);

        return self.alloc_copy_inner(n_bytes, align);
    }

    // Bump-allocates `n_bytes` at the given alignment out of the Copy head
    // chunk, growing if it does not fit.
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as isize)
        }
    }

    // Allocates space for a `T` in the Copy region and initializes it with
    // `op`. No tydesc bookkeeping: Copy data needs no destructor.
    fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::min_align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            &mut *ptr
        }
    }

    // Functions for the non-POD part of the arena

    // Slow path for destructor-carrying data: retire the head chunk and
    // start a larger one, then retry.
    fn alloc_noncopy_grow(&self, n_bytes: usize,
                          align: usize) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());

        *self.head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), false);

        return self.alloc_noncopy_inner(n_bytes, align);
    }

    // Reserves room for a tydesc pointer followed by an aligned object of
    // `n_bytes`. Returns (pointer to the tydesc slot, pointer to the object).
    fn alloc_noncopy_inner(&self, n_bytes: usize,
                           align: usize) -> (*const u8, *const u8) {
        // Be careful to not maintain any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        // Keep `fill` aligned for the next record's tydesc pointer.
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            return (buf.offset(tydesc_start as isize), buf.offset(start as isize));
        }
    }

    // Allocates a destructor-carrying `T`, recording its tydesc so the
    // arena can run its destructor on drop.
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::min_align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = mem::transmute(tydesc);
            // Actually initialize it
            ptr::write(&mut(*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            &mut *ptr
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
    pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T
                 where F: FnOnce() -> T {
        unsafe {
            // Only types with drop glue pay the tydesc bookkeeping cost.
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}
// NOTE(review): the chunk under review was truncated; the loop headers and
// panic body below are reconstructed from context — confirm against history.
#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
#[should_panic]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1, 2] });
    }
    // Now, panic while allocating
    arena.alloc::<Rc<i32>, _>(|| {
        panic!()
    });
}
/// A faster arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<*mut TypedArenaChunk<T>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: marker::PhantomData<T>,
}
// One heap-allocated segment of a TypedArena. The header below is followed
// in memory by the element storage itself.
struct TypedArenaChunk<T> {
    marker: marker::PhantomData<T>,

    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    capacity: usize,

    // Objects follow here, suitably aligned.
}
// Computes the number of bytes needed for a chunk holding `capacity`
// elements of `T`: the chunk header, padding up to `T`'s alignment, then
// the element storage. Panics (via checked arithmetic) on overflow.
fn calculate_size<T>(capacity: usize) -> usize {
    let mut size = mem::size_of::<TypedArenaChunk<T>>();
    size = round_up(size, mem::min_align_of::<T>());
    let elem_size = mem::size_of::<T>();
    let elems_size = elem_size.checked_mul(capacity).unwrap();
    size = size.checked_add(elems_size).unwrap();
    size
}
impl<T> TypedArenaChunk<T> {
    // Allocates a new chunk with room for `capacity` elements, linked to
    // `next`. Unsafe: returns raw, uninitialized element storage.
    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
                  -> *mut TypedArenaChunk<T> {
        let size = calculate_size::<T>(capacity);
        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
                    as *mut TypedArenaChunk<T>;
        if chunk.is_null() { alloc::oom() }
        (*chunk).next = next;
        (*chunk).capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk. If the type descriptor is supplied, the
    /// drop glue is called; otherwise, drop glue is not called.
    unsafe fn destroy(&mut self, len: usize) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in 0..len {
                ptr::read(start as *const T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as isize)
            }
        }

        // Destroy the next chunk.
        let next = self.next;
        let size = calculate_size::<T>(self.capacity);
        let self_ptr: *mut TypedArenaChunk<T> = self;
        // Free our own storage; `self` must not be touched after this.
        deallocate(self_ptr as *mut u8, size,
                   mem::min_align_of::<TypedArenaChunk<T>>());
        // Recurse into the rest of the list. Follow-on chunks are always
        // full, so their whole capacity is destroyed.
        if !next.is_null() {
            let capacity = (*next).capacity;
            (*next).destroy(capacity);
        }
    }

    // Returns a pointer to the first allocated object.
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            // Elements begin just past the header, rounded up to `T`'s
            // alignment.
            mem::transmute(round_up(this.offset(1) as usize,
                                    mem::min_align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
            self.start().offset(size as isize)
        }
    }
}
impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for eight objects.
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number of
    /// objects.
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
            TypedArena {
                ptr: Cell::new((*chunk).start() as *const T),
                end: Cell::new((*chunk).end() as *const T),
                first: RefCell::new(chunk),
                _own: marker::PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
    pub fn alloc(&self, object: T) -> &mut T {
        // Out of room in the current chunk: grab a bigger one first.
        if self.ptr == self.end {
            self.grow()
        }

        let ptr: &mut T = unsafe {
            let ptr: &mut T = mem::transmute(self.ptr.clone());
            ptr::write(ptr, object);
            // Bump past the slot we just filled.
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        };

        ptr
    }

    // Grows the arena by prepending a chunk with double the capacity of the
    // current one.
    // NOTE(review): this signature was missing from the reviewed chunk and is
    // reconstructed from the body and call site — confirm against history.
    fn grow(&self) {
        unsafe {
            let chunk = *self.first.borrow_mut();
            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
            self.ptr.set((*chunk).start() as *const T);
            self.end.set((*chunk).end() as *const T);
            *self.first.borrow_mut() = chunk
        }
    }
}
impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled: only the head chunk can be
            // partially full, and `ptr` marks the next free slot in it.
            let start = self.first.borrow().as_ref().unwrap().start() as usize;
            let end = self.ptr.get() as usize;
            let diff = (end - start) / mem::size_of::<T>();

            // Pass that to the `destroy` method.
            (**self.first.borrow_mut()).destroy(diff)
        }
    }
}
#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    // POD fixture for the Copy-path tests and benches.
    // NOTE(review): the reviewed chunk was truncated; the fixture field
    // values and the `b.iter` closures below are reconstructed — confirm
    // against history.
    #[allow(dead_code)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        // Wrapper that hands out references to a specific variant.
        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        // Allocating the inner value from inside the outer initializer
        // exercises recursive `alloc` calls.
        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point { x: 1, y: 2, z: 3 })
        })
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        // Baseline: plain heap allocation of the same value.
        b.iter(|| {
            let _: Box<_> = box Point { x: 1, y: 2, z: 3 };
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Point { x: 1, y: 2, z: 3 })
        })
    }

    // Fixture with a destructor, exercising the non-Copy path.
    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        // Baseline: plain heap allocation of the same value.
        b.iter(|| {
            let _: Box<_> = box Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            };
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }
}