1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! The arena, a fast but limited type of allocator.
13 //! Arenas are a type of allocator that destroy the objects within, all at
14 //! once, once the arena itself is destroyed. They do not support deallocation
15 //! of individual objects while the arena itself is still alive. The benefit
16 //! of an arena is very fast allocation; just a pointer bump.
18 //! This crate has two arenas implemented: `TypedArena`, which is a simpler
19 //! arena but can only hold objects of a single type, and `Arena`, which is a
20 //! more complex, slower arena which can hold objects of any type.
22 // Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
23 #![cfg_attr(stage0, feature(custom_attribute))]
24 #![crate_name = "arena"]
25 #![unstable(feature = "rustc_private", issue = "27812")]
27 #![crate_type = "rlib"]
28 #![crate_type = "dylib"]
29 #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
30 html_favicon_url
= "https://doc.rust-lang.org/favicon.ico",
31 html_root_url
= "https://doc.rust-lang.org/nightly/")]
34 #![feature(box_syntax)]
35 #![feature(core_intrinsics)]
38 #![feature(ptr_as_ref)]
40 #![feature(staged_api)]
41 #![cfg_attr(test, feature(test))]
extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker;
use std::mem;
use std::ptr;
use std::rc::Rc;

use alloc::heap::{allocate, deallocate};
// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    // Backing buffer; only its reserved *capacity* is used, the Vec's
    // length stays at 0 (see comment above).
    data: Rc<RefCell<Vec<u8>>>,
    // Number of bytes handed out from this chunk so far.
    // NOTE(review): field restored from truncated source; grounded by
    // `chunk.fill.get()` / `.set()` uses elsewhere in this file.
    fill: Cell<usize>,
    // Whether this chunk holds only destructor-free (`Copy`) data.
    // NOTE(review): field restored; grounded by `chunk.is_copy.get()` uses.
    is_copy: Cell<bool>,
}
66 fn capacity(&self) -> usize {
67 self.data
.borrow().capacity()
70 unsafe fn as_ptr(&self) -> *const u8 {
71 self.data
.borrow().as_ptr()
75 /// A slower reflection-based arena that can allocate objects of any type.
77 /// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
78 /// each allocated object, the arena stores a pointer to the type descriptor
79 /// followed by the object (potentially with alignment padding after each
80 /// element). When the arena is destroyed, it iterates through all of its
81 /// chunks, and uses the tydesc information to trace through the objects,
82 /// calling the destructors on them. One subtle point that needs to be
83 /// addressed is how to handle panics while running the user provided
84 /// initializer function. It is important to not run the destructor on
85 /// uninitialized objects, but how to detect them is somewhat subtle. Since
86 /// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
87 /// the most recent object. To solve this without requiring extra space, we
88 /// use the low order bit of the tydesc pointer to encode whether the object
89 /// it describes has been fully initialized.
91 /// As an optimization, objects with destructors are stored in different chunks
92 /// than objects without destructors. This reduces overhead when initializing
93 /// plain-old-data (`Copy` types) and means we don't need to waste time running
94 /// their destructors.
95 pub struct Arena
<'longer_than_self
> {
96 // The head is separated out from the list as a unbenchmarked
97 // microoptimization, to avoid needing to case on the list to access the
100 copy_head
: RefCell
<Chunk
>,
101 chunks
: RefCell
<Vec
<Chunk
>>,
102 _marker
: marker
::PhantomData
<*mut &'
longer_than_self()>,
106 /// Allocates a new Arena with 32 bytes preallocated.
107 pub fn new() -> Arena
<'a
> {
108 Arena
::new_with_size(32)
111 /// Allocates a new Arena with `initial_size` bytes preallocated.
112 pub fn new_with_size(initial_size
: usize) -> Arena
<'a
> {
114 head
: RefCell
::new(chunk(initial_size
, false)),
115 copy_head
: RefCell
::new(chunk(initial_size
, true)),
116 chunks
: RefCell
::new(Vec
::new()),
117 _marker
: marker
::PhantomData
,
122 fn chunk(size
: usize, is_copy
: bool
) -> Chunk
{
124 data
: Rc
::new(RefCell
::new(Vec
::with_capacity(size
))),
126 is_copy
: Cell
::new(is_copy
),
130 impl<'longer_than_self
> Drop
for Arena
<'longer_than_self
> {
133 destroy_chunk(&*self.head
.borrow());
134 for chunk
in self.chunks
.borrow().iter() {
135 if !chunk
.is_copy
.get() {
136 destroy_chunk(chunk
);
/// Rounds `base` up to the next multiple of `align`.
///
/// `align` must be a power of two; panics if `base + align - 1` overflows.
fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}
148 // Walk down a chunk, running the destructors for any objects stored
150 unsafe fn destroy_chunk(chunk
: &Chunk
) {
152 let buf
= chunk
.as_ptr();
153 let fill
= chunk
.fill
.get();
156 let tydesc_data
= buf
.offset(idx
as isize) as *const usize;
157 let (tydesc
, is_done
) = un_bitpack_tydesc_ptr(*tydesc_data
);
158 let (size
, align
) = ((*tydesc
).size
, (*tydesc
).align
);
160 let after_tydesc
= idx
+ mem
::size_of
::<*const TyDesc
>();
162 let start
= round_up(after_tydesc
, align
);
164 //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
165 // start, size, align, is_done);
167 ((*tydesc
).drop_glue
)(buf
.offset(start
as isize) as *const i8);
170 // Find where the next tydesc lives
171 idx
= round_up(start
+ size
, mem
::align_of
::<*const TyDesc
>());
175 // We encode whether the object a tydesc describes has been
176 // initialized in the arena in the low bit of the tydesc pointer. This
177 // is necessary in order to properly do cleanup if a panic occurs
178 // during an initializer.
180 fn bitpack_tydesc_ptr(p
: *const TyDesc
, is_done
: bool
) -> usize {
181 p
as usize | (is_done
as usize)
184 fn un_bitpack_tydesc_ptr(p
: usize) -> (*const TyDesc
, bool
) {
185 ((p
& !1) as *const TyDesc
, p
& 1 == 1)
// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
struct TyDesc {
    drop_glue: fn(*const i8),
    // NOTE(review): `size`/`align` fields restored from truncated source;
    // grounded by `(*tydesc).size` / `(*tydesc).align` reads in
    // `destroy_chunk`.
    size: usize,
    align: usize,
}
/// A trait implemented for every type; it exists only so that a trait-object
/// vtable can be obtained for an arbitrary `T` (see `get_tydesc`).
trait AllTypes {
    fn dummy(&self) {}
}

impl<T: ?Sized> AllTypes for T {}
200 unsafe fn get_tydesc
<T
>() -> *const TyDesc
{
201 use std
::raw
::TraitObject
;
203 let ptr
= &*(1 as *const T
);
205 // Can use any trait that is implemented for all types.
206 let obj
= mem
::transmute
::<&AllTypes
, TraitObject
>(ptr
);
207 obj
.vtable
as *const TyDesc
210 impl<'longer_than_self
> Arena
<'longer_than_self
> {
211 fn chunk_size(&self) -> usize {
212 self.copy_head
.borrow().capacity()
215 // Functions for the POD part of the arena
216 fn alloc_copy_grow(&self, n_bytes
: usize, align
: usize) -> *const u8 {
217 // Allocate a new chunk.
218 let new_min_chunk_size
= cmp
::max(n_bytes
, self.chunk_size());
219 self.chunks
.borrow_mut().push(self.copy_head
.borrow().clone());
221 *self.copy_head
.borrow_mut() =
222 chunk((new_min_chunk_size
+ 1).next_power_of_two(), true);
224 self.alloc_copy_inner(n_bytes
, align
)
228 fn alloc_copy_inner(&self, n_bytes
: usize, align
: usize) -> *const u8 {
229 let start
= round_up(self.copy_head
.borrow().fill
.get(), align
);
231 let end
= start
+ n_bytes
;
232 if end
> self.chunk_size() {
233 return self.alloc_copy_grow(n_bytes
, align
);
236 let copy_head
= self.copy_head
.borrow();
237 copy_head
.fill
.set(end
);
240 copy_head
.as_ptr().offset(start
as isize)
245 fn alloc_copy
<T
, F
>(&self, op
: F
) -> &mut T
where F
: FnOnce() -> T
{
247 let ptr
= self.alloc_copy_inner(mem
::size_of
::<T
>(),
248 mem
::align_of
::<T
>());
249 let ptr
= ptr
as *mut T
;
250 ptr
::write(&mut (*ptr
), op());
255 // Functions for the non-POD part of the arena
256 fn alloc_noncopy_grow(&self, n_bytes
: usize,
257 align
: usize) -> (*const u8, *const u8) {
258 // Allocate a new chunk.
259 let new_min_chunk_size
= cmp
::max(n_bytes
, self.chunk_size());
260 self.chunks
.borrow_mut().push(self.head
.borrow().clone());
262 *self.head
.borrow_mut() =
263 chunk((new_min_chunk_size
+ 1).next_power_of_two(), false);
265 self.alloc_noncopy_inner(n_bytes
, align
)
269 fn alloc_noncopy_inner(&self, n_bytes
: usize,
270 align
: usize) -> (*const u8, *const u8) {
271 // Be careful to not maintain any `head` borrows active, because
272 // `alloc_noncopy_grow` borrows it mutably.
273 let (start
, end
, tydesc_start
, head_capacity
) = {
274 let head
= self.head
.borrow();
275 let fill
= head
.fill
.get();
277 let tydesc_start
= fill
;
278 let after_tydesc
= fill
+ mem
::size_of
::<*const TyDesc
>();
279 let start
= round_up(after_tydesc
, align
);
280 let end
= start
+ n_bytes
;
282 (start
, end
, tydesc_start
, head
.capacity())
285 if end
> head_capacity
{
286 return self.alloc_noncopy_grow(n_bytes
, align
);
289 let head
= self.head
.borrow();
290 head
.fill
.set(round_up(end
, mem
::align_of
::<*const TyDesc
>()));
293 let buf
= head
.as_ptr();
294 (buf
.offset(tydesc_start
as isize), buf
.offset(start
as isize))
299 fn alloc_noncopy
<T
, F
>(&self, op
: F
) -> &mut T
where F
: FnOnce() -> T
{
301 let tydesc
= get_tydesc
::<T
>();
303 self.alloc_noncopy_inner(mem
::size_of
::<T
>(),
304 mem
::align_of
::<T
>());
305 let ty_ptr
= ty_ptr
as *mut usize;
306 let ptr
= ptr
as *mut T
;
307 // Write in our tydesc along with a bit indicating that it
308 // has *not* been initialized yet.
309 *ty_ptr
= bitpack_tydesc_ptr(tydesc
, false);
310 // Actually initialize it
311 ptr
::write(&mut(*ptr
), op());
312 // Now that we are done, update the tydesc to indicate that
313 // the object is there.
314 *ty_ptr
= bitpack_tydesc_ptr(tydesc
, true);
320 /// Allocates a new item in the arena, using `op` to initialize the value,
321 /// and returns a reference to it.
323 pub fn alloc
<T
:'longer_than_self
, F
>(&self, op
: F
) -> &mut T
where F
: FnOnce() -> T
{
325 if intrinsics
::needs_drop
::<T
>() {
326 self.alloc_noncopy(op
)
335 fn test_arena_destructors() {
336 let arena
= Arena
::new();
338 // Arena allocate something with drop glue to make sure it
340 arena
.alloc(|| Rc
::new(i
));
341 // Allocate something with funny size and alignment, to keep
342 // things interesting.
343 arena
.alloc(|| [0u8, 1u8, 2u8]);
349 fn test_arena_destructors_fail() {
350 let arena
= Arena
::new();
351 // Put some stuff in the arena.
353 // Arena allocate something with drop glue to make sure it
355 arena
.alloc(|| { Rc::new(i) }
);
356 // Allocate something with funny size and alignment, to keep
357 // things interesting.
358 arena
.alloc(|| { [0u8, 1, 2] }
);
360 // Now, panic while allocating
361 arena
.alloc
::<Rc
<i32>, _
>(|| {
366 /// A faster arena that can hold objects of only one type.
367 pub struct TypedArena
<T
> {
368 /// A pointer to the next object to be allocated.
371 /// A pointer to the end of the allocated area. When this pointer is
372 /// reached, a new chunk is allocated.
375 /// A pointer to the first arena segment.
376 first
: RefCell
<*mut TypedArenaChunk
<T
>>,
378 /// Marker indicating that dropping the arena causes its owned
379 /// instances of `T` to be dropped.
380 _own
: marker
::PhantomData
<T
>,
/// Header of one arena segment; the segment's objects are laid out in the
/// same heap allocation immediately after this header.
struct TypedArenaChunk<T> {
    marker: marker::PhantomData<T>,

    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    // NOTE(review): field restored from truncated source; grounded by
    // `(*chunk).capacity` writes/reads in `new`, `destroy` and `grow`.
    capacity: usize,

    // Objects follow here, suitably aligned.
}
395 fn calculate_size
<T
>(capacity
: usize) -> usize {
396 let mut size
= mem
::size_of
::<TypedArenaChunk
<T
>>();
397 size
= round_up(size
, mem
::align_of
::<T
>());
398 let elem_size
= mem
::size_of
::<T
>();
399 let elems_size
= elem_size
.checked_mul(capacity
).unwrap();
400 size
= size
.checked_add(elems_size
).unwrap();
404 impl<T
> TypedArenaChunk
<T
> {
406 unsafe fn new(next
: *mut TypedArenaChunk
<T
>, capacity
: usize)
407 -> *mut TypedArenaChunk
<T
> {
408 let size
= calculate_size
::<T
>(capacity
);
409 let chunk
= allocate(size
, mem
::align_of
::<TypedArenaChunk
<T
>>())
410 as *mut TypedArenaChunk
<T
>;
411 if chunk
.is_null() { alloc::oom() }
412 (*chunk
).next
= next
;
413 (*chunk
).capacity
= capacity
;
417 /// Destroys this arena chunk. If the type descriptor is supplied, the
418 /// drop glue is called; otherwise, drop glue is not called.
420 unsafe fn destroy(&mut self, len
: usize) {
421 // Destroy all the allocated objects.
422 if intrinsics
::needs_drop
::<T
>() {
423 let mut start
= self.start();
425 ptr
::read(start
as *const T
); // run the destructor on the pointer
426 start
= start
.offset(mem
::size_of
::<T
>() as isize)
430 // Destroy the next chunk.
431 let next
= self.next
;
432 let size
= calculate_size
::<T
>(self.capacity
);
433 let self_ptr
: *mut TypedArenaChunk
<T
> = self;
434 deallocate(self_ptr
as *mut u8, size
,
435 mem
::align_of
::<TypedArenaChunk
<T
>>());
437 let capacity
= (*next
).capacity
;
438 (*next
).destroy(capacity
);
442 // Returns a pointer to the first allocated object.
444 fn start(&self) -> *const u8 {
445 let this
: *const TypedArenaChunk
<T
> = self;
447 round_up(this
.offset(1) as usize, mem
::align_of
::<T
>()) as *const u8
451 // Returns a pointer to the end of the allocated space.
453 fn end(&self) -> *const u8 {
455 let size
= mem
::size_of
::<T
>().checked_mul(self.capacity
).unwrap();
456 self.start().offset(size
as isize)
461 impl<T
> TypedArena
<T
> {
462 /// Creates a new `TypedArena` with preallocated space for eight objects.
464 pub fn new() -> TypedArena
<T
> {
465 TypedArena
::with_capacity(8)
468 /// Creates a new `TypedArena` with preallocated space for the given number of
471 pub fn with_capacity(capacity
: usize) -> TypedArena
<T
> {
473 let chunk
= TypedArenaChunk
::<T
>::new(ptr
::null_mut(), capacity
);
475 ptr
: Cell
::new((*chunk
).start() as *const T
),
476 end
: Cell
::new((*chunk
).end() as *const T
),
477 first
: RefCell
::new(chunk
),
478 _own
: marker
::PhantomData
,
483 /// Allocates an object in the `TypedArena`, returning a reference to it.
485 pub fn alloc(&self, object
: T
) -> &mut T
{
486 if self.ptr
== self.end
{
491 let ptr
: &mut T
= &mut *(self.ptr
.get() as *mut T
);
492 ptr
::write(ptr
, object
);
493 self.ptr
.set(self.ptr
.get().offset(1));
502 let chunk
= *self.first
.borrow_mut();
503 let new_capacity
= (*chunk
).capacity
.checked_mul(2).unwrap();
504 let chunk
= TypedArenaChunk
::<T
>::new(chunk
, new_capacity
);
505 self.ptr
.set((*chunk
).start() as *const T
);
506 self.end
.set((*chunk
).end() as *const T
);
507 *self.first
.borrow_mut() = chunk
512 impl<T
> Drop
for TypedArena
<T
> {
515 // Determine how much was filled.
516 let start
= self.first
.borrow().as_ref().unwrap().start() as usize;
517 let end
= self.ptr
.get() as usize;
518 let diff
= (end
- start
) / mem
::size_of
::<T
>();
520 // Pass that to the `destroy` method.
521 (**self.first
.borrow_mut()).destroy(diff
)
529 use self::test
::Bencher
;
530 use super::{Arena, TypedArena}
;
540 fn test_arena_alloc_nested() {
541 struct Inner { value: u8 }
542 struct Outer
<'a
> { inner: &'a Inner }
543 enum EI
<'e
> { I(Inner), O(Outer<'e>) }
545 struct Wrap
<'a
>(TypedArena
<EI
<'a
>>);
548 fn alloc_inner
<F
:Fn() -> Inner
>(&self, f
: F
) -> &Inner
{
549 let r
: &EI
= self.0.alloc(EI
::I(f()));
550 if let &EI
::I(ref i
) = r
{
556 fn alloc_outer
<F
:Fn() -> Outer
<'a
>>(&self, f
: F
) -> &Outer
{
557 let r
: &EI
= self.0.alloc(EI
::O(f()));
558 if let &EI
::O(ref o
) = r
{
566 let arena
= Wrap(TypedArena
::new());
568 let result
= arena
.alloc_outer(|| Outer
{
569 inner
: arena
.alloc_inner(|| Inner { value: 10 }
) });
571 assert_eq
!(result
.inner
.value
, 10);
576 let arena
= TypedArena
::new();
587 pub fn bench_copy(b
: &mut Bencher
) {
588 let arena
= TypedArena
::new();
599 pub fn bench_copy_nonarena(b
: &mut Bencher
) {
601 let _
: Box
<_
> = box Point
{
610 pub fn bench_copy_old_arena(b
: &mut Bencher
) {
611 let arena
= Arena
::new();
630 pub fn test_noncopy() {
631 let arena
= TypedArena
::new();
633 arena
.alloc(Noncopy
{
634 string
: "hello world".to_string(),
635 array
: vec
!( 1, 2, 3, 4, 5 ),
641 pub fn bench_noncopy(b
: &mut Bencher
) {
642 let arena
= TypedArena
::new();
644 arena
.alloc(Noncopy
{
645 string
: "hello world".to_string(),
646 array
: vec
!( 1, 2, 3, 4, 5 ),
652 pub fn bench_noncopy_nonarena(b
: &mut Bencher
) {
654 let _
: Box
<_
> = box Noncopy
{
655 string
: "hello world".to_string(),
656 array
: vec
!( 1, 2, 3, 4, 5 ),
662 pub fn bench_noncopy_old_arena(b
: &mut Bencher
) {
663 let arena
= Arena
::new();
665 arena
.alloc(|| Noncopy
{
666 string
: "hello world".to_string(),
667 array
: vec
!( 1, 2, 3, 4, 5 ),