// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate has two arenas implemented: `TypedArena`, which is a simpler
//! arena but can only hold objects of a single type, and `Arena`, which is a
//! more complex, slower arena which can hold objects of any type.
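//!
//! # Example
//!
//! A minimal usage sketch of `TypedArena` (the `Node` type and the values here
//! are illustrative, not part of this crate):
//!
//! ```ignore
//! use arena::TypedArena;
//!
//! struct Node {
//!     id: usize,
//! }
//!
//! let arena = TypedArena::new();
//! // Each allocation is just a pointer bump into the current chunk.
//! let a = arena.alloc(Node { id: 1 });
//! let b = arena.alloc(Node { id: 2 });
//! assert_eq!(a.id + b.id, 3);
//! // All `Node`s are dropped together when `arena` goes out of scope.
//! ```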

#![crate_name = "arena"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]
#![cfg_attr(not(stage0), deny(warnings))]

#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(heap_api)]
#![feature(staged_api)]
#![feature(dropck_parametricity)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;

use alloc::heap;
use alloc::raw_vec::RawVec;

/// A faster arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
}

impl<T> TypedArenaChunk<T> {
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
    }

    /// Destroys this arena chunk.
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().offset(self.storage.cap() as isize)
            }
        }
    }
}

const PAGE: usize = 4096;

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for many objects.
    pub fn new() -> TypedArena<T> {
        // Reserve at least one page.
        let elem_size = cmp::max(1, mem::size_of::<T>());
        TypedArena::with_capacity(PAGE / elem_size)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number of
    /// objects.
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(cmp::max(1, capacity));
            TypedArena {
                ptr: Cell::new(chunk.start()),
                end: Cell::new(chunk.end()),
                chunks: RefCell::new(vec![chunk]),
                _own: PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
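    ///
    /// # Examples
    ///
    /// A minimal illustrative sketch (the element type and values are arbitrary):
    ///
    /// ```ignore
    /// let arena = TypedArena::new();
    /// let x = arena.alloc(42);
    /// *x += 1;
    /// assert_eq!(*x, 43);
    /// ```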
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = heap::EMPTY as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    /// Grows the arena by allocating or enlarging a chunk.
    #[inline(never)]
    #[cold]
    fn grow(&self) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let prev_capacity = chunks.last().unwrap().storage.cap();
            let new_capacity = prev_capacity.checked_mul(2).unwrap();
            if chunks.last_mut().unwrap().storage.double_in_place() {
                self.end.set(chunks.last().unwrap().end());
            } else {
                let chunk = TypedArenaChunk::<T>::new(new_capacity);
                self.ptr.set(chunk.start());
                self.end.set(chunk.end());
                chunks.push(chunk);
            }
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
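    ///
    /// # Examples
    ///
    /// A minimal illustrative sketch of reusing the arena's memory between passes:
    ///
    /// ```ignore
    /// let mut arena: TypedArena<u32> = TypedArena::new();
    /// arena.alloc(1);
    /// // `clear` drops the values allocated so far but keeps the largest chunk.
    /// arena.clear();
    /// arena.alloc(2);
    /// ```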
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            let last_idx = chunks_borrow.len() - 1;
            self.clear_last_chunk(&mut chunks_borrow[last_idx]);
            // If `T` is ZST, code below has no effect.
            for mut chunk in chunks_borrow.drain(..last_idx) {
                let cap = chunk.storage.cap();
                chunk.destroy(cap);
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        self.ptr.set(last_chunk.start());
    }
}

impl<T> Drop for TypedArena<T> {
    #[unsafe_destructor_blind_to_params]
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            let mut last_chunk = chunks_borrow.pop().unwrap();
            // Drop the contents of the last chunk.
            self.clear_last_chunk(&mut last_chunk);
            // The last chunk will be dropped. Destroy all other chunks.
            for chunk in chunks_borrow.iter_mut() {
                let cap = chunk.storage.cap();
                chunk.destroy(cap);
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::TypedArena;
    use std::cell::Cell;

    #[allow(dead_code)]
    #[derive(Debug, Eq, PartialEq)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner {
            value: u8,
        }
        struct Outer<'a> {
            inner: &'a Inner,
        }
        enum EI<'e> {
            I(Inner),
            O(Outer<'e>),
        }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| {
            Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) }
        });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        arena.alloc(Point { x: 1, y: 2, z: 3 });
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        arena.alloc(Noncopy {
            string: "hello world".to_string(),
            array: vec![1, 2, 3, 4, 5],
        });
    }

    #[test]
    pub fn test_typed_arena_zero_sized() {
        let arena = TypedArena::new();
        arena.alloc(());
    }

    #[test]
    pub fn test_typed_arena_clear() {
        let mut arena = TypedArena::new();
        arena.alloc(Point { x: 1, y: 2, z: 3 });
        arena.clear();
        arena.alloc(Point { x: 1, y: 2, z: 3 });
    }

    struct DropCounter<'a> {
        count: &'a Cell<u32>,
    }

    impl<'a> Drop for DropCounter<'a> {
        fn drop(&mut self) {
            self.count.set(self.count.get() + 1);
        }
    }

    #[test]
    fn test_typed_arena_drop_count() {
        let counter = Cell::new(0);
        {
            let arena: TypedArena<DropCounter> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
        }
        assert_eq!(counter.get(), 100);
    }

    #[test]
    fn test_typed_arena_drop_on_clear() {
        let counter = Cell::new(0);
        let mut arena: TypedArena<DropCounter> = TypedArena::new();
        for i in 0..10 {
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
            arena.clear();
            assert_eq!(counter.get(), i * 100 + 100);
        }
    }

    thread_local! {
        static DROP_COUNTER: Cell<u32> = Cell::new(0)
    }

    struct SmallDroppable;

    impl Drop for SmallDroppable {
        fn drop(&mut self) {
            DROP_COUNTER.with(|c| c.set(c.get() + 1));
        }
    }

    #[test]
    fn test_typed_arena_drop_small_count() {
        DROP_COUNTER.with(|c| c.set(0));
        {
            let arena: TypedArena<SmallDroppable> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(SmallDroppable);
            }
        }
        assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        })
    }
}