// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements `TypedArena`, a simple arena that can only hold
//! objects of a single type.
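//!
//! A minimal usage sketch (marked `ignore`: the crate builds only on
//! nightly and is consumed as `extern crate arena`):
//!
//! ```ignore
//! use arena::TypedArena;
//!
//! let arena: TypedArena<u32> = TypedArena::new();
//! let x = arena.alloc(42);
//! assert_eq!(*x, 42);
//! // All values allocated from `arena` are dropped together when the
//! // arena itself goes out of scope.
//! ```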

#![crate_name = "arena"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]

#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(generic_param_attrs)]
#![feature(needs_drop)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().offset(self.storage.cap() as isize)
            }
        }
    }
}

const PAGE: usize = 4096;

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena`.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(0 as *mut T),
            end: Cell::new(0 as *mut T),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
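    ///
    /// A minimal sketch of typical use (marked `ignore`: the crate builds
    /// only on nightly):
    ///
    /// ```ignore
    /// let arena = arena::TypedArena::new();
    /// let v = arena.alloc(1u32);
    /// *v += 1; // the returned reference is mutable
    /// assert_eq!(*v, 2);
    /// ```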
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
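    ///
    /// A short sketch (marked `ignore`: the crate builds only on nightly):
    ///
    /// ```ignore
    /// let arena: arena::TypedArena<i32> = arena::TypedArena::new();
    /// let copied = arena.alloc_slice(&[1, 2, 3]);
    /// copied[0] = 10; // mutates the arena copy, not the source slice
    /// ```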
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
        where T: Copy {
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);

        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let at_least_bytes = slice.len() * mem::size_of::<T>();
        if available_capacity_bytes < at_least_bytes {
            self.grow(slice.len());
        }

        unsafe {
            let start_ptr = self.ptr.get();
            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
            self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }
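
    /// Grows the arena so that at least `n` more objects of `T` fit. First
    /// tries to extend the current chunk in place; if that fails, repeatedly
    /// doubles the capacity until the used portion plus `n` fits, and
    /// allocates a fresh chunk of that size.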
    #[inline(never)]
    #[cold]
    fn grow(&self, n: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= currently_used_cap + n {
                            break;
                        }
                    }
                }
            } else {
                let elem_size = cmp::max(1, mem::size_of::<T>());
                new_capacity = cmp::max(n, PAGE / elem_size);
            }
            chunk = TypedArenaChunk::<T>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                self.clear_last_chunk(&mut last_chunk);
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..) {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
                chunks_borrow.push(last_chunk);
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}
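
/// An arena that can hold objects of any type, as long as they do not need
/// drop glue: `alloc` asserts `!mem::needs_drop::<T>()`, so the arena never
/// runs destructors when it is freed.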
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

impl DroplessArena {
    pub fn new() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(0 as *mut u8),
            end: Cell::new(0 as *mut u8),
            chunks: RefCell::new(vec![]),
        }
    }

    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
        let ptr = ptr as *const u8 as *mut u8;
        for chunk in &*self.chunks.borrow() {
            if chunk.start() <= ptr && ptr < chunk.end() {
                return true;
            }
        }

        false
    }

    fn align_for<T>(&self) {
        let align = mem::align_of::<T>();
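        // Round the pointer up to the next multiple of `align` (a power of
        // two). Example: ptr = 13, align = 8 gives (13 + 7) & !7 == 16.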
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }

    #[inline(never)]
    #[cold]
    fn grow<T>(&self, n: usize) {
        let needed_bytes = n * mem::size_of::<T>();
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= used_bytes + needed_bytes {
                            break;
                        }
                    }
                }
            } else {
                new_capacity = cmp::max(needed_bytes, PAGE);
            }
            chunk = TypedArenaChunk::<u8>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
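
    /// Allocates a single object in the `DroplessArena`, returning a mutable
    /// reference to it. Panics on zero-sized types and on types that need
    /// drop glue.
    ///
    /// A small sketch (marked `ignore`: the crate builds only on nightly):
    ///
    /// ```ignore
    /// let arena = arena::DroplessArena::new();
    /// let n = arena.alloc(4u64); // u64 has no drop glue
    /// assert_eq!(*n, 4);
    /// ```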
    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        unsafe {
            assert!(!mem::needs_drop::<T>());
            assert!(mem::size_of::<T>() != 0);

            self.align_for::<T>();
            let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
            if (future_end as *mut u8) >= self.end.get() {
                self.grow::<T>(1)
            }

            let ptr = self.ptr.get();
            // Set the pointer past ourselves
            self.ptr.set(intrinsics::arith_offset(
                    self.ptr.get(), mem::size_of::<T>() as isize
            ) as *mut u8);
            // Write into uninitialized memory.
            ptr::write(ptr as *mut T, object);
            &mut *(ptr as *mut T)
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
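    ///
    /// A short sketch (marked `ignore`: the crate builds only on nightly):
    ///
    /// ```ignore
    /// let arena = arena::DroplessArena::new();
    /// let xs: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
    /// xs[2] = 30;
    /// assert_eq!(xs, &[1, 2, 30]);
    /// ```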
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
        where T: Copy {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);
        self.align_for::<T>();

        let future_end = unsafe {
            intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
        };
        if (future_end as *mut u8) >= self.end.get() {
            self.grow::<T>(slice.len());
        }

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
            self.ptr.set(intrinsics::arith_offset(
                    self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
            ) as *mut u8);
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::TypedArena;
    use std::cell::Cell;

    #[allow(dead_code)]
    #[derive(Debug, Eq, PartialEq)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    pub fn test_unused() {
        let arena: TypedArena<Point> = TypedArena::new();
        assert!(arena.chunks.borrow().is_empty());
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner {
            value: u8,
        }
        struct Outer<'a> {
            inner: &'a Inner,
        }
        enum EI<'e> {
            I(Inner),
            O(Outer<'e>),
        }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result =
            arena.alloc_outer(|| Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }

    #[test]
    pub fn test_typed_arena_zero_sized() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(());
        }
    }

    #[test]
    pub fn test_typed_arena_clear() {
        let mut arena = TypedArena::new();
        for _ in 0..10 {
            arena.clear();
            for _ in 0..10000 {
                arena.alloc(Point { x: 1, y: 2, z: 3 });
            }
        }
    }

    struct DropCounter<'a> {
        count: &'a Cell<u32>,
    }

    impl<'a> Drop for DropCounter<'a> {
        fn drop(&mut self) {
            self.count.set(self.count.get() + 1);
        }
    }

    #[test]
    fn test_typed_arena_drop_count() {
        let counter = Cell::new(0);
        {
            let arena: TypedArena<DropCounter> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
        };
        assert_eq!(counter.get(), 100);
    }

    #[test]
    fn test_typed_arena_drop_on_clear() {
        let counter = Cell::new(0);
        let mut arena: TypedArena<DropCounter> = TypedArena::new();
        for i in 0..10 {
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
            arena.clear();
            assert_eq!(counter.get(), i * 100 + 100);
        }
    }

    thread_local! {
        static DROP_COUNTER: Cell<u32> = Cell::new(0)
    }

    struct SmallDroppable;

    impl Drop for SmallDroppable {
        fn drop(&mut self) {
            DROP_COUNTER.with(|c| c.set(c.get() + 1));
        }
    }

    #[test]
    fn test_typed_arena_drop_small_count() {
        DROP_COUNTER.with(|c| c.set(0));
        {
            let arena: TypedArena<SmallDroppable> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(SmallDroppable);
            }
        };
        assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        })
    }
}