//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements several kinds of arena.
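//!
//! For illustration, allocating into a `TypedArena` might look like the
//! following sketch (not compiled as a doctest; the exact import path is
//! assumed):
//!
//! ```ignore
//! let arena: TypedArena<String> = TypedArena::default();
//! let s: &mut String = arena.alloc(String::from("hello"));
//! s.push_str(", world");
//! // Every `String` in `arena` is dropped when the arena itself is dropped.
//! ```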

#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(dropck_eyepatch)]
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]
#![cfg_attr(test, feature(test))]

use smallvec::SmallVec;

use std::alloc::Layout;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::marker::{PhantomData, Send};
use std::mem::{self, MaybeUninit};
use std::ptr;
use std::slice;

#[inline(never)]
#[cold]
pub fn cold_path<F: FnOnce() -> R, R>(f: F) -> R {
    f()
}

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: Box<[MaybeUninit<T>]>,
    /// The number of valid entries in the chunk.
    entries: usize,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&mut self) -> *mut T {
        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&mut self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.len())
            }
        }
    }
}

// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
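// For example (illustrative numbers): a `TypedArena<u64>` would get chunk
// capacities of 4096 / 8 = 512 elements, then 1024, 2048, and so on, until
// the 2 MiB cap of HUGE_PAGE / 8 = 262144 elements per chunk is reached.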
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;

impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set((self.ptr.get() as *mut u8).wrapping_offset(1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[inline]
    fn can_allocate(&self, additional: usize) -> bool {
        let available_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
        available_bytes >= additional_bytes
    }

    /// Ensures there's enough space in the current chunk to fit `len` objects.
    #[inline]
    fn ensure_capacity(&self, additional: usize) {
        if !self.can_allocate(additional) {
            self.grow(additional);
            debug_assert!(self.can_allocate(additional));
        }
    }

    #[inline]
    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        unsafe {
            let len = slice.len();
            let start_ptr = self.alloc_raw_slice(len);
            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        unsafe {
            let len = vec.len();
            let start_ptr = self.alloc_raw_slice(len);
            vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            vec.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            // We need the element size to convert chunk sizes (ranging from
            // PAGE to HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // If a type is `!needs_drop`, we don't need to keep track of how many elements
                // the chunk stores - the field will be ignored anyway.
                if mem::needs_drop::<T>() {
                    let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                    last_chunk.entries = used_bytes / mem::size_of::<T>();
                }

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE / elem_size;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<T>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
                    chunk.destroy(chunk.entries);
                }
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // Box handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

pub struct DroplessArena {
    /// A pointer to the start of the free space.
    start: Cell<*mut u8>,

    /// A pointer to the end of free space.
    ///
    /// The allocation proceeds from the end of the chunk towards the start.
    /// When this pointer crosses the start pointer, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // There is no need to update `last_chunk.entries` because that
                // field isn't used by `DroplessArena`.

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<u8>::new(new_cap);
            self.start.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Allocates a byte slice with specified layout from the current memory
    /// chunk. Returns `None` if there is no free space left to satisfy the
    /// request.
    #[inline]
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
        let start = self.start.get() as usize;
        let end = self.end.get() as usize;

        let align = layout.align();
        let bytes = layout.size();

        let new_end = end.checked_sub(bytes)? & !(align - 1);
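        // Worked example (illustrative numbers): with end = 0x1000, bytes = 5
        // and align = 8, checked_sub gives 0xFFB, and masking with !(8 - 1)
        // rounds down to 0xFF8, so the allocation is aligned and the three
        // spare bytes above it are simply left unused.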
        if start <= new_end {
            let new_end = new_end as *mut u8;
            self.end.set(new_end);
            Some(new_end)
        } else {
            None
        }
    }

    #[inline]
    pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        assert!(layout.size() != 0);
        loop {
            if let Some(a) = self.alloc_raw_without_grow(layout) {
                break a;
            }
            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow(layout.size());
        }
    }

    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

        unsafe {
            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            slice::from_raw_parts_mut(mem, slice.len())
        }
    }

    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators.
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`.
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }

    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here.
                let len = min;

                if len == 0 {
                    return &mut [];
                }

                let mem = self.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => {
                cold_path(move || -> &mut [T] {
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then forgetting
                    // the content of the SmallVec.
                    unsafe {
                        let len = vec.len();
                        let start_ptr =
                            self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}

/// Calls the destructor for an object when dropped.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}

/// An arena which can be used to allocate any type.
///
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a `PhantomData<T>`
/// alongside this arena for each type `T` you allocate.
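///
/// A sketch of the intended pattern (illustrative only, not a doctest):
///
/// ```ignore
/// struct MyAllocators {
///     drop_arena: DropArena,
///     // Records that `String` values are allocated (and dropped) in `drop_arena`.
///     _strings: PhantomData<String>,
/// }
///
/// let allocs = MyAllocators { drop_arena: DropArena::default(), _strings: PhantomData };
/// let s = unsafe { allocs.drop_arena.alloc(String::from("owned by the arena")) };
/// ```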
#[derive(Default)]
pub struct DropArena {
    /// A list of destructors to run when the arena drops.
    /// Ordered so `destructors` gets dropped before the arena
    /// since its destructor can reference memory in the arena.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}

impl DropArena {
    #[inline]
    pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
        let mem = self.arena.alloc_raw(Layout::new::<T>()) as *mut T;
        // Write into uninitialized memory.
        ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        self.destructors
            .borrow_mut()
            .push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
        result
    }

    #[inline]
    pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        let len = vec.len();

        let start_ptr = self.arena.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;

        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors so we can't panic while adding them.
        destructors.reserve(len);

        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));

        // Record the destructors after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        for i in 0..len {
            destructors
                .push(DropType { drop_fn: drop_for_type::<T>, obj: start_ptr.add(i) as *mut u8 });
        }

        slice::from_raw_parts_mut(start_ptr, len)
    }
}

#[macro_export]
macro_rules! arena_for_type {
    ([][$ty:ty]) => {
        $crate::TypedArena<$ty>
    };
    ([few $(, $attrs:ident)*][$ty:ty]) => {
        ::std::marker::PhantomData<$ty>
    };
    ([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
        $crate::arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! which_arena_for_type {
    ([][$arena:expr]) => {
        ::std::option::Option::Some($arena)
    };
    ([few$(, $attrs:ident)*][$arena:expr]) => {
        ::std::option::Option::None
    };
    ([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
        $crate::which_arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! declare_arena {
    ([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
        #[derive(Default)]
        pub struct Arena<$tcx> {
            pub dropless: $crate::DroplessArena,
            drop: $crate::DropArena,
            $($name: $crate::arena_for_type!($a[$ty]),)*
        }

        pub trait ArenaAllocatable<'tcx, T = Self>: Sized {
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self;
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self];
        }

        impl<'tcx, T: Copy> ArenaAllocatable<'tcx, ()> for T {
            #[inline]
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
                arena.dropless.alloc(self)
            }
            #[inline]
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self] {
                arena.dropless.alloc_from_iter(iter)
            }
        }

        $(
            impl<$tcx> ArenaAllocatable<$tcx, $ty> for $ty {
                #[inline]
                fn allocate_on<'a>(self, arena: &'a Arena<$tcx>) -> &'a mut Self {
                    if !::std::mem::needs_drop::<Self>() {
                        return arena.dropless.alloc(self);
                    }
                    match $crate::which_arena_for_type!($a[&arena.$name]) {
                        ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                            ty_arena.alloc(self)
                        }
                        ::std::option::Option::None => unsafe { arena.drop.alloc(self) },
                    }
                }
                #[inline]
                fn allocate_from_iter<'a>(
                    arena: &'a Arena<$tcx>,
                    iter: impl ::std::iter::IntoIterator<Item = Self>,
                ) -> &'a mut [Self] {
                    if !::std::mem::needs_drop::<Self>() {
                        return arena.dropless.alloc_from_iter(iter);
                    }
                    match $crate::which_arena_for_type!($a[&arena.$name]) {
                        ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                            ty_arena.alloc_from_iter(iter)
                        }
                        ::std::option::Option::None => unsafe { arena.drop.alloc_from_iter(iter) },
                    }
                }
            }
        )*

        impl<'tcx> Arena<'tcx> {
            #[inline]
            pub fn alloc<T: ArenaAllocatable<'tcx, U>, U>(&self, value: T) -> &mut T {
                value.allocate_on(self)
            }

            #[inline]
            pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
                if value.is_empty() {
                    return &mut [];
                }
                self.dropless.alloc_slice(value)
            }

            pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, U>, U>(
                &'a self,
                iter: impl ::std::iter::IntoIterator<Item = T>,
            ) -> &'a mut [T] {
                T::allocate_from_iter(self, iter)
            }
        }
    }
}