//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements several kinds of arena.
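//!
//! For example, a `TypedArena` hands out references that live as long as the
//! arena itself; a minimal usage sketch:
//!
//! ```ignore
//! let arena: TypedArena<u32> = TypedArena::default();
//! let x = arena.alloc(41);
//! *x += 1;
//! assert_eq!(*x, 42);
//! ```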
#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]
extern crate alloc;

use rustc_data_structures::cold_path;
use smallvec::SmallVec;

use std::alloc::Layout;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;
/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}
struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,

    /// The number of valid entries in the chunk.
    entries: usize,
}
impl<T> TypedArenaChunk<T> {
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity), entries: 0 }
    }
    /// Destroys this arena chunk.
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }
    // Returns a pointer to the first allocated object.
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }
    // Returns a pointer to the end of the allocated space.
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.capacity())
            }
        }
    }
}
// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
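
// For example (illustrative, ignoring the `additional` lower bound in grow()):
// with 1-byte elements, successive chunk capacities are 4096, 8192, 16384, ...,
// 1 MiB, 2 MiB, and then stay at 2 MiB once HUGE_PAGE is reached.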
impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}
impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }
    fn can_allocate(&self, additional: usize) -> bool {
        let available_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
        available_bytes >= additional_bytes
    }
    /// Ensures there's enough space in the current chunk to fit `additional` objects.
    fn ensure_capacity(&self, additional: usize) {
        if !self.can_allocate(additional) {
            self.grow(additional);
            debug_assert!(self.can_allocate(additional));
        }
    }
    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }
    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        unsafe {
            let len = slice.len();
            let start_ptr = self.alloc_raw_slice(len);
            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        unsafe {
            let len = vec.len();
            let start_ptr = self.alloc_raw_slice(len);
            vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            vec.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
    fn grow(&self, additional: usize) {
        unsafe {
            // We need the element size to convert chunk sizes (ranging from
            // PAGE to HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                last_chunk.entries = used_bytes / mem::size_of::<T>();

                // If the previous chunk's capacity is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.capacity();
                if new_cap < HUGE_PAGE / elem_size {
                    new_cap = new_cap.checked_mul(2).unwrap();
                }
            } else {
                new_cap = PAGE / elem_size;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let chunk = TypedArenaChunk::<T>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
                    chunk.destroy(chunk.entries);
                }
            }
        }
    }
    // Drops the contents of the last chunk. The last chunk is partially empty,
    // unlike all other chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}
unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}
unsafe impl<T: Send> Send for TypedArena<T> {}
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}
unsafe impl Send for DroplessArena {}
impl Default for DroplessArena {
    fn default() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}
impl DroplessArena {
    fn grow(&self, additional: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // There is no need to update `last_chunk.entries` because that
                // field isn't used by `DroplessArena`.

                // If the previous chunk's capacity is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.capacity();
                if new_cap < HUGE_PAGE {
                    new_cap = new_cap.checked_mul(2).unwrap();
                }
            } else {
                new_cap = PAGE;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let chunk = TypedArenaChunk::<u8>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
    /// Allocates a byte slice with specified layout from the current memory
    /// chunk. Returns `None` if there is no free space left to satisfy the
    /// request.
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
        let ptr = self.ptr.get() as usize;
        let end = self.end.get() as usize;
        let align = layout.align();
        let bytes = layout.size();
        // The allocation request fits into the current chunk iff:
        //
        // let aligned = align_to(ptr, align);
        // ptr <= aligned && aligned + bytes <= end
        //
        // Except that we work with fixed width integers and need to be careful
        // about potential overflow in the calculation. If the overflow does
        // happen, then we definitely don't have enough free space and need to
        // grow the arena.
        let aligned = ptr.checked_add(align - 1)? & !(align - 1);
        let new_ptr = aligned.checked_add(bytes)?;
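        // For example (illustrative): with `ptr` = 0x1003 and `align` = 8,
        // `aligned` is (0x1003 + 7) & !7 = 0x1008, i.e. `ptr` rounded up to the
        // next multiple of the alignment, and `new_ptr` is that plus `bytes`.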
        if new_ptr <= end {
            self.ptr.set(new_ptr as *mut u8);
            Some(aligned as *mut u8)
        } else {
            None
        }
    }
    pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        assert!(layout.size() != 0);
        loop {
            if let Some(a) = self.alloc_raw_without_grow(layout) {
                break a;
            }
            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow(layout.size());
        }
    }
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }
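
    // Example usage (illustrative): the dropless arena accepts any type that
    // doesn't need dropping, e.g.
    //
    //     let arena = DroplessArena::default();
    //     let x: &mut u32 = arena.alloc(7);
    //     let xs: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);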
    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

        unsafe {
            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            slice::from_raw_parts_mut(mem, slice.len())
        }
    }
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // some cases.
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`.
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here.
                let len = min;

                if len == 0 {
                    return &mut [];
                }

                let mem = self.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => {
                cold_path(move || -> &mut [T] {
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then forgetting
                    // the content of the SmallVec.
                    unsafe {
                        let len = vec.len();
                        let start_ptr =
                            self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}
/// Calls the destructor for an object when dropped.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}
/// An arena which can be used to allocate any type.
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a PhantomData<T>
/// alongside this arena for each type T you allocate.
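///
/// A minimal sketch of that pattern (illustrative only; `StringArena` is not
/// part of this crate):
///
/// ```ignore
/// struct StringArena {
///     arena: DropArena,
///     _owns: PhantomData<String>,
/// }
///
/// impl StringArena {
///     fn alloc(&self, s: String) -> &String {
///         // Sound because `_owns` ties allocated `String`s to this arena.
///         unsafe { self.arena.alloc(s) }
///     }
/// }
/// ```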
#[derive(Default)]
pub struct DropArena {
    /// A list of destructors to run when the arena drops.
    /// Ordered so `destructors` gets dropped before the arena
    /// since its destructor can reference memory in the arena.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}
impl DropArena {
    pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
        let mem = self.arena.alloc_raw(Layout::new::<T>()) as *mut T;
        // Write into uninitialized memory.
        ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        self.destructors
            .borrow_mut()
            .push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
        result
    }
    pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        let len = vec.len();

        let start_ptr = self.arena.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;

        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors so we can't panic while adding them.
        destructors.reserve(len);

        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));

        // Record the destructors after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        for i in 0..len {
            destructors.push(DropType {
                drop_fn: drop_for_type::<T>,
                obj: start_ptr.offset(i as isize) as *mut u8,
            });
        }

        slice::from_raw_parts_mut(start_ptr, len)
    }
}
#[macro_export]
macro_rules! arena_for_type {
    ([][$ty:ty]) => {
        $crate::TypedArena<$ty>
    };
    ([few $(, $attrs:ident)*][$ty:ty]) => {
        ::std::marker::PhantomData<$ty>
    };
    ([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
        $crate::arena_for_type!([$($attrs),*]$args)
    };
}
#[macro_export]
macro_rules! which_arena_for_type {
    ([][$arena:expr]) => {
        ::std::option::Option::Some($arena)
    };
    ([few$(, $attrs:ident)*][$arena:expr]) => {
        ::std::option::Option::None
    };
    ([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
        $crate::which_arena_for_type!([$($attrs),*]$args)
    };
}
#[macro_export]
macro_rules! declare_arena {
    ([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
        #[derive(Default)]
        pub struct Arena<$tcx> {
            pub dropless: $crate::DroplessArena,
            drop: $crate::DropArena,
            $($name: $crate::arena_for_type!($a[$ty]),)*
        }
        pub trait ArenaAllocatable<'tcx, T = Self>: Sized {
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self;
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self];
        }
        impl<'tcx, T: Copy> ArenaAllocatable<'tcx, ()> for T {
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
                arena.dropless.alloc(self)
            }
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self] {
                arena.dropless.alloc_from_iter(iter)
            }
        }
        $(
            impl<$tcx> ArenaAllocatable<$tcx, $ty> for $ty {
                fn allocate_on<'a>(self, arena: &'a Arena<$tcx>) -> &'a mut Self {
                    if !::std::mem::needs_drop::<Self>() {
                        return arena.dropless.alloc(self);
                    }
                    match $crate::which_arena_for_type!($a[&arena.$name]) {
                        ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                            ty_arena.alloc(self)
                        }
                        ::std::option::Option::None => unsafe { arena.drop.alloc(self) },
                    }
                }
                fn allocate_from_iter<'a>(
                    arena: &'a Arena<$tcx>,
                    iter: impl ::std::iter::IntoIterator<Item = Self>,
                ) -> &'a mut [Self] {
                    if !::std::mem::needs_drop::<Self>() {
                        return arena.dropless.alloc_from_iter(iter);
                    }
                    match $crate::which_arena_for_type!($a[&arena.$name]) {
                        ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                            ty_arena.alloc_from_iter(iter)
                        }
                        ::std::option::Option::None => unsafe { arena.drop.alloc_from_iter(iter) },
                    }
                }
            }
        )*
        impl<'tcx> Arena<'tcx> {
            pub fn alloc<T: ArenaAllocatable<'tcx, U>, U>(&self, value: T) -> &mut T {
                value.allocate_on(self)
            }

            pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
                if value.is_empty() {
                    return &mut [];
                }
                self.dropless.alloc_slice(value)
            }

            pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, U>, U>(
                &'a self,
                iter: impl ::std::iter::IntoIterator<Item = T>,
            ) -> &'a mut [T] {
                T::allocate_from_iter(self, iter)
            }
        }
    };
}