//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements `TypedArena`, a simple arena that can only hold
//! objects of a single type.
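//!
//! A minimal sketch of the intended usage (marked `ignore` since this crate
//! is internal to rustc and not run as a standalone doctest):
//!
//! ```ignore
//! use arena::TypedArena;
//!
//! let arena: TypedArena<u32> = TypedArena::default();
//! let x: &mut u32 = arena.alloc(42);
//! *x += 1;
//! // `x` stays valid until `arena` is dropped; individual values cannot
//! // be deallocated early.
//! assert_eq!(*x, 43);
//! ```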

#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use rustc_data_structures::cold_path;
use smallvec::SmallVec;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,

    /// The number of valid entries in the chunk.
    entries: usize,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity), entries: 0 }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.capacity())
            }
        }
    }
}

const PAGE: usize = 4096;

impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
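    ///
    /// A minimal sketch of the fast path (marked `ignore`; this crate is
    /// internal to rustc, so the example is not run as a doctest):
    ///
    /// ```ignore
    /// let arena: TypedArena<u64> = TypedArena::default();
    /// // Each call bumps `ptr` by one `T`; `grow` only runs when `ptr`
    /// // reaches `end`.
    /// let a = arena.alloc(1);
    /// let b = arena.alloc(2);
    /// assert_eq!((*a, *b), (1, 2));
    /// ```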
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[inline]
    fn can_allocate(&self, len: usize) -> bool {
        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
        available_capacity_bytes >= at_least_bytes
    }

    /// Ensures there's enough space in the current chunk to fit `len` objects.
    #[inline]
    fn ensure_capacity(&self, len: usize) {
        if !self.can_allocate(len) {
            self.grow(len);
            debug_assert!(self.can_allocate(len));
        }
    }

    #[inline]
    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
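    ///
    /// For example (`ignore`: not run as a doctest, since this crate is
    /// internal to rustc):
    ///
    /// ```ignore
    /// let arena: TypedArena<u32> = TypedArena::default();
    /// let copied = arena.alloc_slice(&[1, 2, 3]);
    /// copied[0] = 10;
    /// assert_eq!(copied, &[10, 2, 3]);
    /// ```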
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        unsafe {
            let len = slice.len();
            let start_ptr = self.alloc_raw_slice(len);
            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        unsafe {
            let len = vec.len();
            let start_ptr = self.alloc_raw_slice(len);
            vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            vec.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
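
    // For intuition (illustrative figures): with `PAGE` = 4096 and
    // `size_of::<T>()` = 8, the first chunk holds 4096 / 8 = 512 entries,
    // and each later chunk at least doubles its predecessor's capacity
    // (1024, 2048, ...), so `n` allocations need only O(log n) chunk
    // allocations.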
    #[inline(never)]
    #[cold]
    fn grow(&self, n: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
                last_chunk.entries = currently_used_cap;
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.capacity();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= currently_used_cap + n {
                            break;
                        }
                    }
                }
            } else {
                let elem_size = cmp::max(1, mem::size_of::<T>());
                new_capacity = cmp::max(n, PAGE / elem_size);
            }
            chunk = TypedArenaChunk::<T>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
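    ///
    /// For example (`ignore`: not run as a doctest):
    ///
    /// ```ignore
    /// let mut arena: TypedArena<String> = TypedArena::default();
    /// arena.alloc(String::from("hello"));
    /// arena.clear(); // drops the `String`, keeps the last chunk for reuse
    /// arena.alloc(String::from("world")); // no fresh chunk needed
    /// ```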
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
                    chunk.destroy(chunk.entries);
                }
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty,
    // unlike all other chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}
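
/// An arena that can hold objects of multiple types, as long as they do not
/// need `Drop` to run (its `alloc` asserts `!mem::needs_drop::<T>()`). A
/// minimal sketch of usage (`ignore`: not run as a doctest):
///
/// ```ignore
/// let arena = DroplessArena::default();
/// let n: &mut usize = arena.alloc(1usize);
/// let s: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
/// // arena.alloc(String::from("hi")) would panic: `String` needs drop.
/// ```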
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    #[inline]
    fn align(&self, align: usize) {
        // Round `ptr` up to the next multiple of `align` (a power of two),
        // e.g. ptr = 0x1003, align = 8 -> 0x1008.
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }

    #[inline(never)]
    #[cold]
    fn grow(&self, needed_bytes: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.capacity();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= used_bytes + needed_bytes {
                            break;
                        }
                    }
                }
            } else {
                new_capacity = cmp::max(needed_bytes, PAGE);
            }
            chunk = TypedArenaChunk::<u8>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
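
    /// Allocates `bytes` bytes of raw storage, aligned to `align`; the typed
    /// entry points below layer on top of this. A sketch (`ignore`: not run
    /// as a doctest):
    ///
    /// ```ignore
    /// let arena = DroplessArena::default();
    /// let raw: &mut [u8] = arena.alloc_raw(16, 8); // 16 bytes, 8-aligned
    /// assert_eq!(raw.len(), 16);
    /// ```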
    #[inline]
    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
        unsafe {
            assert!(bytes != 0);

            self.align(align);

            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
            if (future_end as *mut u8) >= self.end.get() {
                self.grow(bytes);
            }

            let ptr = self.ptr.get();
            // Set the pointer past ourselves.
            self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
            slice::from_raw_parts_mut(ptr, bytes)
        }
    }

    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
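    ///
    /// For example (`ignore`: not run as a doctest):
    ///
    /// ```ignore
    /// let arena = DroplessArena::default();
    /// let nums = arena.alloc_slice(&[1u32, 2, 3]);
    /// nums[2] = 30;
    /// ```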
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
            as *mut T;

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }

    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators.
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`.
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }
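
    // `alloc_from_iter` has two paths: if the iterator reports an exact size
    // hint (e.g. iterating a `Vec` yields `(len, Some(len))`), the elements
    // are written straight into arena memory via `write_from_iter`;
    // otherwise they are buffered in a `SmallVec` first and copied over.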
    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here.
                let len = min;

                if len == 0 {
                    return &mut [];
                }

                let size = len.checked_mul(mem::size_of::<T>()).unwrap();
                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => cold_path(move || -> &mut [T] {
                let mut vec: SmallVec<[_; 8]> = iter.collect();
                if vec.is_empty() {
                    return &mut [];
                }
                // Move the content to the arena by copying it and then forgetting
                // the content of the SmallVec.
                unsafe {
                    let len = vec.len();
                    let start_ptr =
                        self.alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
                            as *mut T;
                    vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                    vec.set_len(0);
                    slice::from_raw_parts_mut(start_ptr, len)
                }
            }),
        }
    }
}

/// Calls the destructor for an object when dropped.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}

/// An arena which can be used to allocate any type.
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a `PhantomData<T>`
/// alongside this arena for each type `T` you allocate.
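///
/// A sketch of that contract (`ignore`: not run as a doctest; the wrapper
/// type and field names here are hypothetical):
///
/// ```ignore
/// use std::marker::PhantomData;
///
/// struct MyArenas {
///     drop_arena: DropArena,
///     _strings: PhantomData<String>, // one marker per allocated type
/// }
///
/// let arenas = MyArenas { drop_arena: DropArena::default(), _strings: PhantomData };
/// // Safety: `MyArenas` records `String` via the `PhantomData` above.
/// let s = unsafe { arenas.drop_arena.alloc(String::from("hi")) };
/// ```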
#[derive(Default)]
pub struct DropArena {
    /// A list of destructors to run when the arena drops.
    /// Ordered so `destructors` gets dropped before the arena
    /// since its destructor can reference memory in the arena.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}

impl DropArena {
    #[inline]
    pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
        let mem =
            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
        // Write into uninitialized memory.
        ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        self.destructors
            .borrow_mut()
            .push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
        result
    }

    #[inline]
    pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        let len = vec.len();

        let start_ptr = self
            .arena
            .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
            as *mut _ as *mut T;

        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors so we can't panic while adding them.
        destructors.reserve(len);

        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));

        // Record the destructors after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        for i in 0..len {
            destructors.push(DropType {
                drop_fn: drop_for_type::<T>,
                obj: start_ptr.offset(i as isize) as *mut u8,
            });
        }

        slice::from_raw_parts_mut(start_ptr, len)
    }
}

#[macro_export]
macro_rules! arena_for_type {
    ([][$ty:ty]) => {
        $crate::TypedArena<$ty>
    };
    ([few $(, $attrs:ident)*][$ty:ty]) => {
        ::std::marker::PhantomData<$ty>
    };
    ([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
        $crate::arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! which_arena_for_type {
    ([][$arena:expr]) => {
        ::std::option::Option::Some($arena)
    };
    ([few$(, $attrs:ident)*][$arena:expr]) => {
        ::std::option::Option::None
    };
    ([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
        $crate::which_arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! declare_arena {
    ([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
        #[derive(Default)]
        pub struct Arena<$tcx> {
            pub dropless: $crate::DroplessArena,
            drop: $crate::DropArena,
            $($name: $crate::arena_for_type!($a[$ty]),)*
        }

        pub trait ArenaAllocatable {}

        impl<T: Copy> ArenaAllocatable for T {}

        unsafe trait ArenaField<'tcx>: Sized {
            /// Returns a specific arena to allocate from.
            /// If `None` is returned, the `DropArena` will be used.
            fn arena<'a>(arena: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>>;
        }

        unsafe impl<'tcx, T> ArenaField<'tcx> for T {
            #[inline]
            default fn arena<'a>(_: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>> {
                panic!()
            }
        }

        $(
            #[allow(unused_lifetimes)]
            impl<$tcx> ArenaAllocatable for $ty {}
            unsafe impl<$tcx> ArenaField<$tcx> for $ty {
                #[inline]
                fn arena<'a>(_arena: &'a Arena<$tcx>) -> Option<&'a $crate::TypedArena<Self>> {
                    $crate::which_arena_for_type!($a[&_arena.$name])
                }
            }
        )*

        impl<'tcx> Arena<'tcx> {
            #[inline]
            pub fn alloc<T: ArenaAllocatable>(&self, value: T) -> &mut T {
                if !::std::mem::needs_drop::<T>() {
                    return self.dropless.alloc(value);
                }
                match <T as ArenaField<'tcx>>::arena(self) {
                    ::std::option::Option::Some(arena) => arena.alloc(value),
                    ::std::option::Option::None => unsafe { self.drop.alloc(value) },
                }
            }

            #[inline]
            pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
                if value.is_empty() {
                    return &mut [];
                }
                self.dropless.alloc_slice(value)
            }

            pub fn alloc_from_iter<'a, T: ArenaAllocatable>(
                &'a self,
                iter: impl ::std::iter::IntoIterator<Item = T>,
            ) -> &'a mut [T] {
                if !::std::mem::needs_drop::<T>() {
                    return self.dropless.alloc_from_iter(iter);
                }
                match <T as ArenaField<'tcx>>::arena(self) {
                    ::std::option::Option::Some(arena) => arena.alloc_from_iter(iter),
                    ::std::option::Option::None => unsafe { self.drop.alloc_from_iter(iter) },
                }
            }
        }
    };
}
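
// A sketch of how a consumer crate might invoke `declare_arena!` (the field
// list below is hypothetical; real invocations live elsewhere in rustc):
//
//     declare_arena!([], [
//         [] layout: rustc::ty::layout::LayoutDetails,
//         [few] hir_map: rustc::hir::map::Map<'tcx>,
//     ], 'tcx);
//
// `[]` gives the type its own `TypedArena` field; `[few]` stores only a
// `PhantomData` and routes allocations to the shared `DropArena`.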