//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements `TypedArena`, a simple arena that can only hold
//! objects of a single type.
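//!
//! A minimal usage sketch (illustrative only, so it is not run as a doctest):
//!
//! ```ignore
//! let arena: TypedArena<u32> = TypedArena::default();
//! let x: &mut u32 = arena.alloc(41);
//! *x += 1;
//! assert_eq!(*x, 42);
//! // All allocated values are dropped together when `arena` is dropped.
//! ```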

#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]

extern crate alloc;

use rustc_data_structures::cold_path;
use smallvec::SmallVec;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
    /// The number of valid entries in the chunk.
    entries: usize,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity), entries: 0 }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.capacity())
            }
        }
    }
}
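
// Chunks start out at roughly one page in size and double in capacity as the
// arena grows; see `grow`.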
const PAGE: usize = 4096;

impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to null so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[inline]
    fn can_allocate(&self, len: usize) -> bool {
        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
        available_capacity_bytes >= at_least_bytes
    }

    /// Ensures there's enough space in the current chunk to fit `len` objects.
    #[inline]
    fn ensure_capacity(&self, len: usize) {
        if !self.can_allocate(len) {
            self.grow(len);
            debug_assert!(self.can_allocate(len));
        }
    }

    #[inline]
    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`,
    /// returning a mutable reference to it.
    ///
    /// Panics on:
    ///
    /// - Zero-sized types
    /// - Zero-length slices
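    ///
    /// A minimal sketch of intended use (illustrative, not run as a doctest):
    ///
    /// ```ignore
    /// let arena: TypedArena<u32> = TypedArena::default();
    /// let copied: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
    /// assert_eq!(copied, &[1, 2, 3]);
    /// ```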
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        unsafe {
            let len = slice.len();
            let start_ptr = self.alloc_raw_slice(len);
            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

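    /// Allocates the contents of `iter` in the arena, returning a mutable
    /// slice of the allocated items. Panics on zero-sized types.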
    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        unsafe {
            let len = vec.len();
            let start_ptr = self.alloc_raw_slice(len);
            vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            vec.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    /// Grows the arena.
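    ///
    /// First tries to extend the current chunk in place; if that fails, a new
    /// chunk is allocated whose capacity doubles until `n` additional objects
    /// fit. The very first chunk is sized at roughly one page (`PAGE` bytes).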
    #[inline(never)]
    #[cold]
    fn grow(&self, n: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
                last_chunk.entries = currently_used_cap;
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.capacity();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= currently_used_cap + n {
                            break;
                        }
                    }
                }
            } else {
                let elem_size = cmp::max(1, mem::size_of::<T>());
                new_capacity = cmp::max(n, PAGE / elem_size);
            }
            chunk = TypedArenaChunk::<T>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk, which may be reused.
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is a ZST, the loop below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
                    chunk.destroy(chunk.entries);
                }
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty,
    // unlike all other chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is a ZST. It can't have a drop flag, so the value here doesn't
            // matter. We get the number of zero-sized values in the last and only
            // chunk, just out of caution. Recall that `ptr` (whose current value
            // `end` holds) is bumped by one for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}
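
// `TypedArena` stores raw pointers, so it is not automatically `Send`; since
// it owns its `T`s, sending it across threads is sound exactly when `T: Send`.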
unsafe impl<T: Send> Send for TypedArena<T> {}

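/// An arena that can hold objects of any type, as long as none of them needs
/// to be dropped: the allocation methods assert `!mem::needs_drop::<T>()`.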
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    #[inline]
    fn align(&self, align: usize) {
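        // Round `ptr` up to the next multiple of `align` (assumed to be a
        // power of two) with the standard `(x + a - 1) & !(a - 1)` bit trick.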
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }

    #[inline(never)]
    #[cold]
    fn grow(&self, needed_bytes: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.capacity();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= used_bytes + needed_bytes {
                            break;
                        }
                    }
                }
            } else {
                new_capacity = cmp::max(needed_bytes, PAGE);
            }
            chunk = TypedArenaChunk::<u8>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    #[inline]
    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
        unsafe {
            assert!(bytes != 0);

            self.align(align);

            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
            if (future_end as *mut u8) >= self.end.get() {
                self.grow(bytes);
            }

            let ptr = self.ptr.get();
            // Advance the pointer past this allocation.
            self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
            slice::from_raw_parts_mut(ptr, bytes)
        }
    }
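
    /// Allocates `object` in the arena; `T` must not need dropping.
    ///
    /// A minimal sketch of intended use (illustrative, not run as a doctest).
    /// Because the arena is untyped, values of different types can share it:
    ///
    /// ```ignore
    /// let arena = DroplessArena::default();
    /// let a: &mut u64 = arena.alloc(7u64);
    /// let b: &mut (u32, u32) = arena.alloc((1, 2));
    /// ```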
    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`,
    /// returning a mutable reference to it.
    ///
    /// Panics on:
    ///
    /// - Zero-sized types
    /// - Zero-length slices
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
            as *mut T;

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }

    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators.
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`.
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }

    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here.
                let len = min;

                if len == 0 {
                    return &mut [];
                }
                let size = len.checked_mul(mem::size_of::<T>()).unwrap();
                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => {
                cold_path(move || -> &mut [T] {
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then forgetting
                    // the content of the SmallVec.
                    unsafe {
                        let len = vec.len();
                        let start_ptr = self
                            .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
                            as *mut _ as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}

/// Calls the destructor for an object when dropped.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

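// Monomorphized drop shim: `drop_for_type::<T>` erases the concrete type
// behind a `*mut u8` so that a single `Vec<DropType>` can record destructors
// for values of many different types.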
unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}

/// An arena which can be used to allocate any type.
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a `PhantomData<T>`
/// alongside this arena for each type `T` you allocate.
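///
/// A minimal sketch of that pattern (illustrative, not run as a doctest):
///
/// ```ignore
/// struct Owner {
///     arena: DropArena,
///     // Records that `String`s live in (and are dropped by) `arena`.
///     _strings: PhantomData<String>,
/// }
///
/// let owner = Owner { arena: DropArena::default(), _strings: PhantomData };
/// let s = unsafe { owner.arena.alloc(String::from("hello")) };
/// ```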
#[derive(Default)]
pub struct DropArena {
    /// A list of destructors to run when the arena drops.
    /// Ordered so `destructors` gets dropped before the arena
    /// since its destructor can reference memory in the arena.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}

impl DropArena {
    #[inline]
    pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
        let mem =
            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
        // Write into uninitialized memory.
        ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor only after the allocation succeeds: the
        // allocation may panic, and recording the destructor first would then
        // cause `object`'s destructor to run twice.
        self.destructors
            .borrow_mut()
            .push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
        result
    }

    #[inline]
    pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        let len = vec.len();

        let start_ptr = self
            .arena
            .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
            as *mut _ as *mut T;

        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors up front so pushing them cannot panic.
        destructors.reserve(len);

        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));

        // Record the destructors only after the allocation succeeds: the
        // allocation may panic, and recording them first would then cause the
        // values' destructors to run twice.
        for i in 0..len {
            destructors.push(DropType {
                drop_fn: drop_for_type::<T>,
                obj: start_ptr.offset(i as isize) as *mut u8,
            });
        }

        slice::from_raw_parts_mut(start_ptr, len)
    }
}

#[macro_export]
macro_rules! arena_for_type {
    ([][$ty:ty]) => {
        $crate::TypedArena<$ty>
    };
    ([few $(, $attrs:ident)*][$ty:ty]) => {
        ::std::marker::PhantomData<$ty>
    };
    ([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
        $crate::arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! which_arena_for_type {
    ([][$arena:expr]) => {
        ::std::option::Option::Some($arena)
    };
    ([few$(, $attrs:ident)*][$arena:expr]) => {
        ::std::option::Option::None
    };
    ([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
        $crate::which_arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! declare_arena {
    ([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
        #[derive(Default)]
        pub struct Arena<$tcx> {
            pub dropless: $crate::DroplessArena,
            drop: $crate::DropArena,
            $($name: $crate::arena_for_type!($a[$ty]),)*
        }

        #[marker]
        pub trait ArenaAllocatable {}

        impl<T: Copy> ArenaAllocatable for T {}

        unsafe trait ArenaField<'tcx>: Sized {
            /// Returns a specific arena to allocate from.
            /// If `None` is returned, the `DropArena` will be used.
            fn arena<'a>(arena: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>>;
        }

        unsafe impl<'tcx, T> ArenaField<'tcx> for T {
            #[inline]
            default fn arena<'a>(_: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>> {
                panic!()
            }
        }

        $(
            #[allow(unused_lifetimes)]
            impl<$tcx> ArenaAllocatable for $ty {}
            unsafe impl<$tcx> ArenaField<$tcx> for $ty {
                #[inline]
                fn arena<'a>(_arena: &'a Arena<$tcx>) -> Option<&'a $crate::TypedArena<Self>> {
                    $crate::which_arena_for_type!($a[&_arena.$name])
                }
            }
        )*

        impl<'tcx> Arena<'tcx> {
            #[inline]
            pub fn alloc<T: ArenaAllocatable>(&self, value: T) -> &mut T {
                if !::std::mem::needs_drop::<T>() {
                    return self.dropless.alloc(value);
                }
                match <T as ArenaField<'tcx>>::arena(self) {
                    ::std::option::Option::Some(arena) => arena.alloc(value),
                    ::std::option::Option::None => unsafe { self.drop.alloc(value) },
                }
            }

            #[inline]
            pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
                if value.is_empty() {
                    return &mut [];
                }
                self.dropless.alloc_slice(value)
            }

            pub fn alloc_from_iter<'a, T: ArenaAllocatable>(
                &'a self,
                iter: impl ::std::iter::IntoIterator<Item = T>,
            ) -> &'a mut [T] {
                if !::std::mem::needs_drop::<T>() {
                    return self.dropless.alloc_from_iter(iter);
                }
                match <T as ArenaField<'tcx>>::arena(self) {
                    ::std::option::Option::Some(arena) => arena.alloc_from_iter(iter),
                    ::std::option::Option::None => unsafe { self.drop.alloc_from_iter(iter) },
                }
            }
        }
    }
}
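
// Illustrative (hypothetical) invocation of `declare_arena!`; real callers
// supply their own list of `$a $name: $ty,` entries, where `$a` is `[]` for a
// dedicated `TypedArena` field or `[few]` for types routed to the `DropArena`:
//
//     declare_arena!([], [
//         [] spans: Vec<u32>,
//         [few] maps: std::collections::HashMap<u32, u32>,
//     ], 'tcx);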

#[cfg(test)]
mod tests;