//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements several kinds of arena.
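//!
//! A minimal sketch of typical use (illustrative only, not a doctest from the
//! original crate):
//!
//! ```ignore (illustrative)
//! use rustc_arena::TypedArena;
//!
//! let arena = TypedArena::<u32>::default();
//! let x = arena.alloc(42); // fast pointer-bump allocation
//! let xs = arena.alloc_from_iter(0..10);
//! assert_eq!(*x, 42);
//! assert_eq!(xs.len(), 10);
//! // All values are dropped together when `arena` goes out of scope.
//! ```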

#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(dropck_eyepatch)]
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]
#![feature(min_specialization)]
#![feature(decl_macro)]
#![feature(rustc_attrs)]
#![cfg_attr(test, feature(test))]

use rustc_data_structures::sync;
use smallvec::SmallVec;

use std::alloc::Layout;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::marker::{PhantomData, Send};
use std::mem::{self, MaybeUninit};
use std::ptr;
use std::slice;

#[inline(never)]
#[cold]
fn cold_path<F: FnOnce() -> R, R>(f: F) -> R {
    f()
}

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: Box<[MaybeUninit<T>]>,
    /// The number of valid entries in the chunk.
    entries: usize,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&mut self) -> *mut T {
        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&mut self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.len())
            }
        }
    }
}

// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
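//
// As a worked example of the doubling policy above (assuming these defaults
// and `size_of::<T>() == 1`): successive chunk capacities are 4096, 8192,
// 16384, ..., 1 MiB, 2 MiB, and every chunk after that stays at 2 MiB.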

impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to null so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}

// `alloc_from_iter` routing: `min_specialization` lets `Vec`, arrays, and
// `SmallVec` bypass the generic collect-into-a-`SmallVec` fallback below.
trait IterExt<T> {
    fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T];
}

impl<I, T> IterExt<T> for I
where
    I: IntoIterator<Item = T>,
{
    #[inline]
    default fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T] {
        let vec: SmallVec<[_; 8]> = self.into_iter().collect();
        vec.alloc_from_iter(arena)
    }
}

impl<T, const N: usize> IterExt<T> for std::array::IntoIter<T, N> {
    #[inline]
    fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it
        unsafe {
            let start_ptr = arena.alloc_raw_slice(len);
            self.as_slice().as_ptr().copy_to_nonoverlapping(start_ptr, len);
            mem::forget(self);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}

impl<T> IterExt<T> for Vec<T> {
    #[inline]
    fn alloc_from_iter(mut self, arena: &TypedArena<T>) -> &mut [T] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it
        unsafe {
            let start_ptr = arena.alloc_raw_slice(len);
            self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            self.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}

impl<A: smallvec::Array> IterExt<A::Item> for SmallVec<A> {
    #[inline]
    fn alloc_from_iter(mut self, arena: &TypedArena<A::Item>) -> &mut [A::Item] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it
        unsafe {
            let start_ptr = arena.alloc_raw_slice(len);
            self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            self.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set((self.ptr.get() as *mut u8).wrapping_offset(1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[inline]
    fn can_allocate(&self, additional: usize) -> bool {
        let available_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
        available_bytes >= additional_bytes
    }

    /// Ensures there's enough space in the current chunk to fit `additional` objects.
    #[inline]
    fn ensure_capacity(&self, additional: usize) {
        if !self.can_allocate(additional) {
            self.grow(additional);
            debug_assert!(self.can_allocate(additional));
        }
    }

    #[inline]
    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }

    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        iter.alloc_from_iter(self)
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            // We need the element size to convert chunk sizes (ranging from
            // PAGE to HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // If a type is `!needs_drop`, we don't need to keep track of how many elements
                // the chunk stores - the field will be ignored anyway.
                if mem::needs_drop::<T>() {
                    let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                    last_chunk.entries = used_bytes / mem::size_of::<T>();
                }

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE / elem_size;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<T>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // Box handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

pub struct DroplessArena {
    /// A pointer to the start of the free space.
    start: Cell<*mut u8>,

    /// A pointer to the end of free space.
    ///
    /// The allocation proceeds from the end of the chunk towards the start.
    /// When this pointer crosses the start pointer, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // There is no need to update `last_chunk.entries` because that
                // field isn't used by `DroplessArena`.

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<u8>::new(new_cap);
            self.start.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Allocates a byte slice with specified layout from the current memory
    /// chunk. Returns `None` if there is no free space left to satisfy the
    /// request.
    #[inline]
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
        let start = self.start.get() as usize;
        let end = self.end.get() as usize;

        let align = layout.align();
        let bytes = layout.size();

        let new_end = end.checked_sub(bytes)? & !(align - 1);
        if start <= new_end {
            let new_end = new_end as *mut u8;
            self.end.set(new_end);
            Some(new_end)
        } else {
            None
        }
    }
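
    // A worked example of the downward-bump arithmetic above (illustrative
    // numbers, not from the original source): with `end = 0x1007`, `bytes = 4`,
    // and `align = 4`, `checked_sub` gives `0x1003`, and the mask `& !(4 - 1)`
    // rounds down to `0x1000`, an aligned pointer whose 4 bytes still fit
    // below the old `end`. Bumping downwards lets a single mask handle
    // alignment; bumping upwards would need an align-up plus an overflow check.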

    #[inline]
    pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        assert!(layout.size() != 0);
        loop {
            if let Some(a) = self.alloc_raw_without_grow(layout) {
                break a;
            }
            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow(layout.size());
        }
    }

    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`,
    /// returning a mutable reference to it.
    ///
    /// Panics:
    ///
    /// - Zero-sized types
    /// - Zero-length slices
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

        unsafe {
            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            slice::from_raw_parts_mut(mem, slice.len())
        }
    }
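
    // Illustrative use of the two allocation paths above (a sketch, not from
    // the original source):
    //
    //     let arena = DroplessArena::default();
    //     let n: &mut u32 = arena.alloc(7);
    //     let xs: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
    //
    // Neither call registers a destructor, which is why both assert
    // `!mem::needs_drop::<T>()`.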

    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }

    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here
                let len = min;

                if len == 0 {
                    return &mut [];
                }

                let mem = self.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => {
                cold_path(move || -> &mut [T] {
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then forgetting
                    // the content of the SmallVec
                    unsafe {
                        let len = vec.len();
                        let start_ptr =
                            self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}

/// Calls the destructor for an object when dropped.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

// SAFETY: we require `T: Send` before type-erasing into `DropType`.
#[cfg(parallel_compiler)]
unsafe impl sync::Send for DropType {}

impl DropType {
    #[inline]
    unsafe fn new<T: sync::Send>(obj: *mut T) -> Self {
        unsafe fn drop_for_type<T>(to_drop: *mut u8) {
            std::ptr::drop_in_place(to_drop as *mut T)
        }

        DropType { drop_fn: drop_for_type::<T>, obj: obj as *mut u8 }
    }
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}

/// An arena which can be used to allocate any type.
///
/// # Safety
///
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a `PhantomData<T>`
/// alongside this arena for each type `T` you allocate.
#[derive(Default)]
pub struct DropArena {
    /// A list of destructors to run when the arena drops.
    /// Ordered so `destructors` gets dropped before the arena
    /// since its destructor can reference memory in the arena.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}

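// A minimal sketch of the `PhantomData` discipline described above
// (hypothetical wrapper, not part of this crate); pinning the set of
// allocated types at the wrapper level is what lets callers expose a safe
// API over the unsafe `DropArena` methods:
//
//     struct MyArena {
//         drop: DropArena,
//         _marker: std::marker::PhantomData<MyType>, // records that only
//                                                    // `MyType` is allocated
//     }
//
// The `declare_arena!` macro below generates wrappers along these lines.
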
impl DropArena {
    #[inline]
    pub unsafe fn alloc<T>(&self, object: T) -> &mut T
    where
        T: sync::Send,
    {
        let mem = self.arena.alloc_raw(Layout::new::<T>()) as *mut T;
        // Write into uninitialized memory.
        ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        self.destructors.borrow_mut().push(DropType::new(result));
        result
    }

    #[inline]
    pub unsafe fn alloc_from_iter<T, I>(&self, iter: I) -> &mut [T]
    where
        T: sync::Send,
        I: IntoIterator<Item = T>,
    {
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        let len = vec.len();

        let start_ptr = self.arena.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;

        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors so we can't panic while adding them.
        destructors.reserve(len);

        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));

        // Record the destructors after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        for i in 0..len {
            destructors.push(DropType::new(start_ptr.add(i)));
        }

        slice::from_raw_parts_mut(start_ptr, len)
    }
}

pub macro arena_for_type {
    ([][$ty:ty]) => {
        $crate::TypedArena<$ty>
    },
    ([few $(, $attrs:ident)*][$ty:ty]) => {
        ::std::marker::PhantomData<$ty>
    },
    ([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
        $crate::arena_for_type!([$($attrs),*]$args)
    },
}

pub macro which_arena_for_type {
    ([][$arena:expr]) => {
        ::std::option::Option::Some($arena)
    },
    ([few$(, $attrs:ident)*][$arena:expr]) => {
        ::std::option::Option::None
    },
    ([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
        $crate::which_arena_for_type!([$($attrs),*]$args)
    },
}

#[rustc_macro_transparency = "semitransparent"]
pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) {
    #[derive(Default)]
    pub struct Arena<$tcx> {
        pub dropless: $crate::DroplessArena,
        drop: $crate::DropArena,
        $($name: $crate::arena_for_type!($a[$ty]),)*
    }

    pub trait ArenaAllocatable<'tcx, T = Self>: Sized {
        fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self;
        fn allocate_from_iter<'a>(
            arena: &'a Arena<'tcx>,
            iter: impl ::std::iter::IntoIterator<Item = Self>,
        ) -> &'a mut [Self];
    }

    impl<'tcx, T: Copy> ArenaAllocatable<'tcx, ()> for T {
        #[inline]
        fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
            arena.dropless.alloc(self)
        }
        #[inline]
        fn allocate_from_iter<'a>(
            arena: &'a Arena<'tcx>,
            iter: impl ::std::iter::IntoIterator<Item = Self>,
        ) -> &'a mut [Self] {
            arena.dropless.alloc_from_iter(iter)
        }
    }
    $(
        impl<$tcx> ArenaAllocatable<$tcx, $ty> for $ty {
            #[inline]
            fn allocate_on<'a>(self, arena: &'a Arena<$tcx>) -> &'a mut Self {
                if !::std::mem::needs_drop::<Self>() {
                    return arena.dropless.alloc(self);
                }
                match $crate::which_arena_for_type!($a[&arena.$name]) {
                    ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                        ty_arena.alloc(self)
                    }
                    ::std::option::Option::None => unsafe { arena.drop.alloc(self) },
                }
            }

            #[inline]
            fn allocate_from_iter<'a>(
                arena: &'a Arena<$tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self] {
                if !::std::mem::needs_drop::<Self>() {
                    return arena.dropless.alloc_from_iter(iter);
                }
                match $crate::which_arena_for_type!($a[&arena.$name]) {
                    ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                        ty_arena.alloc_from_iter(iter)
                    }
                    ::std::option::Option::None => unsafe { arena.drop.alloc_from_iter(iter) },
                }
            }
        }
    )*

    impl<'tcx> Arena<'tcx> {
        #[inline]
        pub fn alloc<T: ArenaAllocatable<'tcx, U>, U>(&self, value: T) -> &mut T {
            value.allocate_on(self)
        }

        #[inline]
        pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
            if value.is_empty() {
                return &mut [];
            }
            self.dropless.alloc_slice(value)
        }

        pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, U>, U>(
            &'a self,
            iter: impl ::std::iter::IntoIterator<Item = T>,
        ) -> &'a mut [T] {
            T::allocate_from_iter(self, iter)
        }
    }
}
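
// A hypothetical `declare_arena!` invocation (illustrative only; the real
// type list lives in a consumer crate). A leading `[]` routes a type to its
// own `TypedArena`, while `[few]` marks rarely-allocated types that fall back
// to `DropArena` via a `PhantomData` field:
//
//     declare_arena!([
//         [] layout: MyLayoutType,
//         [few] crate_metadata: MyCrateMetadata,
//     ], 'tcx);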

#[cfg(test)]
mod tests;