//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within them, all
//! at once, when the arena itself is destroyed. They do not support
//! deallocation of individual objects while the arena itself is still alive.
//! The benefit of an arena is very fast allocation: just a pointer bump.
//!
//! This crate implements `TypedArena`, a simple arena that can only hold
//! objects of a single type.
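//!
//! # Example
//!
//! A minimal usage sketch (assuming the crate is linked as `arena`; the
//! element type and values are illustrative):
//!
//! ```ignore
//! use arena::TypedArena;
//!
//! let arena: TypedArena<u32> = TypedArena::default();
//! let x = arena.alloc(42);                // fast path: just a pointer bump
//! let xs = arena.alloc_slice(&[1, 2, 3]);
//! assert_eq!(*x, 42);
//! assert_eq!(*xs, [1, 2, 3]);
//! // Everything is dropped together when `arena` goes out of scope.
//! ```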

#![doc(html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]

#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]

#![allow(deprecated)]

extern crate alloc;

use rustc_data_structures::cold_path;
use rustc_data_structures::sync::MTLock;
use smallvec::SmallVec;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
    /// The number of valid entries in the chunk.
    entries: usize,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk {
            storage: RawVec::with_capacity(capacity),
            entries: 0,
        }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.capacity())
            }
        }
    }
}
104 | ||
9cc50fc6 SL |
105 | const PAGE: usize = 4096; |
106 | ||
0bf4aa26 | 107 | impl<T> Default for TypedArena<T> { |
9e0c209e | 108 | /// Creates a new `TypedArena`. |
0bf4aa26 | 109 | fn default() -> TypedArena<T> { |
9e0c209e SL |
110 | TypedArena { |
111 | // We set both `ptr` and `end` to 0 so that the first call to | |
112 | // alloc() will trigger a grow(). | |
dc9dc135 XL |
113 | ptr: Cell::new(ptr::null_mut()), |
114 | end: Cell::new(ptr::null_mut()), | |
9e0c209e SL |
115 | chunks: RefCell::new(vec![]), |
116 | _own: PhantomData, | |
1a4d82fc JJ |
117 | } |
118 | } | |
0bf4aa26 | 119 | } |

impl<T> TypedArena<T> {
    pub fn in_arena(&self, ptr: *const T) -> bool {
        let ptr = ptr as *const T as *mut T;

        self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
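    ///
    /// A minimal sketch of typical use (element type and values are
    /// illustrative):
    ///
    /// ```ignore
    /// let arena: TypedArena<String> = TypedArena::default();
    /// let s = arena.alloc(String::from("hello"));
    /// s.push_str(", world");
    /// ```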
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr
                    .set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[inline]
    fn can_allocate(&self, len: usize) -> bool {
        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
        available_capacity_bytes >= at_least_bytes
    }

    /// Ensures there's enough space in the current chunk to fit `len` objects.
    #[inline]
    fn ensure_capacity(&self, len: usize) {
        if !self.can_allocate(len) {
            self.grow(len);
            debug_assert!(self.can_allocate(len));
        }
    }

    #[inline]
    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`,
    /// returning a mutable reference to it.
    ///
    /// Panics:
    ///
    /// - Zero-sized types
    /// - Zero-length slices
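    ///
    /// A minimal sketch of typical use (values illustrative):
    ///
    /// ```ignore
    /// let arena: TypedArena<u8> = TypedArena::default();
    /// let bytes = arena.alloc_slice(&[1, 2, 3]);
    /// assert_eq!(*bytes, [1, 2, 3]);
    /// ```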
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        unsafe {
            let len = slice.len();
            let start_ptr = self.alloc_raw_slice(len);
            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        unsafe {
            let len = vec.len();
            let start_ptr = self.alloc_raw_slice(len);
            vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            vec.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self, n: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
                last_chunk.entries = currently_used_cap;
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.capacity();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= currently_used_cap + n {
                            break;
                        }
                    }
                }
            } else {
                let elem_size = cmp::max(1, mem::size_of::<T>());
                new_capacity = cmp::max(n, PAGE / elem_size);
            }
            chunk = TypedArenaChunk::<T>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
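    ///
    /// A minimal sketch of typical use (values illustrative):
    ///
    /// ```ignore
    /// let mut arena: TypedArena<u32> = TypedArena::default();
    /// arena.alloc(1);
    /// arena.clear(); // drops the `1`; the chunk itself is kept for reuse
    /// arena.alloc(2);
    /// ```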
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
                    chunk.destroy(chunk.entries);
                }
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
        let ptr = ptr as *const u8 as *mut u8;

        self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
    }

    #[inline]
    fn align(&self, align: usize) {
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }

    #[inline(never)]
    #[cold]
    fn grow(&self, needed_bytes: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                if last_chunk
                    .storage
                    .reserve_in_place(used_bytes, needed_bytes)
                {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.capacity();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= used_bytes + needed_bytes {
                            break;
                        }
                    }
                }
            } else {
                new_capacity = cmp::max(needed_bytes, PAGE);
            }
            chunk = TypedArenaChunk::<u8>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    #[inline]
    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
        unsafe {
            assert!(bytes != 0);

            self.align(align);

            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
            if (future_end as *mut u8) >= self.end.get() {
                self.grow(bytes);
            }

            let ptr = self.ptr.get();
            // Set the pointer past ourselves.
            self.ptr.set(
                intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8,
            );
            slice::from_raw_parts_mut(ptr, bytes)
        }
    }
    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(
            mem::size_of::<T>(),
            mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`,
    /// returning a mutable reference to it.
    ///
    /// Panics:
    ///
    /// - Zero-sized types
    /// - Zero-length slices
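    ///
    /// A minimal sketch of typical use (values illustrative):
    ///
    /// ```ignore
    /// let arena = DroplessArena::default();
    /// let copied = arena.alloc_slice(&[1u64, 2, 3]);
    /// assert_eq!(*copied, [1, 2, 3]);
    /// ```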
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(
            slice.len() * mem::size_of::<T>(),
            mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }

    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators.
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`.
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }
    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here.
                let len = min;

                if len == 0 {
                    return &mut [];
                }
                let size = len.checked_mul(mem::size_of::<T>()).unwrap();
                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
                unsafe {
                    self.write_from_iter(iter, len, mem)
                }
            }
            (_, _) => {
                cold_path(move || -> &mut [T] {
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then
                    // forgetting the content of the SmallVec.
                    unsafe {
                        let len = vec.len();
                        let start_ptr = self.alloc_raw(
                            len * mem::size_of::<T>(),
                            mem::align_of::<T>()
                        ) as *mut _ as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}
#[derive(Default)]
// FIXME(@Zoxc): this type is entirely unused in rustc
pub struct SyncTypedArena<T> {
    lock: MTLock<TypedArena<T>>,
}

impl<T> SyncTypedArena<T> {
    #[inline(always)]
    pub fn alloc(&self, object: T) -> &mut T {
        // Extend the lifetime of the result since it's limited to the lock guard.
        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
    }

    #[inline(always)]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        // Extend the lifetime of the result since it's limited to the lock guard.
        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
    }

    #[inline(always)]
    pub fn clear(&mut self) {
        self.lock.get_mut().clear();
    }
}
0bf4aa26 | 547 | #[derive(Default)] |
83c7162d XL |
548 | pub struct SyncDroplessArena { |
549 | lock: MTLock<DroplessArena>, | |
550 | } | |
551 | ||
552 | impl SyncDroplessArena { | |
83c7162d XL |
553 | #[inline(always)] |
554 | pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool { | |
555 | self.lock.lock().in_arena(ptr) | |
556 | } | |
557 | ||
94b46f34 XL |
558 | #[inline(always)] |
559 | pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] { | |
560 | // Extend the lifetime of the result since it's limited to the lock guard | |
561 | unsafe { &mut *(self.lock.lock().alloc_raw(bytes, align) as *mut [u8]) } | |
562 | } | |
563 | ||
83c7162d XL |
564 | #[inline(always)] |
565 | pub fn alloc<T>(&self, object: T) -> &mut T { | |
566 | // Extend the lifetime of the result since it's limited to the lock guard | |
567 | unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) } | |
568 | } | |
569 | ||
570 | #[inline(always)] | |
571 | pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T] | |
572 | where | |
573 | T: Copy, | |
574 | { | |
575 | // Extend the lifetime of the result since it's limited to the lock guard | |
576 | unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) } | |
577 | } | |
578 | } | |
579 | ||
1a4d82fc | 580 | #[cfg(test)] |
dc9dc135 | 581 | mod tests; |