// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are allocators that destroy all of the objects they contain at
//! once, when the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena is still alive. The benefit of an
//! arena is very fast allocation: just a pointer bump.
//!
//! This crate implements `TypedArena`, a simple arena that can only hold
//! objects of a single type, and `DroplessArena`, an arena for objects
//! that do not need their destructors run.
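//!
//! # Example
//!
//! A minimal usage sketch (illustrative only; `arena` is an unstable,
//! rustc-internal crate, so the exact API may change between nightlies):
//!
//! ```
//! extern crate arena;
//!
//! fn main() {
//!     let arena: arena::TypedArena<u32> = arena::TypedArena::new();
//!     let x = arena.alloc(42);
//!     assert_eq!(*x, 42);
//!     // Everything allocated in `arena` is dropped when it goes out of scope.
//! }
//! ```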

#![crate_name = "arena"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]
#![deny(warnings)]

#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(generic_param_attrs)]
#![feature(needs_drop)]
#![cfg_attr(test, feature(test))]

#![allow(deprecated)]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().offset(self.storage.cap() as isize)
            }
        }
    }
}

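// The initial size, in bytes, of a newly created arena chunk (roughly one OS page).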
const PAGE: usize = 4096;

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena`.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(0 as *mut T),
            end: Cell::new(0 as *mut T),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    /// - Zero-sized types
    /// - Zero-length slices
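    ///
    /// # Example
    ///
    /// A minimal sketch (illustrative only):
    ///
    /// ```
    /// extern crate arena;
    ///
    /// fn main() {
    ///     let arena: arena::TypedArena<u8> = arena::TypedArena::new();
    ///     let bytes = arena.alloc_slice(&[1, 2, 3]);
    ///     bytes[0] = 7; // the arena-backed copy is mutable
    ///     assert_eq!(bytes.len(), 3);
    /// }
    /// ```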
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
        where T: Copy {
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);

        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let at_least_bytes = slice.len() * mem::size_of::<T>();
        if available_capacity_bytes < at_least_bytes {
            self.grow(slice.len());
        }

        unsafe {
            let start_ptr = self.ptr.get();
            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
            self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }

    /// Grows the arena.
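    ///
    /// First tries to extend the current chunk in place. If that fails, a new chunk is
    /// allocated, doubling the previous chunk's capacity until it can hold the elements
    /// already in use plus `n` more.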
    #[inline(never)]
    #[cold]
    fn grow(&self, n: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= currently_used_cap + n {
                            break;
                        }
                    }
                }
            } else {
                let elem_size = cmp::max(1, mem::size_of::<T>());
                new_capacity = cmp::max(n, PAGE / elem_size);
            }
            chunk = TypedArenaChunk::<T>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk, which may be reused.
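    ///
    /// # Example
    ///
    /// A minimal sketch (illustrative only):
    ///
    /// ```
    /// extern crate arena;
    ///
    /// fn main() {
    ///     let mut arena: arena::TypedArena<String> = arena::TypedArena::new();
    ///     arena.alloc(String::from("first"));
    ///     // Drops the contents but keeps the largest chunk for reuse.
    ///     arena.clear();
    ///     arena.alloc(String::from("second"));
    /// }
    /// ```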
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                self.clear_last_chunk(&mut last_chunk);
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..) {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
                chunks_borrow.push(last_chunk);
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

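/// An arena that can hold objects of multiple types, provided they have no
/// destructors to run (`alloc` asserts `!mem::needs_drop::<T>()`) and are not
/// zero-sized. Memory is still freed all at once when the arena is dropped,
/// but no destructors are executed.
///
/// # Example
///
/// A minimal sketch (illustrative only):
///
/// ```
/// extern crate arena;
///
/// fn main() {
///     let arena = arena::DroplessArena::new();
///     let x: &mut usize = arena.alloc(7);
///     assert_eq!(*x, 7);
/// }
/// ```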
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

impl DroplessArena {
    pub fn new() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(0 as *mut u8),
            end: Cell::new(0 as *mut u8),
            chunks: RefCell::new(vec![]),
        }
    }

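    /// Returns `true` if `ptr` points into memory owned by one of this arena's chunks.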
    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
        let ptr = ptr as *const u8 as *mut u8;
        for chunk in &*self.chunks.borrow() {
            if chunk.start() <= ptr && ptr < chunk.end() {
                return true;
            }
        }

        false
    }

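    /// Rounds `self.ptr` up to the next multiple of `T`'s alignment.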
    fn align_for<T>(&self) {
        let align = mem::align_of::<T>();
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }

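    /// Grows the arena so that at least `n` more objects of type `T` fit. As in
    /// `TypedArena::grow`, the current chunk is first extended in place if possible;
    /// otherwise a new, larger chunk is allocated.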
    #[inline(never)]
    #[cold]
    fn grow<T>(&self, n: usize) {
        let needed_bytes = n * mem::size_of::<T>();
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= used_bytes + needed_bytes {
                            break;
                        }
                    }
                }
            } else {
                new_capacity = cmp::max(needed_bytes, PAGE);
            }
            chunk = TypedArenaChunk::<u8>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

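    /// Allocates an object in the `DroplessArena`, returning a reference to it.
    ///
    /// Panics if `T` is zero-sized or needs its destructor run
    /// (`mem::needs_drop::<T>()`).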
    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        unsafe {
            assert!(!mem::needs_drop::<T>());
            assert!(mem::size_of::<T>() != 0);

            self.align_for::<T>();
            let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
            if (future_end as *mut u8) >= self.end.get() {
                self.grow::<T>(1)
            }

            let ptr = self.ptr.get();
            // Set the pointer past ourselves
            self.ptr.set(intrinsics::arith_offset(
                self.ptr.get(), mem::size_of::<T>() as isize
            ) as *mut u8);
            // Write into uninitialized memory.
            ptr::write(ptr as *mut T, object);
            &mut *(ptr as *mut T)
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    /// - Zero-sized types
    /// - Zero-length slices
    /// - Types that need to be dropped (`mem::needs_drop::<T>()`)
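    ///
    /// # Example
    ///
    /// A minimal sketch (illustrative only):
    ///
    /// ```
    /// extern crate arena;
    ///
    /// fn main() {
    ///     let arena = arena::DroplessArena::new();
    ///     let xs: &mut [u32] = arena.alloc_slice(&[1, 2, 3, 4]);
    ///     xs[0] = 7; // the arena-backed copy is mutable
    ///     assert_eq!(xs.len(), 4);
    /// }
    /// ```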
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
        where T: Copy {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);
        self.align_for::<T>();

        let future_end = unsafe {
            intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
        };
        if (future_end as *mut u8) >= self.end.get() {
            self.grow::<T>(slice.len());
        }

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
            self.ptr.set(intrinsics::arith_offset(
                self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
            ) as *mut u8);
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::TypedArena;
    use std::cell::Cell;

    #[allow(dead_code)]
    #[derive(Debug, Eq, PartialEq)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    pub fn test_unused() {
        let arena: TypedArena<Point> = TypedArena::new();
        assert!(arena.chunks.borrow().is_empty());
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner {
            value: u8,
        }
        struct Outer<'a> {
            inner: &'a Inner,
        }
        enum EI<'e> {
            I(Inner),
            O(Outer<'e>),
        }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result =
            arena.alloc_outer(|| Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }

    #[test]
    pub fn test_typed_arena_zero_sized() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(());
        }
    }

    #[test]
    pub fn test_typed_arena_clear() {
        let mut arena = TypedArena::new();
        for _ in 0..10 {
            arena.clear();
            for _ in 0..10000 {
                arena.alloc(Point { x: 1, y: 2, z: 3 });
            }
        }
    }

    // Drop tests

    struct DropCounter<'a> {
        count: &'a Cell<u32>,
    }

    impl<'a> Drop for DropCounter<'a> {
        fn drop(&mut self) {
            self.count.set(self.count.get() + 1);
        }
    }

    #[test]
    fn test_typed_arena_drop_count() {
        let counter = Cell::new(0);
        {
            let arena: TypedArena<DropCounter> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
        };
        assert_eq!(counter.get(), 100);
    }

    #[test]
    fn test_typed_arena_drop_on_clear() {
        let counter = Cell::new(0);
        let mut arena: TypedArena<DropCounter> = TypedArena::new();
        for i in 0..10 {
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
            arena.clear();
            assert_eq!(counter.get(), i * 100 + 100);
        }
    }

    thread_local! {
        static DROP_COUNTER: Cell<u32> = Cell::new(0)
    }

    struct SmallDroppable;

    impl Drop for SmallDroppable {
        fn drop(&mut self) {
            DROP_COUNTER.with(|c| c.set(c.get() + 1));
        }
    }

    #[test]
    fn test_typed_arena_drop_small_count() {
        DROP_COUNTER.with(|c| c.set(0));
        {
            let arena: TypedArena<SmallDroppable> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(SmallDroppable);
            }
            // dropping
        };
        assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        })
    }
}