// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within them all at
//! once, when the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements `TypedArena`, a simple arena that can only hold
//! objects of a single type.
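//!
//! A minimal usage sketch (illustrative only; it assumes this crate is linked
//! under its usual name, `arena`):
//!
//! ```
//! extern crate arena;
//!
//! use arena::TypedArena;
//!
//! fn main() {
//!     let arena: TypedArena<u32> = TypedArena::default();
//!     let x: &mut u32 = arena.alloc(1);
//!     *x += 41;
//!     assert_eq!(*x, 42);
//! }
//! ```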

#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]

#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(nll)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]

#![allow(deprecated)]

extern crate alloc;
extern crate rustc_data_structures;

use rustc_data_structures::sync::MTLock;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk {
            storage: RawVec::with_capacity(capacity),
        }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.cap())
            }
        }
    }
}

const PAGE: usize = 4096;

impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(0 as *mut T),
            end: Cell::new(0 as *mut T),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr
                    .set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1)
                        as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    /// - Zero-sized types
    /// - Zero-length slices
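    ///
    /// A minimal sketch of the intended use (illustrative; assumes the crate
    /// is available as `arena`):
    ///
    /// ```
    /// extern crate arena;
    ///
    /// use arena::TypedArena;
    ///
    /// fn main() {
    ///     let arena: TypedArena<i32> = TypedArena::default();
    ///     let slice = arena.alloc_slice(&[1, 2, 3]);
    ///     assert_eq!(slice.len(), 3);
    ///     assert_eq!(slice[0], 1);
    /// }
    /// ```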
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);

        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let at_least_bytes = slice.len() * mem::size_of::<T>();
        if available_capacity_bytes < at_least_bytes {
            self.grow(slice.len());
        }

        unsafe {
            let start_ptr = self.ptr.get();
            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
            self.ptr.set(start_ptr.add(arena_slice.len()));
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }

    /// Grows the arena.
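    ///
    /// If the last chunk can be extended in place to make room for `n` more
    /// objects, it is. Otherwise its capacity is doubled until the result can
    /// hold everything already allocated in that chunk plus `n` more objects,
    /// and a fresh chunk of that capacity is allocated. The very first chunk
    /// is sized to roughly one `PAGE`, but always to at least `n` objects.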
    #[inline(never)]
    #[cold]
    fn grow(&self, n: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= currently_used_cap + n {
                            break;
                        }
                    }
                }
            } else {
                let elem_size = cmp::max(1, mem::size_of::<T>());
                new_capacity = cmp::max(n, PAGE / elem_size);
            }
            chunk = TypedArenaChunk::<T>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk, which may be reused.
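    ///
    /// A minimal sketch of the intended use (illustrative; assumes the crate
    /// is available as `arena`):
    ///
    /// ```
    /// extern crate arena;
    ///
    /// use arena::TypedArena;
    ///
    /// fn main() {
    ///     let mut arena: TypedArena<u8> = TypedArena::default();
    ///     for i in 0..10 {
    ///         arena.alloc(i);
    ///     }
    ///     // Drops everything allocated so far; the arena can be reused afterwards.
    ///     arena.clear();
    ///     let x = arena.alloc(42);
    ///     assert_eq!(*x, 42);
    /// }
    /// ```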
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                self.clear_last_chunk(&mut last_chunk);
                // If `T` is a ZST, the code below has no effect.
                for mut chunk in chunks_borrow.drain(..) {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
                chunks_borrow.push(last_chunk);
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is a ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

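/// An arena that can hold objects of several different types, as long as none
/// of them needs to be dropped: `alloc` and `alloc_slice` assert
/// `!mem::needs_drop::<T>()`, and dropping the arena only frees its memory.
///
/// A minimal sketch of the intended use (illustrative; assumes the crate is
/// available as `arena`):
///
/// ```
/// extern crate arena;
///
/// use arena::DroplessArena;
///
/// fn main() {
///     let arena = DroplessArena::default();
///     let x: &mut u32 = arena.alloc(42);
///     let xs: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
///     assert_eq!(*x, 42);
///     assert_eq!(xs.len(), 3);
/// }
/// ```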
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    fn default() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(0 as *mut u8),
            end: Cell::new(0 as *mut u8),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
        let ptr = ptr as *const u8 as *mut u8;
        for chunk in &*self.chunks.borrow() {
            if chunk.start() <= ptr && ptr < chunk.end() {
                return true;
            }
        }

        false
    }

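    // Rounds `self.ptr` up to the next multiple of `align`, which must be a
    // power of two for the bit trick below to round correctly. A worked
    // example with `ptr = 0x1003` and `align = 8`:
    //
    //     (0x1003 + 8 - 1) & !(8 - 1)  ==  0x100a & !0x7  ==  0x1008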
    fn align(&self, align: usize) {
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }

    #[inline(never)]
    #[cold]
    fn grow(&self, needed_bytes: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                if last_chunk
                    .storage
                    .reserve_in_place(used_bytes, needed_bytes)
                {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= used_bytes + needed_bytes {
                            break;
                        }
                    }
                }
            } else {
                new_capacity = cmp::max(needed_bytes, PAGE);
            }
            chunk = TypedArenaChunk::<u8>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    #[inline]
    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
        unsafe {
            assert!(bytes != 0);

            self.align(align);

            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
            if (future_end as *mut u8) >= self.end.get() {
                self.grow(bytes);
            }

            let ptr = self.ptr.get();
            // Set the pointer past ourselves
            self.ptr.set(
                intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8,
            );
            slice::from_raw_parts_mut(ptr, bytes)
        }
    }

    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(
            mem::size_of::<T>(),
            mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    /// - Zero-sized types
    /// - Zero-length slices
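    ///
    /// A minimal sketch of the intended use (illustrative; assumes the crate
    /// is available as `arena`):
    ///
    /// ```
    /// extern crate arena;
    ///
    /// use arena::DroplessArena;
    ///
    /// fn main() {
    ///     let arena = DroplessArena::default();
    ///     let bytes: &mut [u8] = arena.alloc_slice(b"hello");
    ///     assert_eq!(bytes.len(), 5);
    ///     assert_eq!(bytes[0], b'h');
    /// }
    /// ```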
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);

        let mem = self.alloc_raw(
            slice.len() * mem::size_of::<T>(),
            mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }
}

#[derive(Default)]
// FIXME(@Zoxc): this type is entirely unused in rustc
pub struct SyncTypedArena<T> {
    lock: MTLock<TypedArena<T>>,
}

impl<T> SyncTypedArena<T> {
    #[inline(always)]
    pub fn alloc(&self, object: T) -> &mut T {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
    }

    #[inline(always)]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
    }

    #[inline(always)]
    pub fn clear(&mut self) {
        self.lock.get_mut().clear();
    }
}

#[derive(Default)]
pub struct SyncDroplessArena {
    lock: MTLock<DroplessArena>,
}

impl SyncDroplessArena {
    #[inline(always)]
    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
        self.lock.lock().in_arena(ptr)
    }

    #[inline(always)]
    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc_raw(bytes, align) as *mut [u8]) }
    }

    #[inline(always)]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
    }

    #[inline(always)]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::TypedArena;
    use std::cell::Cell;

    #[allow(dead_code)]
    #[derive(Debug, Eq, PartialEq)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    pub fn test_unused() {
        let arena: TypedArena<Point> = TypedArena::default();
        assert!(arena.chunks.borrow().is_empty());
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner {
            value: u8,
        }
        struct Outer<'a> {
            inner: &'a Inner,
        }
        enum EI<'e> {
            I(Inner),
            O(Outer<'e>),
        }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::default());

        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }),
        });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::default();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::default();
        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::default();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }

    #[test]
    pub fn test_typed_arena_zero_sized() {
        let arena = TypedArena::default();
        for _ in 0..100000 {
            arena.alloc(());
        }
    }

    #[test]
    pub fn test_typed_arena_clear() {
        let mut arena = TypedArena::default();
        for _ in 0..10 {
            arena.clear();
            for _ in 0..10000 {
                arena.alloc(Point { x: 1, y: 2, z: 3 });
            }
        }
    }

    // Drop tests

    struct DropCounter<'a> {
        count: &'a Cell<u32>,
    }

    impl<'a> Drop for DropCounter<'a> {
        fn drop(&mut self) {
            self.count.set(self.count.get() + 1);
        }
    }

    #[test]
    fn test_typed_arena_drop_count() {
        let counter = Cell::new(0);
        {
            let arena: TypedArena<DropCounter> = TypedArena::default();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
        };
        assert_eq!(counter.get(), 100);
    }

    #[test]
    fn test_typed_arena_drop_on_clear() {
        let counter = Cell::new(0);
        let mut arena: TypedArena<DropCounter> = TypedArena::default();
        for i in 0..10 {
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
            arena.clear();
            assert_eq!(counter.get(), i * 100 + 100);
        }
    }

    thread_local! {
        static DROP_COUNTER: Cell<u32> = Cell::new(0)
    }

    struct SmallDroppable;

    impl Drop for SmallDroppable {
        fn drop(&mut self) {
            DROP_COUNTER.with(|c| c.set(c.get() + 1));
        }
    }

    #[test]
    fn test_typed_arena_drop_small_count() {
        DROP_COUNTER.with(|c| c.set(0));
        {
            let arena: TypedArena<SmallDroppable> = TypedArena::default();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(SmallDroppable);
            }
            // dropping
        };
        assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::default();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        })
    }
}