// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy all of the objects within them
//! at once, when the arena itself is destroyed. They do not support
//! deallocation of individual objects while the arena itself is still alive.
//! The benefit of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements two arenas: `TypedArena`, which is simpler but can
//! only hold objects of a single type, and `Arena`, which is more complex and
//! slower but can hold objects of any type.
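//!
//! A minimal usage sketch (not part of the original docs; it assumes the
//! nightly toolchain and unstable features this crate already requires):
//!
//! ```ignore
//! use arena::{Arena, TypedArena};
//!
//! // `TypedArena` hands out references that live as long as the arena itself.
//! let typed = TypedArena::new();
//! let x: &mut i32 = typed.alloc(42);
//! *x += 1;
//!
//! // `Arena` can hold values of different types; `alloc` takes an initializer.
//! let mixed = Arena::new();
//! let s: &mut String = mixed.alloc(|| "hello".to_string());
//! let n: &mut u32 = mixed.alloc(|| 7);
//! ```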

// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "arena"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "http://doc.rust-lang.org/nightly/")]

#![feature(alloc)]
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(heap_api)]
#![feature(oom)]
#![feature(ptr_as_ref)]
#![feature(raw)]
#![feature(staged_api)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker;
use std::mem;
use std::ptr;
use std::rc::Rc;
use std::rt::heap::{allocate, deallocate};

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    data: Rc<RefCell<Vec<u8>>>,
    fill: Cell<usize>,
    is_copy: Cell<bool>,
}

impl Chunk {
    fn capacity(&self) -> usize {
        self.data.borrow().capacity()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user-provided
/// initializer function. It is important to not run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
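///
/// A minimal allocation sketch (not part of the original docs):
///
/// ```ignore
/// let arena = Arena::new();
/// // A value with a destructor lands in the non-`Copy` chunks,
/// // while plain-old-data lands in the `Copy` chunks.
/// let rc = arena.alloc(|| ::std::rc::Rc::new(5));
/// let bytes = arena.alloc(|| [0u8, 1, 2]);
/// ```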
pub struct Arena<'longer_than_self> {
    // The head is separated out from the list as an unbenchmarked
    // microoptimization, to avoid needing to case on the list to access the
    // head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
    _marker: marker::PhantomData<*mut &'longer_than_self()>,
}

impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: marker::PhantomData,
        }
    }
}

fn chunk(size: usize, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0),
        is_copy: Cell::new(is_copy),
    }
}

impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}

#[inline]
fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}
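
// Worked example of the rounding above (not in the original source): with
// `align` a power of two, `round_up(13, 8)` is `(13 + 7) & !7 == 16`, and
// values that are already aligned are left unchanged.
#[test]
fn test_round_up_examples() {
    assert_eq!(round_up(0, 8), 0);
    assert_eq!(round_up(13, 8), 16);
    assert_eq!(round_up(16, 8), 16);
}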

// Walk down a chunk, running the destructors for any objects stored
// in it.
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data: *const usize = mem::transmute(buf.offset(idx as isize));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}

// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}
#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}
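
// Round-trip sanity check of the bit-packing (not in the original source).
// It assumes vtable pointers are at least 2-byte aligned, leaving the low
// bit free for the "initialized" flag.
#[test]
fn test_bitpack_tydesc_ptr_roundtrip() {
    unsafe {
        let tydesc = get_tydesc::<u64>();
        let packed = bitpack_tydesc_ptr(tydesc, true);
        let (unpacked, is_done) = un_bitpack_tydesc_ptr(packed);
        assert_eq!(unpacked, tydesc);
        assert!(is_done);
    }
}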

// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
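// (Added note, an assumption about rustc internals of this era: the first
// three slots of a trait object vtable are the drop glue, size, and align of
// the concrete type, which is why a vtable pointer can be read as the
// `TyDesc` below.)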
struct TyDesc {
    drop_glue: fn(*const i8),
    size: usize,
    align: usize
}

trait AllTypes { fn dummy(&self) { } }
impl<T:?Sized> AllTypes for T { }

unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    let ptr = &*(1 as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    obj.vtable as *const TyDesc
}

impl<'longer_than_self> Arena<'longer_than_self> {
    fn chunk_size(&self) -> usize {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());

        *self.copy_head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), true);

        return self.alloc_copy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as isize)
        }
    }

    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            return &mut *ptr;
        }
    }

    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: usize,
                          align: usize) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());

        *self.head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), false);

        return self.alloc_noncopy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: usize,
                           align: usize) -> (*const u8, *const u8) {
        // Be careful to not maintain any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            return (buf.offset(tydesc_start as isize), buf.offset(start as isize));
        }
    }

    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = mem::transmute(tydesc);
            // Actually initialize it
            ptr::write(&mut(*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            return &mut *ptr;
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
    #[inline]
    pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}

#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
#[should_panic]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1, 2] });
    }
    // Now, panic while allocating
    arena.alloc::<Rc<i32>, _>(|| {
        panic!();
    });
}

/// A faster arena that can hold objects of only one type.
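///
/// A minimal usage sketch (not part of the original docs):
///
/// ```ignore
/// let arena = TypedArena::new();
/// let point: &mut (i32, i32) = arena.alloc((1, 2));
/// point.0 = 3;
/// ```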
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<*mut TypedArenaChunk<T>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: marker::PhantomData<T>,
}

struct TypedArenaChunk<T> {
    marker: marker::PhantomData<T>,

    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    capacity: usize,

    // Objects follow here, suitably aligned.
}

fn calculate_size<T>(capacity: usize) -> usize {
    let mut size = mem::size_of::<TypedArenaChunk<T>>();
    size = round_up(size, mem::align_of::<T>());
    let elem_size = mem::size_of::<T>();
    let elems_size = elem_size.checked_mul(capacity).unwrap();
    size = size.checked_add(elems_size).unwrap();
    size
}
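
// Worked example of the size calculation above (not in the original source):
// a chunk's allocation is the chunk header, rounded up to `T`'s alignment,
// followed by `capacity` elements of `T`.
#[test]
fn test_calculate_size_example() {
    let header = mem::size_of::<TypedArenaChunk<u64>>();
    let expected = round_up(header, mem::align_of::<u64>()) + 8 * mem::size_of::<u64>();
    assert_eq!(calculate_size::<u64>(8), expected);
}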

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
                  -> *mut TypedArenaChunk<T> {
        let size = calculate_size::<T>(capacity);
        let chunk = allocate(size, mem::align_of::<TypedArenaChunk<T>>())
                        as *mut TypedArenaChunk<T>;
        if chunk.is_null() { alloc::oom() }
        (*chunk).next = next;
        (*chunk).capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk, running the destructor on the first `len`
    /// elements if `T` needs drop glue, deallocating the chunk's memory, and
    /// then destroying the next chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in 0..len {
                ptr::read(start as *const T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as isize)
            }
        }

        // Destroy the next chunk.
        let next = self.next;
        let size = calculate_size::<T>(self.capacity);
        let self_ptr: *mut TypedArenaChunk<T> = self;
        deallocate(self_ptr as *mut u8, size,
                   mem::align_of::<TypedArenaChunk<T>>());
        if !next.is_null() {
            let capacity = (*next).capacity;
            (*next).destroy(capacity);
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            mem::transmute(round_up(this.offset(1) as usize,
                                    mem::align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
            self.start().offset(size as isize)
        }
    }
}

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for eight objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number of
    /// objects.
    #[inline]
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
            TypedArena {
                ptr: Cell::new((*chunk).start() as *const T),
                end: Cell::new((*chunk).end() as *const T),
                first: RefCell::new(chunk),
                _own: marker::PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        let ptr: &mut T = unsafe {
            let ptr: &mut T = mem::transmute(self.ptr.clone());
            ptr::write(ptr, object);
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        };

        ptr
    }

    /// Grows the arena.
    #[inline(never)]
    fn grow(&self) {
        unsafe {
            let chunk = *self.first.borrow_mut();
            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
            self.ptr.set((*chunk).start() as *const T);
            self.end.set((*chunk).end() as *const T);
            *self.first.borrow_mut() = chunk
        }
    }
}

impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let start = self.first.borrow().as_ref().unwrap().start() as usize;
            let end = self.ptr.get() as usize;
            let diff = (end - start) / mem::size_of::<T>();

            // Pass that to the `destroy` method.
            (**self.first.borrow_mut()).destroy(diff)
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    #[allow(dead_code)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            })
        })
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Point {
                x: 1,
                y: 2,
                z: 3,
            };
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| {
                Point {
                    x: 1,
                    y: 2,
                    z: 3,
                }
            })
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            };
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }
}