// Data structures for storing garbage to be freed later (once the
// epochs have sufficiently advanced).
//
// In general, we try to manage the garbage thread locally whenever
// possible. Each thread keeps track of three bags of garbage. But if a
// thread is exiting, these bags must be moved into the global garbage
11 use std
::sync
::atomic
::AtomicPtr
;
12 use std
::sync
::atomic
::Ordering
::{Relaxed, Release, Acquire}
;
/// One item of garbage.
///
/// Stores enough information to do a deallocation.
/// Type-erased deallocation routine; invoked with the item's raw
/// pointer (see `Bag::collect`).
/// NOTE(review): the struct header and the `ptr: *mut u8` field are
/// missing from this chunk of the file.
free: unsafe fn(*mut u8),
/// A single, thread-local bag of garbage.
///
/// A newtype over the list of garbage `Item`s accumulated by one
/// thread; the whole list is taken and freed at once by `collect`.
pub struct Bag(Vec<Item>);
/// Record a pointer for later deallocation.
///
/// NOTE(review): the rest of this body is missing from this chunk;
/// only the size computation is visible.
fn insert<T>(&mut self, elem: *mut T) {
    // Size of the pointee — presumably used to handle zero-sized types
    // or to build the `Item`; confirm against the full source.
    let size = mem::size_of::<T>();
/// Deallocation routine stored in an `Item`.
///
/// Reconstructs a `Vec<T>` with length 0 and capacity 1 from the raw
/// pointer and drops it: this releases the allocation without running
/// `T`'s destructor (a `Vec` with len 0 drops no elements).
///
/// # Safety
/// `t` must point to a live allocation whose layout matches a
/// one-element `Vec<T>` buffer — presumably created by `insert`;
/// the allocation site is in the lines missing from this chunk, so
/// confirm against the full source. The closing brace of this function
/// is also missing from this chunk.
unsafe fn free<T>(t: *mut u8) {
    drop(Vec::from_raw_parts(t as *mut T, 0, 1));
/// Number of garbage items currently in the bag.
/// NOTE(review): the body is missing from this chunk.
fn len(&self) -> usize {
/// Deallocate all garbage in the bag.
///
/// # Safety
/// NOTE(review): inferred from the epoch scheme described at the top
/// of the file — the caller must guarantee no other thread can still
/// reach the stored pointers; confirm against the full source.
pub unsafe fn collect(&mut self) {
    // Move the items out, leaving a fresh empty Vec behind so the bag
    // can be reused immediately.
    let mut data = mem::replace(&mut self.0, Vec::new());
    for item in data.iter() {
        // Run the per-item deallocation routine recorded at insert
        // time on the pointer stored alongside it.
        // NOTE(review): the closing braces of this loop/function are
        // missing from this chunk.
        (item.free)(item.ptr);
// Explicit impls are needed because the bags store raw pointers, which
// suppress the automatic Send/Sync derivation.
// SAFETY(review): sound only if each stored pointer is exclusively
// owned by the bag after insertion (no aliased access from other
// threads) — confirm against the insertion sites.
unsafe impl Send for Bag {}
unsafe impl Sync for Bag {}
/// A thread-local set of garbage bags.
///
/// Garbage added at least one epoch behind the current local epoch.
/// Garbage added in the current local epoch or earlier.
/// Garbage added in the current *global* epoch.
/// Create a fresh, empty set of local garbage bags.
/// NOTE(review): the body is missing from this chunk.
pub fn new() -> Local {
/// Record a pointer for later deallocation — presumably into the bag
/// for the current epoch (`new`); the body is missing from this chunk,
/// so confirm against the full source.
pub fn insert<T>(&mut self, elem: *mut T) {
/// Collect one epoch of garbage, rotating the local garbage bags.
///
/// # Safety
/// NOTE(review): inferred from the epoch scheme described at the top
/// of the file — the epochs must have advanced enough that no thread
/// can still observe the pointers in `old`; confirm.
pub unsafe fn collect(&mut self) {
    // Free the oldest generation of garbage.
    // NOTE(review): `ret` is unused in the visible span; it is
    // presumably returned in the lines missing from this chunk.
    let ret = self.old.collect();
    // Rotate the generations: cur -> old, new -> cur.
    mem::swap(&mut self.old, &mut self.cur);
    mem::swap(&mut self.cur, &mut self.new);
/// Total number of garbage items across all three local bags.
/// NOTE(review): the closing brace is missing from this chunk.
pub fn size(&self) -> usize {
    self.old.len() + self.cur.len() + self.new.len()
/// A concurrent garbage bag, currently based on Treiber's stack.
///
/// The elements are themselves owned `Bag`s.
/// Top of the stack of nodes; null means the bag is empty (see
/// `collect`). NOTE(review): the struct header is missing from this
/// chunk.
head: AtomicPtr<Node>,
// SAFETY(review): asserts that an all-zero byte pattern is a valid
// `ConcBag` — plausible, since the only visible field is an
// `AtomicPtr` and a null `head` denotes the empty bag, but the
// `ZerosValid` contract is defined elsewhere in the project; confirm.
unsafe impl ZerosValid for ConcBag {}
/// Next node in the Treiber stack; null terminates the list.
/// NOTE(review): the struct header and the `data: Bag` field (seen
/// constructed in `insert`) are missing from this chunk.
next: AtomicPtr<Node>,
/// Push an owned `Bag` onto the concurrent stack.
pub fn insert(&self, t: Bag){
    // Heap-allocate the node up front; ownership of `t` moves into it.
    let n = Box::into_raw(Box::new(
        Node { data: t, next: AtomicPtr::new(ptr::null_mut()) }));
    // Treiber-stack push: read the head, link the new node behind it,
    // then CAS it in.
    // NOTE(review): the enclosing retry loop is missing from this
    // chunk — the `break` below implies one exists; confirm.
    let head = self.head.load(Acquire);
    // Relaxed is sufficient here: the Release CAS below publishes the
    // store before any other thread can reach the node.
    unsafe { (*n).next.store(head, Relaxed) };
    // Release pairs with the Acquire swap in `collect`, making the
    // node contents visible to the collector.
    // NOTE(review): `compare_and_swap` is deprecated in modern Rust in
    // favor of `compare_exchange_weak`; cannot migrate safely here
    // because the surrounding loop is not visible.
    if self.head.compare_and_swap(head, n, Release) == head { break }
/// Detach and reclaim every bag currently in the concurrent stack.
///
/// # Safety
/// NOTE(review): inferred from the epoch scheme described at the top
/// of the file — no thread may still observe the garbage held by the
/// detached bags; confirm against the full source.
pub unsafe fn collect(&self) {
    // Cheap Relaxed probe first: check to avoid the xchg instruction
    // when no garbage exists.
    let mut head = self.head.load(Relaxed);
    if head != ptr::null_mut() {
        // Detach the entire stack in one atomic swap; Acquire pairs
        // with the Release CAS in `insert` so the node contents are
        // visible here.
        // NOTE(review): the closing brace of this `if` is missing from
        // this chunk.
        head = self.head.swap(ptr::null_mut(), Acquire);
    // Walk the detached, now-private list, reclaiming each node.
    while head != ptr::null_mut() {
        // Re-box the node so its memory is freed when `n` drops;
        // `mut` suggests `n.data` is collected in the missing line
        // between these two — confirm.
        let mut n = Box::from_raw(head);
        // Advance before `n` is dropped; Relaxed is fine on a list no
        // other thread can reach. NOTE(review): this function is
        // truncated here — its tail lies past the end of this chunk.
        head = n.next.load(Relaxed);