vendor/crossbeam-0.3.0/src/epoch/garbage.rs
// Data structures for storing garbage to be freed later (once the
// epochs have sufficiently advanced).
//
// In general, we try to manage garbage thread-locally whenever
// possible. Each thread keeps track of three bags of garbage. But if a
// thread is exiting, these bags must be moved into the global garbage
// bags.

use std::ptr;
use std::mem;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering::{Relaxed, Release, Acquire};

use ZerosValid;

/// One item of garbage.
///
/// Stores enough information to do a deallocation.
#[derive(Debug)]
struct Item {
    ptr: *mut u8,
    free: unsafe fn(*mut u8),
}

/// A single, thread-local bag of garbage.
#[derive(Debug)]
pub struct Bag(Vec<Item>);

impl Bag {
    fn new() -> Bag {
        Bag(vec![])
    }

    fn insert<T>(&mut self, elem: *mut T) {
        let size = mem::size_of::<T>();
        // Zero-sized types own no heap storage, so there is nothing to free.
        if size > 0 {
            self.0.push(Item {
                ptr: elem as *mut u8,
                free: free::<T>,
            })
        }
        // Deallocate by rebuilding a zero-length `Vec` with capacity 1: this
        // releases the allocation without running `T`'s destructor.
        unsafe fn free<T>(t: *mut u8) {
            drop(Vec::from_raw_parts(t as *mut T, 0, 1));
        }
    }

    fn len(&self) -> usize {
        self.0.len()
    }

    /// Deallocate all garbage in the bag
    pub unsafe fn collect(&mut self) {
        let mut data = mem::replace(&mut self.0, Vec::new());
        for item in data.iter() {
            (item.free)(item.ptr);
        }
        data.truncate(0);
        self.0 = data;
    }
}
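
// Illustrative sketch, not part of the upstream crossbeam source: a `Bag`
// only deallocates; `free::<T>` above never runs `T`'s destructor. The
// hypothetical test below exercises the insert/collect cycle and checks that
// the memory is reclaimed without the payload's `Drop` impl ever running.
#[cfg(test)]
mod bag_sketch {
    use super::Bag;
    use std::sync::atomic::{AtomicBool, Ordering};

    static DROPPED: AtomicBool = AtomicBool::new(false);

    // Non-zero-sized payload with an observable destructor.
    struct Tracked(u32);
    impl Drop for Tracked {
        fn drop(&mut self) {
            DROPPED.store(true, Ordering::SeqCst);
        }
    }

    #[test]
    fn collect_frees_without_dropping() {
        let mut bag = Bag::new();
        // Hand ownership of a heap allocation to the bag.
        bag.insert(Box::into_raw(Box::new(Tracked(0))));
        assert_eq!(bag.len(), 1);
        // Frees the allocation and leaves the bag empty and reusable.
        unsafe { bag.collect() };
        assert_eq!(bag.len(), 0);
        // The destructor was skipped: only the memory was released.
        assert!(!DROPPED.load(Ordering::SeqCst));
    }
}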

// needed because the bags store raw pointers.
unsafe impl Send for Bag {}
unsafe impl Sync for Bag {}

/// A thread-local set of garbage bags.
#[derive(Debug)]
pub struct Local {
    /// Garbage added at least one epoch behind the current local epoch
    pub old: Bag,
    /// Garbage added in the current local epoch or earlier
    pub cur: Bag,
    /// Garbage added in the current *global* epoch
    pub new: Bag,
}

impl Local {
    pub fn new() -> Local {
        Local {
            old: Bag::new(),
            cur: Bag::new(),
            new: Bag::new(),
        }
    }

    pub fn insert<T>(&mut self, elem: *mut T) {
        self.new.insert(elem)
    }

    /// Collect one epoch of garbage, rotating the local garbage bags.
    pub unsafe fn collect(&mut self) {
        // Free the oldest garbage, then shift each bag down one slot so that
        // `new` starts out empty for the next epoch.
        let ret = self.old.collect();
        mem::swap(&mut self.old, &mut self.cur);
        mem::swap(&mut self.cur, &mut self.new);
        ret
    }

    pub fn size(&self) -> usize {
        self.old.len() + self.cur.len() + self.new.len()
    }
}
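
// Illustrative sketch, not part of the upstream crossbeam source: it shows
// how the three-bag rotation defers reclamation, so garbage inserted into
// `new` is only freed on the third `collect` call, i.e. once the epochs
// have sufficiently advanced.
#[cfg(test)]
mod local_rotation_sketch {
    use super::Local;

    #[test]
    fn garbage_is_freed_on_the_third_collect() {
        let mut local = Local::new();
        local.insert(Box::into_raw(Box::new(7u64)));
        assert_eq!(local.size(), 1);
        unsafe {
            local.collect(); // the pointer rotates from `new` into `cur`
            local.collect(); // ... and from `cur` into `old`
            assert_eq!(local.size(), 1);
            local.collect(); // `old` is collected: the allocation is freed
        }
        assert_eq!(local.size(), 0);
    }
}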

/// A concurrent garbage bag, currently based on Treiber's stack.
///
/// The elements are themselves owned `Bag`s.
#[derive(Debug)]
pub struct ConcBag {
    head: AtomicPtr<Node>,
}

unsafe impl ZerosValid for ConcBag {}

#[derive(Debug)]
struct Node {
    data: Bag,
    next: AtomicPtr<Node>,
}

impl ConcBag {
    pub fn insert(&self, t: Bag) {
        let n = Box::into_raw(Box::new(
            Node { data: t, next: AtomicPtr::new(ptr::null_mut()) }));
        // Treiber-stack push: keep retrying the CAS until the head we read
        // is still the head when we swing it to the new node.
        loop {
            let head = self.head.load(Acquire);
            unsafe { (*n).next.store(head, Relaxed) };
            if self.head.compare_and_swap(head, n, Release) == head { break }
        }
    }

    pub unsafe fn collect(&self) {
        // check to avoid xchg instruction
        // when no garbage exists
        let mut head = self.head.load(Relaxed);
        if head != ptr::null_mut() {
            head = self.head.swap(ptr::null_mut(), Acquire);

            while head != ptr::null_mut() {
                // Take ownership of the node, free its bag, and move on.
                let mut n = Box::from_raw(head);
                n.data.collect();
                head = n.next.load(Relaxed);
            }
        }
    }
}
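
// Illustrative sketch, not part of the upstream file: the header comment says
// that an exiting thread's bags must be moved into the global garbage bags.
// Upstream keeps that global state elsewhere; the single `ConcBag` constructed
// below (using the private `head` field directly, since zeroed storage via
// `ZerosValid` is assumed in real use) merely stands in for it, so treat the
// shape of this test as an assumption rather than the real API.
#[cfg(test)]
mod migration_sketch {
    use super::{ConcBag, Local};
    use std::ptr;
    use std::sync::atomic::AtomicPtr;
    use std::sync::atomic::Ordering::Relaxed;

    #[test]
    fn exiting_thread_hands_its_bags_to_a_global_stack() {
        let mut local = Local::new();
        local.insert(Box::into_raw(Box::new(1u32)));

        // Stand-in for a global, shared bag of bags.
        let global = ConcBag { head: AtomicPtr::new(ptr::null_mut()) };

        // The exiting thread pushes its owned bags onto the shared stack.
        let Local { old, cur, new } = local;
        global.insert(old);
        global.insert(cur);
        global.insert(new);

        // Later, some thread reclaims everything that was handed over.
        unsafe { global.collect() };
        assert!(global.head.load(Relaxed).is_null());
    }
}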