// SPDX-License-Identifier: ISC
/*
 * Copyright (c) 2016-2018  David Lamparter, for NetDEF, Inc.
 */
14 void atomlist_add_head(struct atomlist_head
*h
, struct atomlist_item
*item
)
17 atomptr_t i
= atomptr_i(item
);
19 atomic_fetch_add_explicit(&h
->count
, 1, memory_order_relaxed
);
21 /* updating ->last is possible here, but makes the code considerably
22 * more complicated... let's not.
24 prevval
= ATOMPTR_NULL
;
25 item
->next
= ATOMPTR_NULL
;
27 /* head-insert atomically
28 * release barrier: item + item->next writes must be completed
30 while (!atomic_compare_exchange_weak_explicit(&h
->first
, &prevval
, i
,
31 memory_order_release
, memory_order_relaxed
))
32 atomic_store_explicit(&item
->next
, prevval
,
33 memory_order_relaxed
);
36 void atomlist_add_tail(struct atomlist_head
*h
, struct atomlist_item
*item
)
38 atomptr_t prevval
= ATOMPTR_NULL
;
39 atomptr_t i
= atomptr_i(item
);
41 struct atomlist_item
*prevptr
;
42 _Atomic atomptr_t
*prev
;
44 item
->next
= ATOMPTR_NULL
;
46 atomic_fetch_add_explicit(&h
->count
, 1, memory_order_relaxed
);
48 /* place new item into ->last
49 * release: item writes completed; acquire: DD barrier on hint
51 hint
= atomic_exchange_explicit(&h
->last
, i
, memory_order_acq_rel
);
54 if (atomptr_p(hint
) == NULL
)
57 prev
= &atomlist_itemp(hint
)->next
;
60 prevval
= atomic_load_explicit(prev
,
61 memory_order_consume
);
62 prevptr
= atomlist_itemp(prevval
);
66 prev
= &prevptr
->next
;
69 /* last item is being deleted - start over */
70 if (atomptr_l(prevval
)) {
75 /* no barrier - item->next is NULL and was so in xchg above */
76 if (!atomic_compare_exchange_strong_explicit(prev
, &prevval
, i
,
78 memory_order_consume
)) {
86 static void atomlist_del_core(struct atomlist_head
*h
,
87 struct atomlist_item
*item
,
88 _Atomic atomptr_t
*hint
,
91 _Atomic atomptr_t
*prev
= hint
? hint
: &h
->first
, *upd
;
92 atomptr_t prevval
, updval
;
93 struct atomlist_item
*prevptr
;
95 /* drop us off "last" if needed. no r/w to barrier. */
96 prevval
= atomptr_i(item
);
97 atomic_compare_exchange_strong_explicit(&h
->last
, &prevval
,
99 memory_order_relaxed
, memory_order_relaxed
);
101 atomic_fetch_sub_explicit(&h
->count
, 1, memory_order_relaxed
);
103 /* the following code should be identical (except sort<>list) to
104 * atomsort_del_hint()
108 updval
= ATOMPTR_LOCK
;
111 prevval
= atomic_load_explicit(prev
,
112 memory_order_consume
);
114 /* track the beginning of a chain of deleted items
115 * this is necessary to make this lock-free; we can
116 * complete deletions started by other threads.
118 if (!atomptr_l(prevval
)) {
123 prevptr
= atomlist_itemp(prevval
);
127 prev
= &prevptr
->next
;
131 /* another thread completed our deletion */
134 if (!upd
|| atomptr_l(updval
)) {
135 /* failed to find non-deleted predecessor...
142 if (!atomic_compare_exchange_strong_explicit(upd
, &updval
,
143 next
, memory_order_consume
,
144 memory_order_consume
)) {
145 /* prev doesn't point to item anymore, something
146 * was inserted. continue at same position forward.
154 void atomlist_del_hint(struct atomlist_head
*h
, struct atomlist_item
*item
,
155 _Atomic atomptr_t
*hint
)
159 /* mark ourselves in-delete - full barrier */
160 next
= atomic_fetch_or_explicit(&item
->next
, ATOMPTR_LOCK
,
161 memory_order_acquire
);
162 assert(!atomptr_l(next
)); /* delete race on same item */
164 atomlist_del_core(h
, item
, hint
, next
);
167 struct atomlist_item
*atomlist_pop(struct atomlist_head
*h
)
169 struct atomlist_item
*item
;
172 /* grab head of the list - and remember it in replval for the
173 * actual delete below. No matter what, the head of the list is
174 * where we start deleting because either it's our item, or it's
175 * some delete-marked items and then our item.
177 next
= atomic_load_explicit(&h
->first
, memory_order_consume
);
180 item
= atomlist_itemp(next
);
184 /* try to mark deletion */
185 next
= atomic_fetch_or_explicit(&item
->next
, ATOMPTR_LOCK
,
186 memory_order_acquire
);
188 } while (atomptr_l(next
));
189 /* if loop is taken: delete race on same item (another pop or del)
190 * => proceed to next item
191 * if loop exited here: we have our item selected and marked
193 atomlist_del_core(h
, item
, &h
->first
, next
);
197 struct atomsort_item
*atomsort_add(struct atomsort_head
*h
,
198 struct atomsort_item
*item
, int (*cmpfn
)(
199 const struct atomsort_item
*,
200 const struct atomsort_item
*))
202 _Atomic atomptr_t
*prev
;
204 atomptr_t i
= atomptr_i(item
);
205 struct atomsort_item
*previtem
;
212 prevval
= atomic_load_explicit(prev
,
213 memory_order_acquire
);
214 previtem
= atomptr_p(prevval
);
216 if (!previtem
|| (cmpval
= cmpfn(previtem
, item
)) > 0)
221 prev
= &previtem
->next
;
224 if (atomptr_l(prevval
))
227 item
->next
= prevval
;
228 if (atomic_compare_exchange_strong_explicit(prev
, &prevval
, i
,
229 memory_order_release
, memory_order_relaxed
))
233 atomic_fetch_add_explicit(&h
->count
, 1, memory_order_relaxed
);
237 static void atomsort_del_core(struct atomsort_head
*h
,
238 struct atomsort_item
*item
, _Atomic atomptr_t
*hint
,
241 _Atomic atomptr_t
*prev
= hint
? hint
: &h
->first
, *upd
;
242 atomptr_t prevval
, updval
;
243 struct atomsort_item
*prevptr
;
245 atomic_fetch_sub_explicit(&h
->count
, 1, memory_order_relaxed
);
247 /* the following code should be identical (except sort<>list) to
248 * atomlist_del_core()
252 updval
= ATOMPTR_LOCK
;
255 prevval
= atomic_load_explicit(prev
,
256 memory_order_consume
);
258 /* track the beginning of a chain of deleted items
259 * this is necessary to make this lock-free; we can
260 * complete deletions started by other threads.
262 if (!atomptr_l(prevval
)) {
267 prevptr
= atomsort_itemp(prevval
);
271 prev
= &prevptr
->next
;
275 /* another thread completed our deletion */
278 if (!upd
|| atomptr_l(updval
)) {
279 /* failed to find non-deleted predecessor...
286 if (!atomic_compare_exchange_strong_explicit(upd
, &updval
,
287 next
, memory_order_relaxed
,
288 memory_order_relaxed
)) {
289 /* prev doesn't point to item anymore, something
290 * was inserted. continue at same position forward.
298 void atomsort_del_hint(struct atomsort_head
*h
, struct atomsort_item
*item
,
299 _Atomic atomptr_t
*hint
)
303 /* mark ourselves in-delete - full barrier */
304 next
= atomic_fetch_or_explicit(&item
->next
, ATOMPTR_LOCK
,
305 memory_order_seq_cst
);
306 assert(!atomptr_l(next
)); /* delete race on same item */
308 atomsort_del_core(h
, item
, hint
, next
);
311 struct atomsort_item
*atomsort_pop(struct atomsort_head
*h
)
313 struct atomsort_item
*item
;
316 /* grab head of the list - and remember it in replval for the
317 * actual delete below. No matter what, the head of the list is
318 * where we start deleting because either it's our item, or it's
319 * some delete-marked items and then our item.
321 next
= atomic_load_explicit(&h
->first
, memory_order_consume
);
324 item
= atomsort_itemp(next
);
328 /* try to mark deletion */
329 next
= atomic_fetch_or_explicit(&item
->next
, ATOMPTR_LOCK
,
330 memory_order_acquire
);
332 } while (atomptr_l(next
));
333 /* if loop is taken: delete race on same item (another pop or del)
334 * => proceed to next item
335 * if loop exited here: we have our item selected and marked
337 atomsort_del_core(h
, item
, &h
->first
, next
);