/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/** @name Memory Barrier
 */
///@{
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
///@}

/** @name SMP Memory Barrier
 */
///@{
/**
 * General memory barrier between lcores
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);
///@}

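/*
 * Illustrative sketch (not part of the original header): the classic
 * single-producer/single-consumer handoff built from the SMP barriers.
 *
 * Producer lcore:
 *
 *     buf[idx] = payload;      // fill the data first
 *     rte_smp_wmb();           // make the payload visible ...
 *     ready = 1;               // ... before publishing the flag
 *
 * Consumer lcore:
 *
 *     while (ready == 0)
 *             ;                // poll the flag
 *     rte_smp_rmb();           // order the flag read before ...
 *     use(buf[idx]);           // ... reading the payload
 *
 * 'buf', 'idx', 'ready' and use() are hypothetical names used only for
 * illustration.
 */
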
/** @name I/O Memory Barrier
 */
///@{
/**
 * General memory barrier for I/O device
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device
 *
 * Guarantees that the LOAD operations on I/O device that precede the
 * rte_io_rmb() call are visible to CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
///@}
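
/*
 * Illustrative sketch (not part of the original header): ordering a DMA
 * descriptor update before ringing a device doorbell.
 *
 *     desc->addr = buf_iova;   // fill the descriptor first
 *     rte_io_wmb();            // make it visible to the device ...
 *     *doorbell_reg = head;    // ... before the MMIO doorbell write
 *
 * 'desc', 'buf_iova', 'doorbell_reg' and 'head' are hypothetical names
 * used only for illustration.
 */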

/** @name Coherent I/O Memory Barrier
 *
 * A coherent I/O memory barrier is a lightweight version of the I/O
 * memory barriers, which are system-wide data synchronization barriers.
 * It covers only the coherent memory domain shared between an lcore and
 * an I/O device, but it is the same as the I/O memory barriers on most
 * architectures. However, some architectures provide even lighter
 * barriers that sit somewhere between I/O memory barriers and SMP
 * memory barriers. For example, on ARMv8 the DMB (data memory barrier)
 * instruction can take different shareability domains - inner-shareable
 * and outer-shareable. The inner-shareable DMB fits the SMP memory
 * barriers and the outer-shareable DMB fits the coherent I/O memory
 * barriers, which act on coherent memory.
 *
 * In most cases the I/O memory barriers are the safer choice, but if
 * the operations are on coherent memory rather than on an incoherent
 * MMIO region of a device, the coherent I/O memory barriers can be used
 * instead, which may bring a performance gain depending on the
 * architecture.
 */
///@{
/**
 * Write memory barrier for coherent memory between lcore and I/O device
 *
 * Guarantees that the STORE operations on coherent memory that
 * precede the rte_cio_wmb() call are visible to I/O device before the
 * STORE operations that follow it.
 */
static inline void rte_cio_wmb(void);

/**
 * Read memory barrier for coherent memory between lcore and I/O device
 *
 * Guarantees that the LOAD operations on coherent memory updated by
 * I/O device that precede the rte_cio_rmb() call are visible to CPU
 * before the LOAD operations that follow it.
 */
static inline void rte_cio_rmb(void);
///@}

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)

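/*
 * Illustrative sketch (not part of the original header): keep the
 * compiler from caching a flag in a register while busy-waiting.
 *
 *     while (flag == 0)
 *             rte_compiler_barrier();  // force 'flag' to be re-read
 *
 * 'flag' is a hypothetical variable; note this only constrains the
 * compiler - on SMP systems a real memory barrier is usually needed
 * as well.
 */
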
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

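/*
 * Illustrative sketch (not part of the original header): a typical
 * compare-and-set retry loop that atomically adds to a shared value.
 *
 *     static volatile uint16_t shared;  // hypothetical shared counter
 *
 *     uint16_t old, val;
 *     do {
 *             old = shared;
 *             val = old + 5;
 *     } while (rte_atomic16_cmpset(&shared, old, val) == 0);
 *
 * The loop retries whenever another lcore changed 'shared' between the
 * read and the cmpset.
 */
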
/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

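/*
 * Illustrative sketch (not part of the original header): atomically
 * consume a pending-work flag in one step.
 *
 *     static volatile uint16_t pending;  // hypothetical flag
 *
 *     if (rte_atomic16_exchange(&pending, 0) != 0) {
 *             // we observed the flag set and cleared it atomically
 *     }
 */
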
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

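/*
 * Illustrative sketch (not part of the original header): the classic
 * reference-count drop built on dec-and-test.
 *
 *     static rte_atomic16_t refcnt = RTE_ATOMIC16_INIT(1);  // hypothetical
 *
 *     if (rte_atomic16_dec_and_test(&refcnt))
 *             free_object();  // hypothetical cleanup, runs exactly once
 */
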
/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

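/*
 * Illustrative sketch (not part of the original header): test-and-set
 * as a simple one-shot guard.
 *
 *     static rte_atomic16_t once = RTE_ATOMIC16_INIT(0);  // hypothetical
 *
 *     if (rte_atomic16_test_and_set(&once)) {
 *             // first caller wins and runs the one-time setup
 *     }
 */
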
/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, 0);
	}
#endif
}
#endif

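/*
 * Note (added for clarity, not in the original header): on targets
 * where __LP64__ is not defined, a plain 64-bit load or store is not
 * guaranteed to be atomic, so init above and read/set below fall back
 * to rte_atomic64_cmpset() retry loops instead of direct accesses.
 */
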
/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

/*------------------------ 128 bit atomic operations -------------------------*/

#ifdef __DOXYGEN__

/**
 * An atomic compare and set function used by the mutex functions.
 * (Atomically) Equivalent to:
 * @code
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 * @endcode
 *
 * @note This function is currently only available for the x86-64 platform.
 *
 * @note The success and failure arguments must be one of the __ATOMIC_* values
 * defined in the C++11 standard. For details on their behavior, refer to the
 * standard.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be __ATOMIC_RELEASE,
 *   __ATOMIC_ACQ_REL, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int __rte_experimental
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */
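
/*
 * Illustrative sketch (not part of the original header): updating a
 * two-word structure with rte_atomic128_cmp_exchange(). On failure the
 * implementation refreshes 'expected' with the current value, so the
 * loop only re-reads through the exchange itself.
 *
 *     rte_int128_t expected = *ring_slot;      // hypothetical shared slot
 *     rte_int128_t desired;
 *     do {
 *             desired.val[0] = expected.val[0] + 1;
 *             desired.val[1] = expected.val[1];
 *     } while (!rte_atomic128_cmp_exchange(ring_slot, &expected, &desired,
 *                                          0, __ATOMIC_ACQ_REL,
 *                                          __ATOMIC_ACQUIRE));
 *
 * 'ring_slot' and the .val[] layout are assumptions for illustration;
 * see rte_int128_t in the architecture headers for the actual fields.
 */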

#endif /* _RTE_ATOMIC_H_ */