/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/** @name Memory Barrier
 */
///@{
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
///@}

/** @name SMP Memory Barrier
 */
///@{
/**
 * General memory barrier between lcores
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);
///@}

/** @name I/O Memory Barrier
 */
///@{
/**
 * General memory barrier for I/O device
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to the I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to the I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device
 *
 * Guarantees that the LOAD operations on the I/O device that precede the
 * rte_io_rmb() call are visible to the CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
///@}

/** @name Coherent I/O Memory Barrier
 *
 * A coherent I/O memory barrier is a lightweight version of the I/O memory
 * barriers, which are system-wide data synchronization barriers. It covers
 * only the coherent memory domain shared between an lcore and an I/O device,
 * and on most architectures it is the same as the I/O memory barrier.
 * However, some architectures provide even lighter barriers that sit
 * between the I/O memory barriers and the SMP memory barriers. For example,
 * on ARMv8 the DMB (data memory barrier) instruction can target different
 * shareability domains: inner-shareable and outer-shareable. The
 * inner-shareable DMB is suitable for SMP memory barriers, while the
 * outer-shareable DMB serves as the coherent I/O memory barrier, which acts
 * on coherent memory.
 *
 * In most cases the I/O memory barriers are the safer choice, but if the
 * operations target coherent memory rather than an incoherent MMIO region
 * of a device, the coherent I/O memory barriers can be used instead, which
 * may bring a performance gain depending on the architecture.
 */
///@{
/**
 * Write memory barrier for coherent memory between lcore and I/O device
 *
 * Guarantees that the STORE operations on coherent memory that
 * precede the rte_cio_wmb() call are visible to the I/O device before the
 * STORE operations that follow it.
 */
static inline void rte_cio_wmb(void);

/**
 * Read memory barrier for coherent memory between lcore and I/O device
 *
 * Guarantees that the LOAD operations on coherent memory updated by the
 * I/O device that precede the rte_cio_rmb() call are visible to the CPU
 * before the LOAD operations that follow it.
 */
static inline void rte_cio_rmb(void);
///@}

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
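
/*
 * A minimal usage sketch of the SMP barriers declared above, assuming a
 * hypothetical producer/consumer pair sharing `data` and `ready` between two
 * lcores; the variable and function names are illustrative only and are not
 * part of this API.
 *
 * @code
 * static uint32_t data;
 * static volatile uint32_t ready;
 *
 * static void producer(void)
 * {
 *	data = 42;       // publish the payload first
 *	rte_smp_wmb();   // order the data store before the flag store
 *	ready = 1;
 * }
 *
 * static void consumer(void)
 * {
 *	while (ready == 0)
 *		;        // spin until the flag is observed
 *	rte_smp_rmb();   // order the flag load before the data load
 *	// data is now guaranteed to read 42
 * }
 * @endcode
 */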

/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
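
/*
 * A minimal sketch of a compare-and-set retry loop built on
 * rte_atomic16_cmpset(), here updating a shared 16-bit maximum. The
 * `shared_max` variable and update_max() helper are illustrative
 * assumptions, not part of this header.
 *
 * @code
 * static volatile uint16_t shared_max;
 *
 * static void update_max(uint16_t sample)
 * {
 *	uint16_t cur;
 *
 *	do {
 *		cur = shared_max;
 *		if (sample <= cur)
 *			return;   // nothing to update
 *		// retry if another lcore changed shared_max in the meantime
 *	} while (rte_atomic16_cmpset(&shared_max, cur, sample) == 0);
 * }
 * @endcode
 */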

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
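
/*
 * A small illustrative sketch (not part of the API) of using
 * rte_atomic16_exchange() to atomically take ownership of a pending value
 * while resetting it; `pending` and consume_pending() are assumed names.
 *
 * @code
 * static volatile uint16_t pending;
 *
 * static uint16_t consume_pending(void)
 * {
 *	// read the current value and replace it with 0 in one atomic step
 *	return rte_atomic16_exchange(&pending, 0);
 * }
 * @endcode
 */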

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
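
/*
 * A minimal reference-counting sketch using rte_atomic16_inc() and
 * rte_atomic16_sub_return(); the `refcnt` field, struct my_object and
 * release_object() helper are illustrative assumptions, not part of this
 * header.
 *
 * @code
 * struct my_object {
 *	rte_atomic16_t refcnt;
 * };
 *
 * static void obj_get(struct my_object *obj)
 * {
 *	rte_atomic16_inc(&obj->refcnt);
 * }
 *
 * static void obj_put(struct my_object *obj)
 * {
 *	// free the object once the last reference is dropped
 *	if (rte_atomic16_sub_return(&obj->refcnt, 1) == 0)
 *		release_object(obj);
 * }
 * @endcode
 */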

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failure). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif
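
/*
 * A sketch of a run-once guard built on rte_atomic16_test_and_set(); the
 * `init_done` counter and do_one_time_init() helper are assumed names used
 * only for illustration.
 *
 * @code
 * static rte_atomic16_t init_done = RTE_ATOMIC16_INIT(0);
 *
 * static void lazy_init(void)
 * {
 *	// only the first caller to flip the counter performs the init
 *	if (rte_atomic16_test_and_set(&init_done))
 *		do_one_time_init();
 * }
 * @endcode
 */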

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
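
/*
 * A sketch of handing out unique slot indices with rte_atomic32_add_return();
 * `next_slot` and alloc_slot() are assumed names used only for illustration.
 *
 * @code
 * static rte_atomic32_t next_slot = RTE_ATOMIC32_INIT(0);
 *
 * static int32_t alloc_slot(void)
 * {
 *	// each caller gets a distinct, monotonically increasing index
 *	return rte_atomic32_add_return(&next_slot, 1) - 1;
 * }
 * @endcode
 */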

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failure). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, tmp);
	}
	return tmp;
#endif
}
#endif
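
/*
 * Note: on 32-bit (non-__LP64__) targets a plain 64-bit load or store may be
 * split into two 32-bit accesses and can tear, which is why the fallback
 * paths above and below go through rte_atomic64_cmpset() even for reads,
 * sets and initialization.
 */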

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif
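
/*
 * A sketch of maintaining a 64-bit byte counter for statistics; `rx_bytes`,
 * account_packet() and read_rx_bytes() are assumed names used only for
 * illustration. On 32-bit targets the implementations above fall back to a
 * compare-and-set loop, so the update remains atomic there as well.
 *
 * @code
 * static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);
 *
 * static void account_packet(uint32_t pkt_len)
 * {
 *	rte_atomic64_add(&rx_bytes, pkt_len);
 * }
 *
 * static int64_t read_rx_bytes(void)
 * {
 *	return rte_atomic64_read(&rx_bytes);
 * }
 * @endcode
 */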

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failure). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

/*------------------------ 128 bit atomic operations -------------------------*/

#ifdef __DOXYGEN__

/**
 * An atomic compare and set function used by the mutex functions.
 * (Atomically) Equivalent to:
 * @code
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 * @endcode
 *
 * @note This function is currently only available for the x86-64 platform.
 *
 * @note The success and failure arguments must be one of the __ATOMIC_* values
 * defined in the C++11 standard. For details on their behavior, refer to the
 * standard.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be __ATOMIC_RELEASE,
 *   __ATOMIC_ACQ_REL, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int __rte_experimental
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);
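
/*
 * A sketch of an ABA-safe head update using rte_atomic128_cmp_exchange().
 * It assumes rte_int128_t exposes a 2 x 64-bit `val` array (as on x86-64)
 * and a hypothetical list head packing a pointer and a modification
 * counter; all names other than the API call are illustrative.
 *
 * @code
 * static rte_int128_t head; // val[0] = pointer bits, val[1] = version
 *
 * static void push(uint64_t new_ptr)
 * {
 *	rte_int128_t old, nxt;
 *
 *	old = head;  // snapshot; refreshed into `old` on each failed attempt
 *	do {
 *		nxt.val[0] = new_ptr;
 *		nxt.val[1] = old.val[1] + 1;  // bump version to defeat ABA
 *	} while (!rte_atomic128_cmp_exchange(&head, &old, &nxt, 0,
 *					     __ATOMIC_ACQ_REL,
 *					     __ATOMIC_RELAXED));
 * }
 * @endcode
 */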

#endif /* __DOXYGEN__ */

#endif /* _RTE_ATOMIC_H_ */