/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_rmb(void);

/**
 * General memory barrier between lcores
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);

/**
 * General memory barrier for I/O device
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to the I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to the I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device
 *
 * Guarantees that the LOAD operations on the I/O device that precede the
 * rte_io_rmb() call are visible to the CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
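
/*
 * Usage sketch (illustrative, not part of this header): the SMP barriers
 * pair up across lcores. Assuming hypothetical shared variables `data`
 * and `ready`, a producer publishes a payload and a consumer observes it
 * in order:
 *
 *   // producer lcore
 *   data = 42;
 *   rte_smp_wmb();          // payload store completes before the flag store
 *   ready = 1;
 *
 *   // consumer lcore
 *   while (ready == 0)
 *           ;
 *   rte_smp_rmb();          // flag load completes before the payload load
 *   assert(data == 42);
 */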

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while (0)

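/*
 * Usage sketch (illustrative): rte_compiler_barrier() constrains only the
 * compiler and emits no CPU fence, so it suffices only within a single
 * instruction stream, e.g. keeping measured work between two timestamps
 * (rte_rdtsc() from rte_cycles.h; process_burst() is a hypothetical name):
 *
 *   uint64_t t0 = rte_rdtsc();
 *   rte_compiler_barrier();  // work must not be hoisted above t0
 *   process_burst();
 *   rte_compiler_barrier();  // work must not sink below t1
 *   uint64_t t1 = rte_rdtsc();
 */
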
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
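
/*
 * Usage sketch (illustrative): cmpset is the building block for lock-free
 * read-modify-write loops. A hypothetical saturating 16-bit increment,
 * retried until no other lcore races the update:
 *
 *   static inline void
 *   sat_inc16(volatile uint16_t *p)
 *   {
 *           uint16_t old;
 *
 *           do {
 *                   old = *p;
 *                   if (old == UINT16_MAX)
 *                           return;         // already saturated
 *           } while (rte_atomic16_cmpset(p, old, old + 1) == 0);
 *   }
 */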

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

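/*
 * Usage sketch (illustrative): the counter API composes into a simple
 * reference count. The struct, obj_get()/obj_put() and the free() call
 * are hypothetical names, not part of DPDK:
 *
 *   struct obj {
 *           rte_atomic16_t refcnt;
 *   };
 *
 *   void obj_get(struct obj *o) { rte_atomic16_inc(&o->refcnt); }
 *
 *   void obj_put(struct obj *o)
 *   {
 *           if (rte_atomic16_dec_and_test(&o->refcnt))
 *                   free(o);        // last reference dropped
 *   }
 */
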
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif
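
/*
 * Usage sketch (illustrative): test_and_set gives a one-shot gate, e.g.
 * running an initialization routine exactly once across lcores; the flag
 * and do_global_init() are hypothetical names:
 *
 *   static rte_atomic32_t init_once = RTE_ATOMIC32_INIT(0);
 *
 *   if (rte_atomic32_test_and_set(&init_once))
 *           do_global_init();       // only the first caller enters
 */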

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	/* without __LP64__, a plain 64-bit store may tear; emulate it
	 * with a compare-and-set loop */
	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	/* without __LP64__, a plain 64-bit store may tear; emulate it
	 * with a compare-and-set loop */
	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif
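
/*
 * Usage sketch (illustrative): add_return yields a unique, monotonically
 * increasing value per caller, which makes a simple sequence-number or
 * ticket generator; the counter name is hypothetical:
 *
 *   static rte_atomic64_t next_seq = RTE_ATOMIC64_INIT(0);
 *
 *   uint64_t seq = rte_atomic64_add_return(&next_seq, 1);  // 1, 2, 3, ...
 */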

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

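/*
 * Usage sketch (illustrative): a shared statistics counter updated from
 * several lcores and read from a control thread. rte_atomic64_read()
 * stays correct even on 32-bit targets where a plain 64-bit load could
 * tear. The names (and the PRId64 format from <inttypes.h>) are example
 * assumptions:
 *
 *   static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);
 *
 *   // datapath lcores
 *   rte_atomic64_add(&rx_bytes, pkt_len);
 *
 *   // control thread
 *   printf("rx bytes: %" PRId64 "\n", rte_atomic64_read(&rx_bytes));
 */
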
#endif /* _RTE_ATOMIC_H_ */