/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_rmb(void);

/**
 * General memory barrier between lcores.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores.
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores.
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);

#endif /* __DOXYGEN__ */

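/*
 * A minimal usage sketch (illustrative only, not part of this API): a
 * producer/consumer pair pairing rte_smp_wmb() with rte_smp_rmb() so that
 * a payload store is visible before its ready flag. The struct and
 * function names below are hypothetical.
 *
 * @code
 * struct msg {
 *         uint32_t payload;
 *         volatile uint32_t ready;
 * };
 *
 * static void
 * produce(struct msg *m, uint32_t val)
 * {
 *         m->payload = val;
 *         rte_smp_wmb();          // publish payload before the flag
 *         m->ready = 1;
 * }
 *
 * static uint32_t
 * consume(struct msg *m)
 * {
 *         while (m->ready == 0)
 *                 ;               // spin until the flag is observed
 *         rte_smp_rmb();          // order the flag read before the payload read
 *         return m->payload;
 * }
 * @endcode
 */
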
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {             \
        asm volatile ("" : : : "memory");       \
} while (0)

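/*
 * Illustrative sketch (not part of this API): rte_compiler_barrier()
 * constrains only the compiler, not the CPU. A sketch, assuming a
 * strongly-ordered architecture such as x86 where stores are not
 * reordered with other stores by hardware; on weakly-ordered CPUs,
 * rte_smp_wmb() would be required instead. The helper name is
 * hypothetical.
 *
 * @code
 * static void
 * publish(volatile uint32_t *data, volatile uint32_t *flag)
 * {
 *         *data = 42;
 *         rte_compiler_barrier(); // forbid compile-time reordering of
 *         *flag = 1;              // the two stores
 * }
 * @endcode
 */
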
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

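/*
 * A minimal usage sketch (illustrative, not part of this API): atomically
 * setting a flag bit in a shared 16-bit word with a compare-and-set retry
 * loop. The helper name set_flag16() is hypothetical.
 *
 * @code
 * static void
 * set_flag16(volatile uint16_t *word, uint16_t flag)
 * {
 *         uint16_t old;
 *
 *         do {
 *                 old = *word;   // snapshot the current value
 *         } while (rte_atomic16_cmpset(word, old, old | flag) == 0);
 * }
 * @endcode
 */
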
/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
        v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
        return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
        v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
        rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
        rte_atomic16_sub(v, 1);
}
#endif

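/*
 * Illustrative usage (not part of this API): a shared statistics counter
 * built from the primitives above. The variable name pkt_count is
 * hypothetical.
 *
 * @code
 * static rte_atomic16_t pkt_count = RTE_ATOMIC16_INIT(0);
 *
 * static void
 * on_packet(void)
 * {
 *         rte_atomic16_inc(&pkt_count);          // safe across lcores
 * }
 *
 * static int16_t
 * snapshot(void)
 * {
 *         return rte_atomic16_read(&pkt_count);  // single atomic load
 * }
 * @endcode
 */
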
/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
        return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
        return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
        return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
        v->cnt = 0;
}

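/*
 * Illustrative sketch (not part of this API): test_and_set() as a simple
 * try-lock, paired with clear() as the release. The names are
 * hypothetical, and a real lock would also need appropriate barriers
 * around the critical section.
 *
 * @code
 * static rte_atomic16_t lock16 = RTE_ATOMIC16_INIT(0);
 *
 * static int
 * try_enter(void)
 * {
 *         return rte_atomic16_test_and_set(&lock16); // 1 on success, 0 if held
 * }
 *
 * static void
 * leave(void)
 * {
 *         rte_atomic16_clear(&lock16);               // drop the lock
 * }
 * @endcode
 */
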
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
        v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
        return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
        v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
        rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
        rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
        return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
        return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

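/*
 * Illustrative sketch (not part of this API): reference counting with
 * dec_and_test(), freeing the object when the last reference drops. The
 * struct and function names are hypothetical.
 *
 * @code
 * #include <stdlib.h>
 *
 * struct obj {
 *         rte_atomic32_t refcnt;
 *         // ... payload ...
 * };
 *
 * static void
 * obj_put(struct obj *o)
 * {
 *         if (rte_atomic32_dec_and_test(&o->refcnt))
 *                 free(o);        // last reference is gone
 * }
 * @endcode
 */
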
/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
        return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
        v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
        v->cnt = 0;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, 0);
        }
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
        return v->cnt;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                /* replace the value by itself */
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, tmp);
        }
        return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
        v->cnt = new_value;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, new_value);
        }
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}
#endif

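/*
 * Illustrative usage (not part of this API): a 64-bit byte counter. On
 * 32-bit builds the cmpset loops above emulate atomic 64-bit access, so
 * these helpers stay portable. The variable name total_bytes is
 * hypothetical.
 *
 * @code
 * static rte_atomic64_t total_bytes = RTE_ATOMIC64_INIT(0);
 *
 * static void
 * account(uint32_t pkt_len)
 * {
 *         rte_atomic64_add(&total_bytes, pkt_len); // atomic even on ILP32
 * }
 * @endcode
 */
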
/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
        rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
        rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

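/*
 * Illustrative sketch (not part of this API): allocating unique sequence
 * numbers with add_return(); each caller receives a distinct value. The
 * variable name next_seq is hypothetical.
 *
 * @code
 * static rte_atomic64_t next_seq = RTE_ATOMIC64_INIT(0);
 *
 * static int64_t
 * alloc_seq(void)
 * {
 *         return rte_atomic64_add_return(&next_seq, 1); // unique per call
 * }
 * @endcode
 */
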
/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
        return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
        rte_atomic64_set(v, 0);
}
#endif

#endif /* _RTE_ATOMIC_H_ */