/*
 * Copyright (c) 2013, 2014 Nicira, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 | ||
/* This header implements atomic operation primitives on GCC 4.x. */
#ifndef IN_OVS_ATOMIC_H
#error "This header should only be included indirectly via ovs-atomic.h."
#endif

/* Wide (bigger-than-pointer) types fall back to mutex-protected access. */
#include "ovs-atomic-locked.h"
#define OVS_ATOMIC_GCC4P_IMPL 1

/* An "atomic" variable is just a plain variable of the same type; atomicity
 * comes from the operations below (GCC __sync builtins or a lock). */
#define ATOMIC(TYPE) TYPE

/* C11-style lock-free property macros: 2 = always lock-free, 0 = never.
 * long/long long are lock-free only when they fit in a pointer-sized word,
 * matching IS_LOCKLESS_ATOMIC() below. */
#define ATOMIC_BOOL_LOCK_FREE 2
#define ATOMIC_CHAR_LOCK_FREE 2
#define ATOMIC_SHORT_LOCK_FREE 2
#define ATOMIC_INT_LOCK_FREE 2
#define ATOMIC_LONG_LOCK_FREE (ULONG_MAX <= UINTPTR_MAX ? 2 : 0)
#define ATOMIC_LLONG_LOCK_FREE (ULLONG_MAX <= UINTPTR_MAX ? 2 : 0)
#define ATOMIC_POINTER_LOCK_FREE 2

/* Memory ordering constraints, mirroring C11 <stdatomic.h>.  The enumerator
 * order matters: callers and the fence helpers below rely on these exact
 * names, and memory_order_seq_cst being the strongest. */
typedef enum {
    memory_order_relaxed,
    memory_order_consume,
    memory_order_acquire,
    memory_order_release,
    memory_order_acq_rel,
    memory_order_seq_cst
} memory_order;

/* True if OBJECT is small enough to access atomically without a lock
 * (i.e. it fits in a machine word). */
#define IS_LOCKLESS_ATOMIC(OBJECT) (sizeof(OBJECT) <= sizeof(void *))

/* Initialization: atomics here are plain variables, so plain assignment. */
#define ATOMIC_VAR_INIT(VALUE) VALUE
#define atomic_init(OBJECT, VALUE) (*(OBJECT) = (VALUE), (void) 0)
48 | |
49 | static inline void | |
50 | atomic_thread_fence(memory_order order) | |
51 | { | |
52 | if (order != memory_order_relaxed) { | |
53 | __sync_synchronize(); | |
54 | } | |
55 | } | |
56 | ||
57 | static inline void | |
58 | atomic_thread_fence_if_seq_cst(memory_order order) | |
59 | { | |
60 | if (order == memory_order_seq_cst) { | |
61 | __sync_synchronize(); | |
62 | } | |
63 | } | |
64 | ||
65 | static inline void | |
15ba057e | 66 | atomic_signal_fence(memory_order order) |
31a3fc6e BP |
67 | { |
68 | if (order != memory_order_relaxed) { | |
69 | asm volatile("" : : : "memory"); | |
70 | } | |
71 | } | |
72 | ||
/* Returns 2 ("always lock-free") if *OBJ fits in a machine word, else 0.
 * The leading "(void) *(OBJ)" only checks that OBJ is a dereferenceable
 * pointer; its value is discarded. */
#define atomic_is_lock_free(OBJ)                \
    ((void) *(OBJ),                             \
     IS_LOCKLESS_ATOMIC(*(OBJ)) ? 2 : 0)
31a3fc6e BP |
76 | |
/* Stores SRC into *DST with the given memory ORDER.
 *
 * Lockless path: a barrier *before* the store gives release (and stronger)
 * semantics; a second barrier *after* the store is added only for seq_cst.
 * Wider-than-word types fall back to the lock in ovs-atomic-locked.h.
 * Evaluates to (void) 0 so the macro cannot be misused as a value. */
#define atomic_store(DST, SRC) \
    atomic_store_explicit(DST, SRC, memory_order_seq_cst)
#define atomic_store_explicit(DST, SRC, ORDER)  \
    ({                                          \
        typeof(DST) dst__ = (DST);              \
        typeof(SRC) src__ = (SRC);              \
                                                \
        if (IS_LOCKLESS_ATOMIC(*dst__)) {       \
            atomic_thread_fence(ORDER);         \
            *dst__ = src__;                     \
            atomic_thread_fence_if_seq_cst(ORDER); \
        } else {                                \
            atomic_store_locked(dst__, src__);  \
        }                                       \
        (void) 0;                               \
    })
31a3fc6e BP |
/* Reads *SRC into *DST with the given memory ORDER.
 *
 * Lockless path: a barrier *before* the load is needed only for seq_cst;
 * a barrier *after* the load supplies acquire (and stronger) semantics,
 * keeping later memory accesses from being hoisted above the load.
 * (Without that trailing fence, memory_order_acquire reads would compile
 * to a plain load with no barrier at all.)
 * Wider-than-word types fall back to the lock in ovs-atomic-locked.h.
 * Evaluates to (void) 0 so the macro cannot be misused as a value. */
#define atomic_read(SRC, DST) \
    atomic_read_explicit(SRC, DST, memory_order_seq_cst)
#define atomic_read_explicit(SRC, DST, ORDER)   \
    ({                                          \
        typeof(DST) dst__ = (DST);              \
        typeof(SRC) src__ = (SRC);              \
                                                \
        if (IS_LOCKLESS_ATOMIC(*src__)) {       \
            atomic_thread_fence_if_seq_cst(ORDER); \
            *dst__ = *src__;                    \
            atomic_thread_fence(ORDER);         \
        } else {                                \
            atomic_read_locked(src__, dst__);   \
        }                                       \
        (void) 0;                               \
    })
108 | ||
25045d75 JR |
/* Atomically compares *DST to *EXP; if equal, stores SRC into *DST and the
 * whole expression evaluates to true.  If not equal, writes the value
 * actually found in *DST back into *EXP (as C11 requires) and evaluates to
 * false.  Uses a statement expression: the final "ret__ == exp__" is the
 * macro's value.  __sync_val_compare_and_swap is a full barrier, so the
 * _explicit variants may ignore their order arguments. */
#define atomic_compare_exchange_strong(DST, EXP, SRC)             \
    ({                                                            \
        typeof(DST) dst__ = (DST);                                \
        typeof(EXP) expp__ = (EXP);                               \
        typeof(SRC) src__ = (SRC);                                \
        typeof(SRC) exp__ = *expp__;                              \
        typeof(SRC) ret__;                                        \
                                                                  \
        ret__ = __sync_val_compare_and_swap(dst__, exp__, src__); \
        if (ret__ != exp__) {                                     \
            *expp__ = ret__;                                      \
        }                                                         \
        ret__ == exp__;                                           \
    })
#define atomic_compare_exchange_strong_explicit(DST, EXP, SRC, ORD1, ORD2) \
    ((void) (ORD1), (void) (ORD2),                                \
     atomic_compare_exchange_strong(DST, EXP, SRC))
/* GCC's builtin CAS never fails spuriously, so "weak" == "strong" here. */
#define atomic_compare_exchange_weak \
    atomic_compare_exchange_strong
#define atomic_compare_exchange_weak_explicit \
    atomic_compare_exchange_strong_explicit
130 | ||
131 | ||
1bd2c9ed BP |
/* Common helper for the fetch-and-OP family: atomically applies OP (add,
 * sub, or, xor, and) with ARG to *RMW and stores the PREVIOUS value into
 * *ORIG.  Word-sized types use the GCC __sync_fetch_and_* builtins (full
 * barriers); wider types fall back to the lock in ovs-atomic-locked.h. */
#define atomic_op__(RMW, OP, ARG, ORIG)                     \
    ({                                                      \
        typeof(RMW) rmw__ = (RMW);                          \
        typeof(ARG) arg__ = (ARG);                          \
        typeof(ORIG) orig__ = (ORIG);                       \
                                                            \
        if (IS_LOCKLESS_ATOMIC(*rmw__)) {                   \
            *orig__ = __sync_fetch_and_##OP(rmw__, arg__);  \
        } else {                                            \
            atomic_op_locked(rmw__, OP, arg__, orig__);     \
        }                                                   \
    })

#define atomic_add(RMW, ARG, ORIG) atomic_op__(RMW, add, ARG, ORIG)
#define atomic_sub(RMW, ARG, ORIG) atomic_op__(RMW, sub, ARG, ORIG)
#define atomic_or( RMW, ARG, ORIG) atomic_op__(RMW, or, ARG, ORIG)
#define atomic_xor(RMW, ARG, ORIG) atomic_op__(RMW, xor, ARG, ORIG)
#define atomic_and(RMW, ARG, ORIG) atomic_op__(RMW, and, ARG, ORIG)

/* The builtins are full barriers, so the explicit ORDER is ignored. */
#define atomic_add_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_add(RMW, OPERAND, ORIG))
#define atomic_sub_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_sub(RMW, OPERAND, ORIG))
#define atomic_or_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_or(RMW, OPERAND, ORIG))
#define atomic_xor_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_xor(RMW, OPERAND, ORIG))
#define atomic_and_explicit(RMW, OPERAND, ORIG, ORDER) \
    ((void) (ORDER), atomic_and(RMW, OPERAND, ORIG))
161 | \f | |
162 | /* atomic_flag */ | |
163 | ||
164 | typedef struct { | |
165 | int b; | |
166 | } atomic_flag; | |
167 | #define ATOMIC_FLAG_INIT { false } | |
168 | ||
169 | static inline bool | |
170 | atomic_flag_test_and_set(volatile atomic_flag *object) | |
171 | { | |
172 | return __sync_lock_test_and_set(&object->b, 1); | |
173 | } | |
174 | ||
175 | static inline bool | |
176 | atomic_flag_test_and_set_explicit(volatile atomic_flag *object, | |
177 | memory_order order OVS_UNUSED) | |
178 | { | |
179 | return atomic_flag_test_and_set(object); | |
180 | } | |
181 | ||
182 | static inline void | |
183 | atomic_flag_clear(volatile atomic_flag *object) | |
184 | { | |
185 | __sync_lock_release(&object->b); | |
186 | } | |
187 | ||
188 | static inline void | |
189 | atomic_flag_clear_explicit(volatile atomic_flag *object, | |
190 | memory_order order OVS_UNUSED) | |
191 | { | |
192 | atomic_flag_clear(object); | |
193 | } |