/* Source: lib/ovs-atomic-pthreads.h (Open vSwitch, mirror_ovs.git).
 * Retrieved from a gitweb blob view; page header text removed. */
1 /*
2 * Copyright (c) 2013, 2014 Nicira, Inc.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at:
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /* This header implements atomic operation primitives using pthreads. */
18 #ifndef IN_OVS_ATOMIC_H
19 #error "This header should only be included indirectly via ovs-atomic.h."
20 #endif
21
22 #include "ovs-atomic-locked.h"
23
/* Marks this header as the pthreads-based fallback implementation of the
 * ovs-atomic API, selected when the compiler provides no native atomics. */
#define OVS_ATOMIC_PTHREADS_IMPL 1

/* An "atomic" object here is just a plain object of the given type; every
 * access to it is serialized through a lock (see ovs-atomic-locked.h). */
#define ATOMIC(TYPE) TYPE

/* Nothing is lock-free in this implementation -- every operation takes a
 * mutex -- so all of the standard lock-free query macros report 0. */
#define ATOMIC_BOOL_LOCK_FREE 0
#define ATOMIC_CHAR_LOCK_FREE 0
#define ATOMIC_SHORT_LOCK_FREE 0
#define ATOMIC_INT_LOCK_FREE 0
#define ATOMIC_LONG_LOCK_FREE 0
#define ATOMIC_LLONG_LOCK_FREE 0
#define ATOMIC_POINTER_LOCK_FREE 0
35
/* Memory-order constants mirroring C11 <stdatomic.h> memory_order.  They are
 * accepted for API compatibility but otherwise ignored: the lock taken
 * around every operation already provides full ordering. */
typedef enum {
    memory_order_relaxed,
    memory_order_consume,
    memory_order_acquire,
    memory_order_release,
    memory_order_acq_rel,
    memory_order_seq_cst
} memory_order;
44
/* Static and dynamic initialization reduce to a plain assignment, since an
 * ATOMIC(TYPE) object carries no extra state of its own. */
#define ATOMIC_VAR_INIT(VALUE) (VALUE)
#define atomic_init(OBJECT, VALUE) (*(OBJECT) = (VALUE), (void) 0)
47
48 static inline void
49 atomic_thread_fence(memory_order order OVS_UNUSED)
50 {
51 /* Nothing to do. */
52 }
53
54 static inline void
55 atomic_signal_fence(memory_order order OVS_UNUSED)
56 {
57 /* Nothing to do. */
58 }
59
/* Nothing is ever lock-free under this implementation. */
#define atomic_is_lock_free(OBJ) false

/* Loads and stores delegate to the mutex-protected helpers from
 * ovs-atomic-locked.h; the "_explicit" forms evaluate and discard the
 * memory order (the lock already gives the strongest ordering). */
#define atomic_store(DST, SRC) atomic_store_locked(DST, SRC)
#define atomic_store_explicit(DST, SRC, ORDER) \
    ((void) (ORDER), atomic_store(DST, SRC))

#define atomic_read(SRC, DST) atomic_read_locked(SRC, DST)
#define atomic_read_explicit(SRC, DST, ORDER) \
    ((void) (ORDER), atomic_read(SRC, DST))

/* Compare-and-exchange.  Under a lock there are no spurious failures, so
 * the "weak" variants can simply alias the "strong" ones. */
#define atomic_compare_exchange_strong(DST, EXP, SRC) \
    atomic_compare_exchange_locked(DST, EXP, SRC)
#define atomic_compare_exchange_strong_explicit(DST, EXP, SRC, ORD1, ORD2) \
    ((void) (ORD1), (void) (ORD2), \
     atomic_compare_exchange_strong(DST, EXP, SRC))
#define atomic_compare_exchange_weak \
    atomic_compare_exchange_strong
#define atomic_compare_exchange_weak_explicit \
    atomic_compare_exchange_strong_explicit

/* Read-modify-write operations: each applies the named operation to *RMW
 * under the lock and stores the previous value in *ORIG. */
#define atomic_add(RMW, ARG, ORIG) atomic_op_locked(RMW, add, ARG, ORIG)
#define atomic_sub(RMW, ARG, ORIG) atomic_op_locked(RMW, sub, ARG, ORIG)
#define atomic_or( RMW, ARG, ORIG) atomic_op_locked(RMW, or, ARG, ORIG)
#define atomic_xor(RMW, ARG, ORIG) atomic_op_locked(RMW, xor, ARG, ORIG)
#define atomic_and(RMW, ARG, ORIG) atomic_op_locked(RMW, and, ARG, ORIG)

#define atomic_add_explicit(RMW, ARG, ORIG, ORDER) \
    ((void) (ORDER), atomic_add(RMW, ARG, ORIG))
#define atomic_sub_explicit(RMW, ARG, ORIG, ORDER) \
    ((void) (ORDER), atomic_sub(RMW, ARG, ORIG))
#define atomic_or_explicit(RMW, ARG, ORIG, ORDER) \
    ((void) (ORDER), atomic_or(RMW, ARG, ORIG))
#define atomic_xor_explicit(RMW, ARG, ORIG, ORDER) \
    ((void) (ORDER), atomic_xor(RMW, ARG, ORIG))
#define atomic_and_explicit(RMW, ARG, ORIG, ORDER) \
    ((void) (ORDER), atomic_and(RMW, ARG, ORIG))
96 \f
/* atomic_flag */

/* Substitute for C11 atomic_flag: a plain bool whose accesses are
 * serialized by the same external lock as every other atomic here. */
typedef struct {
    bool b;                     /* True while the flag is set. */
} atomic_flag;
#define ATOMIC_FLAG_INIT { false }
103
104 static inline bool
105 atomic_flag_test_and_set(volatile atomic_flag *flag_)
106 {
107 atomic_flag *flag = CONST_CAST(atomic_flag *, flag_);
108 bool old_value;
109
110 atomic_lock__(flag);
111 old_value = flag->b;
112 flag->b = true;
113 atomic_unlock__(flag);
114
115 return old_value;
116 }
117
118 static inline bool
119 atomic_flag_test_and_set_explicit(volatile atomic_flag *flag,
120 memory_order order OVS_UNUSED)
121 {
122 return atomic_flag_test_and_set(flag);
123 }
124
125 static inline void
126 atomic_flag_clear(volatile atomic_flag *flag_)
127 {
128 atomic_flag *flag = CONST_CAST(atomic_flag *, flag_);
129
130 atomic_lock__(flag);
131 flag->b = false;
132 atomic_unlock__(flag);
133 }
134
135 static inline void
136 atomic_flag_clear_explicit(volatile atomic_flag *flag,
137 memory_order order OVS_UNUSED)
138 {
139 atomic_flag_clear(flag);
140 }