/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_METAG_ATOMIC_LOCK1_H
#define __ASM_METAG_ATOMIC_LOCK1_H

/* Static initializer for an atomic_t, e.g. atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }

#include <linux/compiler.h>

#include <asm/barrier.h>
#include <asm/global_lock.h>
/*
 * Plain load of the counter.  READ_ONCE() stops the compiler from
 * tearing or re-reading the value; reads do not take the global lock.
 */
static inline int atomic_read(const atomic_t *v)
{
	return READ_ONCE((v)->counter);
}

/*
 * atomic_set needs to take the lock to protect atomic_add_unless from a
 * possible race, as it reads the counter twice:
 *
 *  CPU0                               CPU1
 *  atomic_add_unless(1, 0)
 *    ret = v->counter (non-zero)
 *                                     atomic_set(v, 0)
 *    if (ret != u)
 *      v->counter += 1 (counter set to 1)
 *
 * Making atomic_set take the lock ensures that ordering and logical
 * consistency is preserved.  Returns the value stored.
 */
static inline int atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	__global_lock1(flags);
	/*
	 * fence() comes from <asm/barrier.h>; presumably it orders/flushes
	 * pending writes before the counter store — confirm arch semantics.
	 */
	fence();
	v->counter = i;
	__global_unlock1(flags);
	return i;
}

/*
 * atomic_set() above already serialises through the global lock, so it
 * can be used directly as the _release variant.
 */
#define atomic_set_release(v, i) atomic_set((v), (i))

/*
 * ATOMIC_OP - generate atomic_<op>() (no return value).
 * The read-modify-write of v->counter is made atomic by holding global
 * lock 1 (see <asm/global_lock.h>) around the update; fence() precedes
 * the write, matching atomic_set() above.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	__global_lock1(flags);						\
	fence();							\
	v->counter c_op i;						\
	__global_unlock1(flags);					\
}									\

/*
 * ATOMIC_OP_RETURN - generate atomic_<op>_return(), which applies the
 * operation under global lock 1 and returns the NEW counter value.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long result;						\
	unsigned long flags;						\
									\
	__global_lock1(flags);						\
	result = v->counter;						\
	result c_op i;							\
	fence();							\
	v->counter = result;						\
	__global_unlock1(flags);					\
									\
	return result;							\
}

/*
 * ATOMIC_FETCH_OP - generate atomic_fetch_<op>(), which applies the
 * operation under global lock 1 and returns the OLD counter value.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	unsigned long result;						\
	unsigned long flags;						\
									\
	__global_lock1(flags);						\
	result = v->counter;						\
	fence();							\
	v->counter c_op i;						\
	__global_unlock1(flags);					\
									\
	return result;							\
}

/* add/sub get all three variants: void, _return, and fetch_. */
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_OP_RETURN(op, c_op)					\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

#undef ATOMIC_OPS
/* Bitwise ops have no _return variant, only the void and fetch_ forms. */
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)

/* The generator macros are local to this header; drop them. */
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Compare-and-exchange: if *v equals @old, store @new.  Returns the
 * value that was read; callers detect success with ret == old.
 */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	__global_lock1(flags);
	ret = v->counter;
	if (ret == old) {
		fence();
		v->counter = new;
	}
	__global_unlock1(flags);

	return ret;
}

/* Unconditional exchange defers to the generic xchg() on the counter. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * Add @a to *v unless the counter currently equals @u.  Returns the OLD
 * value; callers compare it against @u to see whether the add happened.
 * (This is the double-read that forces atomic_set() to take the lock —
 * see the comment above atomic_set().)
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	__global_lock1(flags);
	ret = v->counter;
	if (ret != u) {
		fence();
		v->counter += a;
	}
	__global_unlock1(flags);

	return ret;
}

141 | static inline int atomic_sub_if_positive(int i, atomic_t *v) | |
142 | { | |
143 | int ret; | |
144 | unsigned long flags; | |
145 | ||
146 | __global_lock1(flags); | |
147 | ret = v->counter - 1; | |
148 | if (ret >= 0) { | |
149 | fence(); | |
150 | v->counter = ret; | |
151 | } | |
152 | __global_unlock1(flags); | |
153 | ||
154 | return ret; | |
155 | } | |
156 | ||
157 | #endif /* __ASM_METAG_ATOMIC_LOCK1_H */ |