]> git.proxmox.com Git - mirror_ubuntu-focal-kernel.git/blame - arch/s390/include/asm/atomic.h
Merge remote-tracking branches 'asoc/topic/ac97', 'asoc/topic/ac97-mfd', 'asoc/topic...
[mirror_ubuntu-focal-kernel.git] / arch / s390 / include / asm / atomic.h
CommitLineData
b2441318 1/* SPDX-License-Identifier: GPL-2.0 */
1da177e4 2/*
126b30c3 3 * Copyright IBM Corp. 1999, 2016
12751058
HC
4 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
5 * Denis Joseph Barrow,
126b30c3 6 * Arnd Bergmann,
1da177e4
LT
7 */
8
a53c8fab
HC
9#ifndef __ARCH_S390_ATOMIC__
10#define __ARCH_S390_ATOMIC__
11
12751058
HC
12#include <linux/compiler.h>
13#include <linux/types.h>
126b30c3 14#include <asm/atomic_ops.h>
0ccc8b7a 15#include <asm/barrier.h>
a0616cde 16#include <asm/cmpxchg.h>
1da177e4 17
1da177e4
LT
/* Static initializer for an atomic_t, e.g. static atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)  { (i) }
19
c51b9621
HC
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Performs a single 32-bit load ("l") of v->counter; the "Q" constraint
 * forces a memory operand so the compiler cannot cache a stale value.
 */
static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		" l %0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}
29
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: new value
 *
 * Performs a single 32-bit store ("st") into v->counter.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		" st %1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
1da177e4 36
bfe3349b 37static inline int atomic_add_return(int i, atomic_t *v)
1da177e4 38{
126b30c3 39 return __atomic_add_barrier(i, &v->counter) + i;
1da177e4 40}
75287430 41
56fefbbc
PZ
42static inline int atomic_fetch_add(int i, atomic_t *v)
43{
126b30c3 44 return __atomic_add_barrier(i, &v->counter);
56fefbbc
PZ
45}
46
5692e4d1
HC
/*
 * atomic_add - add integer to atomic variable (no return value)
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * On z196 or newer machines a compile-time-constant addend that fits in
 * a signed byte ([-128, 127]) takes the __atomic_add_const() fast path
 * (presumably an add-immediate instruction - see asm/atomic_ops.h);
 * everything else falls back to the generic __atomic_add().
 */
static inline void atomic_add(int i, atomic_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		__atomic_add_const(i, &v->counter);
		return;
	}
#endif
	__atomic_add(i, &v->counter);
}
57
/*
 * Derived 32-bit operations, all expressed in terms of atomic_add(),
 * atomic_add_return() and atomic_fetch_add() above. The sub variants
 * simply negate the operand.
 */
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
#define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
#define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
#define atomic_fetch_sub(_i, _v)	atomic_fetch_add(-(int)(_i), _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
1da177e4 69
/*
 * Generate atomic_{and,or,xor}() and atomic_fetch_{and,or,xor}().
 * The void variants use the plain __atomic_##op() primitive; the fetch
 * variants use the _barrier form and return its result (presumably the
 * pre-operation counter value, per the usual fetch_* contract - see
 * asm/atomic_ops.h).
 */
#define ATOMIC_OPS(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	__atomic_##op(i, &v->counter);					\
}									\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	return __atomic_##op##_barrier(i, &v->counter);			\
}

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
ae8c35c8 85
ffbf670f
IM
/* Unconditional exchange; evaluates to the previous value of @v. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * atomic_cmpxchg - store @new in @v if @v currently equals @old.
 * Returns the value @v held before the operation; equal to @old on
 * success.
 */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return __atomic_cmpxchg(&v->counter, old, new);
}
92
f24219b4 93static inline int __atomic_add_unless(atomic_t *v, int a, int u)
973bd993
MS
94{
95 int c, old;
973bd993 96 c = atomic_read(v);
0b2fcfdb
NP
97 for (;;) {
98 if (unlikely(c == u))
99 break;
100 old = atomic_cmpxchg(v, c, c + a);
101 if (likely(old == c))
102 break;
973bd993 103 c = old;
0b2fcfdb 104 }
f24219b4 105 return c;
973bd993
MS
106}
107
1da177e4
LT
/* Static initializer for an atomic64_t. */
#define ATOMIC64_INIT(i)  { (i) }

/*
 * atomic64_read - read 64-bit atomic variable
 * @v: pointer of type atomic64_t
 *
 * Performs a single 64-bit load ("lg") of v->counter.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	long c;

	asm volatile(
		" lg %0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}
119
/*
 * atomic64_set - set 64-bit atomic variable
 * @v: pointer of type atomic64_t
 * @i: new value
 *
 * Performs a single 64-bit store ("stg") into v->counter.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	asm volatile(
		" stg %1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
1da177e4 126
126b30c3 127static inline long atomic64_add_return(long i, atomic64_t *v)
1da177e4 128{
126b30c3 129 return __atomic64_add_barrier(i, &v->counter) + i;
0ccc8b7a
HC
130}
131
126b30c3 132static inline long atomic64_fetch_add(long i, atomic64_t *v)
56fefbbc 133{
126b30c3 134 return __atomic64_add_barrier(i, &v->counter);
56fefbbc
PZ
135}
136
/*
 * atomic64_add - add long to 64-bit atomic variable (no return value)
 * @i: value to add
 * @v: pointer of type atomic64_t
 *
 * Mirrors atomic_add(): on z196 or newer, a compile-time-constant
 * addend in [-128, 127] takes the __atomic64_add_const() fast path;
 * otherwise the generic __atomic64_add() is used.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		__atomic64_add_const(i, &v->counter);
		return;
	}
#endif
	__atomic64_add(i, &v->counter);
}
973bd993 147
3a5f10e3
MD
/* Unconditional 64-bit exchange; evaluates to the previous value of @v. */
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * atomic64_cmpxchg - store @new in @v if @v currently equals @old.
 * Returns the value @v held before the operation; equal to @old on
 * success.
 */
static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return __atomic64_cmpxchg(&v->counter, old, new);
}
1da177e4 154
/*
 * Generate atomic64_{and,or,xor}() and atomic64_fetch_{and,or,xor}().
 * Same structure as ATOMIC_OPS above: void variants use the plain
 * primitive, fetch variants use the _barrier form and return its result
 * (presumably the pre-operation value - see asm/atomic_ops.h).
 */
#define ATOMIC64_OPS(op)						\
static inline void atomic64_##op(long i, atomic64_t *v)			\
{									\
	__atomic64_##op(i, &v->counter);				\
}									\
static inline long atomic64_fetch_##op(long i, atomic64_t *v)		\
{									\
	return __atomic64_##op##_barrier(i, &v->counter);		\
}

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
12751058 170
126b30c3 171static inline int atomic64_add_unless(atomic64_t *v, long i, long u)
1da177e4 172{
126b30c3 173 long c, old;
2ddb3ec4 174
973bd993 175 c = atomic64_read(v);
0b2fcfdb
NP
176 for (;;) {
177 if (unlikely(c == u))
178 break;
9a70a428 179 old = atomic64_cmpxchg(v, c, c + i);
0b2fcfdb
NP
180 if (likely(old == c))
181 break;
973bd993 182 c = old;
0b2fcfdb 183 }
973bd993 184 return c != u;
1da177e4
LT
185}
186
126b30c3 187static inline long atomic64_dec_if_positive(atomic64_t *v)
2ddb3ec4 188{
126b30c3 189 long c, old, dec;
2ddb3ec4
HC
190
191 c = atomic64_read(v);
192 for (;;) {
193 dec = c - 1;
194 if (unlikely(dec < 0))
195 break;
196 old = atomic64_cmpxchg((v), c, dec);
197 if (likely(old == c))
198 break;
199 c = old;
200 }
201 return dec;
202}
203
/*
 * Derived 64-bit operations, built from the atomic64 primitives above;
 * the sub variants negate the operand, inc_not_zero reuses
 * atomic64_add_unless() with @u == 0.
 */
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
#define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long)(_i), _v)
#define atomic64_fetch_sub(_i, _v)	atomic64_fetch_add(-(long)(_i), _v)
#define atomic64_sub(_i, _v)		atomic64_add(-(long)(_i), _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
8426e1f6 216
1da177e4 217#endif /* __ARCH_S390_ATOMIC__ */