/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_ATOMIC_H
#define __ASM_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/lse.h>

#ifdef __KERNEL__

#define __ARM64_IN_ATOMIC_IMPL

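/*
 * Pick the atomics implementation at compile time: if the kernel is
 * configured for the ARMv8.1 LSE atomic instructions and the assembler
 * can emit them (CONFIG_AS_LSE), use the LSE versions; otherwise fall
 * back to the classic load-exclusive/store-exclusive (LL/SC) sequences.
 */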
#if defined(CONFIG_ARM64_LSE_ATOMICS) && defined(CONFIG_AS_LSE)
#include <asm/atomic_lse.h>
#else
#include <asm/atomic_ll_sc.h>
#endif

#undef __ARM64_IN_ATOMIC_IMPL

#include <asm/cmpxchg.h>

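/*
 * Generic "add unless" helper, instantiated for both atomic_t (empty
 * suffix) and atomic64_t ("64" suffix): read the counter and, as long
 * as it is not equal to @u, try to cmpxchg in the incremented value,
 * rereading on contention.  Evaluates to the value observed before any
 * modification.
 */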
#define ___atomic_add_unless(v, a, u, sfx)				\
({									\
	typeof((v)->counter) c, old;					\
									\
	c = atomic##sfx##_read(v);					\
	while (c != (u) &&						\
	       (old = atomic##sfx##_cmpxchg((v), c, c + (a))) != c)	\
		c = old;						\
	c;								\
})
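/*
 * Illustrative use only (hypothetical caller, not part of this header):
 * the classic "take a reference unless the count already hit zero"
 * pattern.
 *
 *	if (__atomic_add_unless(&obj->refs, 1, 0) != 0)
 *		do_something(obj);	// reference successfully taken
 */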

#define ATOMIC_INIT(i)			{ (i) }

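/*
 * atomic_read()/atomic_set() are plain loads and stores; READ_ONCE()
 * and WRITE_ONCE() only stop the compiler from tearing or caching the
 * access and imply no memory ordering on their own.
 */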
#define atomic_read(v)			READ_ONCE((v)->counter)
#define atomic_set(v, i)		WRITE_ONCE(((v)->counter), (i))

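/*
 * Each value-returning operation comes in four memory-ordering
 * flavours: _relaxed (no ordering), _acquire (load-acquire), _release
 * (store-release), and the unadorned name, which is fully ordered.
 */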
#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_add_return_acquire	atomic_add_return_acquire
#define atomic_add_return_release	atomic_add_return_release
#define atomic_add_return		atomic_add_return

#define atomic_inc_return_relaxed(v)	atomic_add_return_relaxed(1, (v))
#define atomic_inc_return_acquire(v)	atomic_add_return_acquire(1, (v))
#define atomic_inc_return_release(v)	atomic_add_return_release(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_sub_return_acquire	atomic_sub_return_acquire
#define atomic_sub_return_release	atomic_sub_return_release
#define atomic_sub_return		atomic_sub_return

#define atomic_dec_return_relaxed(v)	atomic_sub_return_relaxed(1, (v))
#define atomic_dec_return_acquire(v)	atomic_sub_return_acquire(1, (v))
#define atomic_dec_return_release(v)	atomic_sub_return_release(1, (v))
#define atomic_dec_return(v)		atomic_sub_return(1, (v))

#define atomic_xchg_relaxed(v, new)	xchg_relaxed(&((v)->counter), (new))
#define atomic_xchg_acquire(v, new)	xchg_acquire(&((v)->counter), (new))
#define atomic_xchg_release(v, new)	xchg_release(&((v)->counter), (new))
#define atomic_xchg(v, new)		xchg(&((v)->counter), (new))

#define atomic_cmpxchg_relaxed(v, old, new)				\
	cmpxchg_relaxed(&((v)->counter), (old), (new))
#define atomic_cmpxchg_acquire(v, old, new)				\
	cmpxchg_acquire(&((v)->counter), (old), (new))
#define atomic_cmpxchg_release(v, old, new)				\
	cmpxchg_release(&((v)->counter), (old), (new))
#define atomic_cmpxchg(v, old, new)	cmpxchg(&((v)->counter), (old), (new))

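/*
 * Derived helpers: inc/dec are built on add/sub, and the
 * *_and_test()/add_negative() tests on the fully ordered *_return
 * forms, so the tests act as full barriers.
 */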
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
#define __atomic_add_unless(v, a, u)	___atomic_add_unless(v, a, u,)
#define atomic_andnot			atomic_andnot

/*
 * 64-bit atomic operations.
 */
#define ATOMIC64_INIT			ATOMIC_INIT
#define atomic64_read			atomic_read
#define atomic64_set			atomic_set

#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_add_return_acquire	atomic64_add_return_acquire
#define atomic64_add_return_release	atomic64_add_return_release
#define atomic64_add_return		atomic64_add_return

#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1, (v))
#define atomic64_inc_return_acquire(v)	atomic64_add_return_acquire(1, (v))
#define atomic64_inc_return_release(v)	atomic64_add_return_release(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire	atomic64_sub_return_acquire
#define atomic64_sub_return_release	atomic64_sub_return_release
#define atomic64_sub_return		atomic64_sub_return

#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1, (v))
#define atomic64_dec_return_acquire(v)	atomic64_sub_return_acquire(1, (v))
#define atomic64_dec_return_release(v)	atomic64_sub_return_release(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))

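/*
 * xchg() and cmpxchg() dispatch on the size of their first argument,
 * so the 32-bit wrappers above can simply be reused for atomic64_t.
 */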
#define atomic64_xchg_relaxed		atomic_xchg_relaxed
#define atomic64_xchg_acquire		atomic_xchg_acquire
#define atomic64_xchg_release		atomic_xchg_release
#define atomic64_xchg			atomic_xchg

#define atomic64_cmpxchg_relaxed	atomic_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire	atomic_cmpxchg_acquire
#define atomic64_cmpxchg_release	atomic_cmpxchg_release
#define atomic64_cmpxchg		atomic_cmpxchg

#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return(v) == 0)
#define atomic64_sub_and_test(i, v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_add_negative(i, v)	(atomic64_add_return((i), (v)) < 0)
#define atomic64_add_unless(v, a, u)	(___atomic_add_unless(v, a, u, 64) != u)
#define atomic64_andnot			atomic64_andnot

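/*
 * atomic64_inc_not_zero() increments the counter unless it is zero and
 * is true iff the increment happened; see the add_unless helper above.
 */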
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

#endif	/* __KERNEL__ */
#endif	/* __ASM_ATOMIC_H */