/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

	.text

	/* Three versions of the atomic routines: one that
	 * does not return a value and does not perform
	 * memory barriers, and two which return
	 * a value (the new and old value, respectively) and
	 * do the barriers.
	 */

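	/* All of the routines below share the same lock-free shape,
	 * roughly (in C-like pseudocode):
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = old <op> increment;
	 *	} while (cas(ptr, old, new) != old);
	 *
	 * If another CPU modified the location between the load and
	 * the cas, the cas fails and we retry.  On SMP, BACKOFF_LABEL
	 * sends the retry through BACKOFF_SPIN, which backs off to
	 * reduce cacheline contention; on UP it branches straight
	 * back to the load.  SPARC executes the instruction after a
	 * branch or retl in its delay slot, hence the nops; by
	 * convention delay-slot instructions are indented one extra
	 * space.
	 */
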
#define ATOMIC_OP(op)						\
ENTRY(arch_atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);					\
1:	lduw	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	cas	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);			\
	 nop;							\
	retl;							\
	 nop;							\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(arch_atomic_##op);					\
EXPORT_SYMBOL(arch_atomic_##op);

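	/* The _return variants recompute the new value into %g1 in
	 * the branch delay slot (harmless on the retry path, since
	 * %g1 is reloaded at 1:), then sign-extend the 32-bit result
	 * into the 64-bit return register with sra in the retl delay
	 * slot.
	 */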
#define ATOMIC_OP_RETURN(op)					\
ENTRY(arch_atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);					\
1:	lduw	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	cas	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);			\
	 op	%g1, %o0, %g1;					\
	retl;							\
	 sra	%g1, 0, %o0;					\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(arch_atomic_##op##_return);				\
EXPORT_SYMBOL(arch_atomic_##op##_return);

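	/* The fetch variants return the *old* value: %g1 still holds
	 * the value loaded before the successful cas and is
	 * sign-extended into %o0 in the retl delay slot.
	 */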
#define ATOMIC_FETCH_OP(op)					\
ENTRY(arch_atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);					\
1:	lduw	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	cas	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);			\
	 nop;							\
	retl;							\
	 sra	%g1, 0, %o0;					\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(arch_atomic_fetch_##op);				\
EXPORT_SYMBOL(arch_atomic_fetch_##op);

ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

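	/* The kernel's atomic API defines no value-returning
	 * (_return) variants of the bitwise operations, so and/or/xor
	 * are instantiated only in the plain and fetch forms.
	 */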
ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

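	/* The 64-bit variants mirror the 32-bit ones above, but
	 * operate on a full 64-bit counter: ldx/casx replace
	 * lduw/cas, and the comparison is tested in %xcc rather
	 * than %icc.
	 */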
#define ATOMIC64_OP(op)						\
ENTRY(arch_atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);					\
1:	ldx	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	casx	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);			\
	 nop;							\
	retl;							\
	 nop;							\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(arch_atomic64_##op);					\
EXPORT_SYMBOL(arch_atomic64_##op);

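	/* Unlike the 32-bit version, the 64-bit _return variant needs
	 * no sign extension; the new value is computed directly into
	 * %o0 in the retl delay slot.
	 */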
#define ATOMIC64_OP_RETURN(op)					\
ENTRY(arch_atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);					\
1:	ldx	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	casx	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);			\
	 nop;							\
	retl;							\
	 op	%g1, %o0, %o0;					\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(arch_atomic64_##op##_return);				\
EXPORT_SYMBOL(arch_atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)					\
ENTRY(arch_atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);					\
1:	ldx	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	casx	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);			\
	 nop;							\
	retl;							\
	 mov	%g1, %o0;					\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(arch_atomic64_fetch_##op);				\
EXPORT_SYMBOL(arch_atomic64_fetch_##op);

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

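	/* Atomically decrement the 64-bit counter at %o0, but only if
	 * the result would stay non-negative.  Returns the old value
	 * minus one whether or not the store happened, so the caller
	 * sees a negative result exactly when no decrement was
	 * performed.  The sub in the brlez delay slot executes even
	 * when the branch is taken; this is harmless since the taken
	 * path recomputes the return value at 3:.
	 */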
ENTRY(arch_atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(arch_atomic64_dec_if_positive)
EXPORT_SYMBOL(arch_atomic64_dec_if_positive)