/*
 * Copyright (C) 2013 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/alternative.h>
#include <asm/stack_pointer.h>
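
/*
 * The per-CPU offset is stashed in a system register so it can be read
 * cheaply from any context: TPIDR_EL1 on a conventional kernel, or
 * TPIDR_EL2 when the kernel runs at EL2 with the Virtualization Host
 * Extensions (VHE). The alternatives framework patches the accessors
 * below at boot based on the ARM64_HAS_VIRT_HOST_EXTN capability.
 */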

static inline void set_my_cpu_offset(unsigned long off)
{
	asm volatile(ALTERNATIVE("msr tpidr_el1, %0",
				 "msr tpidr_el2, %0",
				 ARM64_HAS_VIRT_HOST_EXTN)
			:: "r" (off) : "memory");
}

static inline unsigned long __my_cpu_offset(void)
{
	unsigned long off;

	/*
	 * We want to allow caching the value, so avoid using volatile and
	 * instead use a fake stack read to hazard against barrier().
	 */
	asm(ALTERNATIVE("mrs %0, tpidr_el1",
			"mrs %0, tpidr_el2",
			ARM64_HAS_VIRT_HOST_EXTN)
		: "=r" (off) :
		"Q" (*(const unsigned long *)current_stack_pointer));

	return off;
}
#define __my_cpu_offset __my_cpu_offset()
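
/*
 * Illustrative: with __my_cpu_offset defined above, the generic layer
 * pulled in by <asm-generic/percpu.h> at the bottom of this file
 * resolves this-CPU pointers roughly as
 *
 *	this_cpu_ptr(&var) ~= SHIFT_PERCPU_PTR(&var, __my_cpu_offset)
 *
 * i.e. the variable's link-time address plus this CPU's offset.
 */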

#define PERCPU_OP(op, asm_op)						\
static inline unsigned long __percpu_##op(void *ptr,			\
			unsigned long val, int size)			\
{									\
	unsigned long loop, ret;					\
									\
	switch (size) {							\
	case 1:								\
		asm ("//__per_cpu_" #op "_1\n"				\
		"1:	ldxrb	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
		"	stxrb	  %w[loop], %w[ret], %[ptr]\n"		\
		"	cbnz	  %w[loop], 1b"				\
		: [loop] "=&r" (loop), [ret] "=&r" (ret),		\
		  [ptr] "+Q"(*(u8 *)ptr)				\
		: [val] "Ir" (val));					\
		break;							\
	case 2:								\
		asm ("//__per_cpu_" #op "_2\n"				\
		"1:	ldxrh	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
		"	stxrh	  %w[loop], %w[ret], %[ptr]\n"		\
		"	cbnz	  %w[loop], 1b"				\
		: [loop] "=&r" (loop), [ret] "=&r" (ret),		\
		  [ptr] "+Q"(*(u16 *)ptr)				\
		: [val] "Ir" (val));					\
		break;							\
	case 4:								\
		asm ("//__per_cpu_" #op "_4\n"				\
		"1:	ldxr	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
		"	stxr	  %w[loop], %w[ret], %[ptr]\n"		\
		"	cbnz	  %w[loop], 1b"				\
		: [loop] "=&r" (loop), [ret] "=&r" (ret),		\
		  [ptr] "+Q"(*(u32 *)ptr)				\
		: [val] "Ir" (val));					\
		break;							\
	case 8:								\
		asm ("//__per_cpu_" #op "_8\n"				\
		"1:	ldxr	  %[ret], %[ptr]\n"			\
			#asm_op " %[ret], %[ret], %[val]\n"		\
		"	stxr	  %w[loop], %[ret], %[ptr]\n"		\
		"	cbnz	  %w[loop], 1b"				\
		: [loop] "=&r" (loop), [ret] "=&r" (ret),		\
		  [ptr] "+Q"(*(u64 *)ptr)				\
		: [val] "Ir" (val));					\
		break;							\
	default:							\
		ret = 0;						\
		BUILD_BUG();						\
	}								\
									\
	return ret;							\
}

PERCPU_OP(add, add)
PERCPU_OP(and, and)
PERCPU_OP(or, orr)
#undef PERCPU_OP
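
/*
 * Each size variant above is a standard load-exclusive/store-exclusive
 * loop: ldxr marks the location in the exclusive monitor, the operation
 * is applied in a register, and stxr only succeeds (writing 0 to the
 * status register) if nothing disturbed the monitor in between; cbnz
 * retries otherwise. PERCPU_OP stamps out __percpu_add, __percpu_and
 * and __percpu_or for the _percpu_* wrappers further down.
 */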

static inline unsigned long __percpu_read(void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		ret = READ_ONCE(*(u8 *)ptr);
		break;
	case 2:
		ret = READ_ONCE(*(u16 *)ptr);
		break;
	case 4:
		ret = READ_ONCE(*(u32 *)ptr);
		break;
	case 8:
		ret = READ_ONCE(*(u64 *)ptr);
		break;
	default:
		ret = 0;
		BUILD_BUG();
	}

	return ret;
}

static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		WRITE_ONCE(*(u8 *)ptr, (u8)val);
		break;
	case 2:
		WRITE_ONCE(*(u16 *)ptr, (u16)val);
		break;
	case 4:
		WRITE_ONCE(*(u32 *)ptr, (u32)val);
		break;
	case 8:
		WRITE_ONCE(*(u64 *)ptr, (u64)val);
		break;
	default:
		BUILD_BUG();
	}
}
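
/*
 * READ_ONCE()/WRITE_ONCE() guarantee a single, non-torn access of the
 * right width; no exclusives are needed because plain reads and writes
 * are not read-modify-write operations. The BUILD_BUG() in the default
 * branches turns an unsupported access size into a compile-time error:
 * as used here, "size" is always the compile-time constant sizeof(pcp).
 */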

static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
						int size)
{
	unsigned long ret, loop;

	switch (size) {
	case 1:
		asm ("//__percpu_xchg_1\n"
		"1:	ldxrb	%w[ret], %[ptr]\n"
		"	stxrb	%w[loop], %w[val], %[ptr]\n"
		"	cbnz	%w[loop], 1b"
		: [loop] "=&r"(loop), [ret] "=&r"(ret),
		  [ptr] "+Q"(*(u8 *)ptr)
		: [val] "r" (val));
		break;
	case 2:
		asm ("//__percpu_xchg_2\n"
		"1:	ldxrh	%w[ret], %[ptr]\n"
		"	stxrh	%w[loop], %w[val], %[ptr]\n"
		"	cbnz	%w[loop], 1b"
		: [loop] "=&r"(loop), [ret] "=&r"(ret),
		  [ptr] "+Q"(*(u16 *)ptr)
		: [val] "r" (val));
		break;
	case 4:
		asm ("//__percpu_xchg_4\n"
		"1:	ldxr	%w[ret], %[ptr]\n"
		"	stxr	%w[loop], %w[val], %[ptr]\n"
		"	cbnz	%w[loop], 1b"
		: [loop] "=&r"(loop), [ret] "=&r"(ret),
		  [ptr] "+Q"(*(u32 *)ptr)
		: [val] "r" (val));
		break;
	case 8:
		asm ("//__percpu_xchg_8\n"
		"1:	ldxr	%[ret], %[ptr]\n"
		"	stxr	%w[loop], %[val], %[ptr]\n"
		"	cbnz	%w[loop], 1b"
		: [loop] "=&r"(loop), [ret] "=&r"(ret),
		  [ptr] "+Q"(*(u64 *)ptr)
		: [val] "r" (val));
		break;
	default:
		ret = 0;
		BUILD_BUG();
	}

	return ret;
}
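
/*
 * __percpu_xchg() returns the previous value: ldxr loads the old
 * contents into [ret] while stxr attempts to publish the new value,
 * retrying until the exclusive store succeeds. Note the plain "r"
 * constraint: unlike the arithmetic ops above, the exchanged value
 * must live in a register and cannot be encoded as an immediate.
 */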

#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	preempt_disable_notrace();					\
	__retval = (typeof(pcp))__percpu_read(raw_cpu_ptr(&(pcp)),	\
					      sizeof(pcp));		\
	preempt_enable_notrace();					\
	__retval;							\
})

#define _percpu_write(pcp, val)						\
do {									\
	preempt_disable_notrace();					\
	__percpu_write(raw_cpu_ptr(&(pcp)), (unsigned long)(val),	\
				sizeof(pcp));				\
	preempt_enable_notrace();					\
} while(0)

#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable_notrace();				\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable_notrace();				\
	__retval;						\
})
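
/*
 * Disabling preemption around the raw_cpu_ptr() dereference is what
 * makes these operations "this CPU" safe: the task cannot migrate
 * between computing the per-CPU address and completing the access.
 * The _notrace variants avoid recursion when the per-CPU ops are used
 * from within the function tracer itself.
 */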

#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) (typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val))

#define this_cpu_add_1(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_2(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_1(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_2(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_1(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_2(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_1(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_2(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)
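
/*
 * Illustrative sketch of how the size-suffixed hooks above are reached
 * from generic code; the counter name is hypothetical.
 *
 *	DEFINE_PER_CPU(u64, my_counter);
 *
 *	this_cpu_add(my_counter, 1);
 *		// -> this_cpu_add_8(my_counter, 1)
 *		// -> _pcp_protect(__percpu_add, my_counter, 1)
 *		// -> ldxr/add/stxr loop on this CPU's copy
 *
 *	u64 snap = this_cpu_read(my_counter);
 *		// -> this_cpu_read_8 -> READ_ONCE() of this CPU's copy
 */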

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */