/*
 * include/asm-generic/percpu.h -- generic per-CPU accessor definitions,
 * used by architectures that do not supply optimized versions.
 */
1 #ifndef _ASM_GENERIC_PERCPU_H_
2 #define _ASM_GENERIC_PERCPU_H_
3
4 #include <linux/compiler.h>
5 #include <linux/threads.h>
6 #include <linux/percpu-defs.h>
7
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/*
 * With preemption debugging, use the checked smp_processor_id() (rather
 * than the raw_ variant) so misuse from preemptible context is caught.
 */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

/* Arch hook to set up the per-cpu areas (boot-time), when the arch opts in. */
#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */
51
/*
 * Linker section per-cpu variables live in: a dedicated ".data..percpu"
 * section on SMP builds, plain ".data" on UP where a single instance
 * suffices.  An arch may pre-define its own section name.
 */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

/* Optional arch-supplied attributes for per-cpu declarations; default none. */
#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

/* Optional arch-supplied attributes for per-cpu definitions; default none. */
#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif
67
/*
 * Plain load of this CPU's instance of @pcp.  No protection against
 * preemption or interrupts -- the caller must provide it.
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) ___val = *raw_cpu_ptr(&(pcp));			\
	___val;								\
})
72
/*
 * Apply @op (e.g. +=, &=, =) with @val to this CPU's instance of @pcp.
 * Unprotected; the caller must exclude preemption/interrupts as needed.
 */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	typeof(&(pcp)) ___ptr = raw_cpu_ptr(&(pcp));			\
									\
	*___ptr op val;							\
} while (0)
77
/* Add @val to this CPU's @pcp and return the new value.  Unprotected. */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(&(pcp)) ___ptr = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) ___new;						\
									\
	*___ptr += val;							\
	___new = *___ptr;						\
	___new;								\
})
85
/* Store @nval into this CPU's @pcp, returning the prior value.  Unprotected. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(&(pcp)) ___ptr = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) ___old = *___ptr;					\
									\
	*___ptr = nval;							\
	___old;								\
})
94
/*
 * Compare-and-exchange on this CPU's @pcp: store @nval only when the
 * current value equals @oval.  Always returns the prior value, so the
 * caller can detect success by comparing the result with @oval.
 * Unprotected.
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(&(pcp)) ___ptr = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) ___old = *___ptr;					\
									\
	if (___old == (oval))						\
		*___ptr = nval;						\
	___old;								\
})
104
/*
 * Double compare-and-exchange: update both @pcp1 and @pcp2 only when
 * both match their expected old values.  Returns 1 on success, 0 on
 * failure.  Unprotected.
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(&(pcp1)) ___ptr1 = raw_cpu_ptr(&(pcp1));			\
	typeof(&(pcp2)) ___ptr2 = raw_cpu_ptr(&(pcp2));			\
	int ___ok = (*___ptr1 == (oval1) && *___ptr2 == (oval2));	\
									\
	if (___ok) {							\
		*___ptr1 = nval1;					\
		*___ptr2 = nval2;					\
	}								\
	(___ok);							\
})
117
/*
 * this_cpu_generic_read() must be atomic w.r.t. interrupts: an interrupt
 * handler updating the same per-cpu variable must not be able to make the
 * reader observe a torn value.  Disabling preemption alone (the old
 * behaviour) only prevents migration; it does not make a multi-access
 * load atomic.  So:
 *  - for types the CPU loads in a single access (__native_word), a
 *    READ_ONCE() under preempt-off suffices;
 *  - for larger types, interrupts must be disabled around the read.
 * (Mirrors the mainline fix to asm-generic/percpu.h.)
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) __ret;						\
	preempt_disable_notrace();					\
	__ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	__ret;								\
})

#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
126
/*
 * IRQ-safe read-modify-write: perform @op @val on this CPU's @pcp with
 * hard interrupts disabled so the update cannot race an interrupt
 * handler on the same CPU.
 */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __irqflags;					\
									\
	raw_local_irq_save(__irqflags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__irqflags);				\
} while (0)
134
135
/* IRQ-safe add-and-return: like raw_cpu_generic_add_return() under irq-off. */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	unsigned long __irqflags;					\
	typeof(pcp) __ret;						\
									\
	raw_local_irq_save(__irqflags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__irqflags);				\
	__ret;								\
})
145
/* IRQ-safe exchange: store @nval, return the prior value, under irq-off. */
#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	unsigned long __irqflags;					\
	typeof(pcp) __ret;						\
									\
	raw_local_irq_save(__irqflags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__irqflags);				\
	__ret;								\
})
155
/*
 * IRQ-safe compare-and-exchange.  Returns the prior value; the store of
 * @nval happens only when that value equals @oval.
 */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	unsigned long __irqflags;					\
	typeof(pcp) __ret;						\
									\
	raw_local_irq_save(__irqflags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__irqflags);				\
	__ret;								\
})
165
/*
 * IRQ-safe double compare-and-exchange of two per-cpu variables.
 * Returns 1 when both matched and were updated, 0 otherwise.
 */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	unsigned long __irqflags;					\
	int __ret;							\
									\
	raw_local_irq_save(__irqflags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__irqflags);				\
	__ret;								\
})
176
/*
 * Size-specific raw_cpu read/write operations (no preemption/IRQ
 * protection).  An arch overrides any of these by defining the
 * corresponding raw_cpu_*_N macro before this point; whatever is left
 * undefined falls back to the generic C versions above.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp) raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
202
/* Size-specific raw_cpu add/and/or fallbacks (unprotected). */
#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
241
/* Size-specific raw_cpu add_return/xchg/cmpxchg fallbacks (unprotected). */
#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
301
/*
 * Size-specific this_cpu operations (preemption/IRQ safe via the
 * generic wrappers above).  Arches override by pre-defining the
 * corresponding this_cpu_*_N macro.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp) this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
366
/* Size-specific this_cpu add_return/xchg/cmpxchg fallbacks (IRQ-safe). */
#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
426
427 #endif /* _ASM_GENERIC_PERCPU_H_ */