/*
 * include/asm-generic/percpu.h
 *
 * Provenance: extracted from the blame view of
 * mirror_ubuntu-artful-kernel.git (git.proxmox.com),
 * branch "UBUNTU: [Config] Updating config after rebasing".
 */
1#ifndef _ASM_GENERIC_PERCPU_H_
2#define _ASM_GENERIC_PERCPU_H_
3
4#include <linux/compiler.h>
5#include <linux/threads.h>
6#include <linux/percpu-defs.h>
7
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/* smp_processor_id() adds a preemption-context check under DEBUG_PREEMPT */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

/*
 * Linker-section and attribute defaults for percpu data; an arch may
 * override any of these before including this header.
 */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

/*
 * Read this CPU's instance of @pcp.  No preemption or IRQ protection is
 * applied here; compare this_cpu_generic_read() for the protected form.
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

/*
 * Apply "lvalue op value" (op is e.g. "=", "+=", "&=") to this CPU's
 * instance of @pcp, unprotected.  @val is parenthesized at its use site
 * for macro hygiene; @op cannot be, by nature.
 */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op (val);					\
} while (0)

/* Add @val to this CPU's instance of @pcp and yield the new value. */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += (val);							\
	*__p;								\
})

/* Store @nval into this CPU's instance of @pcp; yield the old value. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = (nval);							\
	__ret;								\
})

/*
 * If this CPU's instance of @pcp equals @oval, store @nval into it.
 * Yields the value observed before the (possible) store.
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = (nval);						\
	__ret;								\
})

/*
 * Double compare-and-exchange: if this CPU's instances of @pcp1 and @pcp2
 * both match @oval1/@oval2, store @nval1/@nval2 into them.  Yields 1 on
 * success, 0 otherwise.
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = (nval1);					\
		*__p2 = (nval2);					\
		__ret = 1;						\
	}								\
	(__ret);							\
})

/*
 * Read @pcp under preempt-off only; used for native-word-sized variables,
 * where READ_ONCE() gives a single untorn load.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) __ret;						\
	preempt_disable_notrace();					\
	__ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	__ret;								\
})

/*
 * Read @pcp with local IRQs disabled; the fallback for types wider than
 * a native word, where a plain load could be interrupted mid-access.
 */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/*
 * Protected read of @pcp: pick the cheaper preempt-off variant when the
 * type is a native word, the IRQ-off variant otherwise.  __native_word()
 * is a compile-time check, so only one arm survives.
 */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})

/* IRQ-safe wrapper around raw_cpu_generic_to_op(). */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)


/* IRQ-safe wrapper around raw_cpu_generic_add_return(). */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/* IRQ-safe wrapper around raw_cpu_generic_xchg(). */
#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/* IRQ-safe wrapper around raw_cpu_generic_cmpxchg(). */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/* IRQ-safe wrapper around raw_cpu_generic_cmpxchg_double(). */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/*
 * Size-specific raw_cpu_*_{1,2,4,8}() operations.  Each definition is a
 * fallback: an arch header included earlier may pre-define any of them
 * with an optimized implementation, in which case the generic form is
 * skipped by the #ifndef guard.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp) raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

/*
 * Size-specific this_cpu_*_{1,2,4,8}() operations (preemption/IRQ safe).
 * Same fallback scheme as the raw_cpu_* block above: arch-provided
 * definitions take precedence via the #ifndef guards.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp) this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

447#endif /* _ASM_GENERIC_PERCPU_H_ */