/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

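/*
 * CPU hint instructions: sev() signals an event to all cores, while
 * wfe() and wfi() suspend the calling CPU until an event or an
 * interrupt arrives. Used by spinlock slow paths and idle loops.
 */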
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

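/*
 * isb() flushes the CPU pipeline so that later instructions see the
 * effects of earlier context-changing operations. dmb() and dsb() take
 * a shareability/access-type option (e.g. sy, ld, st, ish), which the
 * macro pastes into the instruction via stringification (#opt).
 */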
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

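/*
 * Mandatory barriers must order accesses against all observers,
 * including devices, so they use a full-system DSB rather than the
 * cheaper DMB.
 */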
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

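/*
 * DMA barriers only need to order accesses to coherent DMA buffers,
 * so a DMB limited to the outer shareable domain is sufficient.
 */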
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

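/*
 * On a uniprocessor kernel other CPUs cannot observe reordering, so
 * the SMP barriers only need to constrain the compiler.
 */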
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()

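/*
 * Likewise, release/acquire semantics reduce to a compiler barrier
 * around a single volatile access.
 */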
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	ACCESS_ONCE(*p) = (v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})

#else

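/*
 * With SMP, ordering is only required between CPUs, so a DMB limited
 * to the inner shareable domain suffices.
 */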
#define smp_mb()	dmb(ish)
#define smp_rmb()	dmb(ishld)
#define smp_wmb()	dmb(ishst)

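/*
 * smp_store_release() maps directly onto the ARMv8 store-release
 * instruction (STLR): all prior loads and stores are observed before
 * the store itself.
 */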
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)

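/*
 * smp_load_acquire() uses the load-acquire instruction (LDAR), so no
 * later load or store can be observed before the load itself.
 */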
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1;						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	}								\
	___p1;								\
})

#endif

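/*
 * ARMv8 respects address dependencies between loads, so the
 * dependency barriers are no-ops.
 */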
#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)

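/* smp_store_mb() performs the store and then a full SMP barrier. */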
#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
#define nop()		asm volatile("nop")

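/*
 * Atomic RMW operations without a return value are unordered here, so
 * callers that need ordering must bracket them with full barriers.
 */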
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */