/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

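/*
 * Event and wait hints: sev() signals an event to all cores, wfe()
 * stalls in a low-power state until an event (or interrupt) is
 * pending, and wfi() stalls until an interrupt is pending.
 */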
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

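/*
 * isb() synchronizes the instruction stream after context-changing
 * operations, dmb() orders memory accesses within the domain given by
 * "opt", and dsb() additionally waits for those accesses to complete.
 */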
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

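/*
 * The mandatory barriers are full-system DSBs so that they also order
 * accesses to device memory: "sy" covers loads and stores, "ld" loads
 * and "st" stores.
 */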
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

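/*
 * On a uniprocessor (!CONFIG_SMP) kernel a compiler barrier is enough
 * for the smp_*() variants: a single CPU always observes its own
 * accesses in program order.
 */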
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()

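/*
 * Likewise, the UP store-release/load-acquire only need to stop the
 * compiler from reordering or tearing the access itself.
 */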
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	ACCESS_ONCE(*p) = (v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})

#else

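/*
 * SMP barriers use DMB in the inner-shareable domain, which covers all
 * CPUs the kernel runs on: "ishld" orders reads and "ishst" writes.
 */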
#define smp_mb()	dmb(ish)
#define smp_rmb()	dmb(ishld)
#define smp_wmb()	dmb(ishst)

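/*
 * STLR is a store-release: every access before it in program order is
 * observed before the store itself. Every size accepted by
 * compiletime_assert_atomic_type() must be handled here, otherwise a
 * 1- or 2-byte store would silently compile to nothing.
 */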
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)

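/*
 * LDAR is a load-acquire: the load is observed before every access
 * that follows it in program order. The size handling mirrors
 * smp_store_release() above.
 */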
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1;						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	}								\
	___p1;								\
})

#endif

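/*
 * arm64 honours address dependencies in hardware (unlike Alpha), so
 * the dependency barriers are no-ops.
 */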
#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)

#define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
#define nop()		asm volatile("nop")

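/*
 * The value-less atomics imply no ordering on arm64, so the
 * smp_mb__*_atomic() hooks need a full barrier.
 */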
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */