/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

#define __nops(n)	".rept	" #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))

#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

#define psb_csync()	asm volatile("hint #17" : : : "memory")
#define csdb()		asm volatile("hint #20" : : : "memory")

#define spec_bar()	asm volatile(ALTERNATIVE("dsb nsh\nisb\n",		\
						 SB_BARRIER_INSN"nop\n",	\
						 ARM64_HAS_SB))

#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

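/*
 * Illustrative sketch (not part of the original header): dma_wmb() is
 * typically used to order CPU writes to a coherent DMA buffer before the
 * write that transfers ownership of a descriptor to the device:
 *
 *	desc->addr = buf_dma;
 *	desc->len  = len;
 *	dma_wmb();			// descriptor fields visible first
 *	desc->status = DESC_OWN_DEVICE;	// then hand it to the device
 *
 * 'desc', 'buf_dma' and DESC_OWN_DEVICE are hypothetical names used only
 * for illustration.
 */
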
/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	"	cmp	%1, %2\n"
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}

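/*
 * Illustrative sketch (not part of the original header): callers normally
 * reach this helper through array_index_nospec() from <linux/nospec.h>,
 * which ANDs the index with the returned mask so that a bounds-checked
 * but attacker-controlled index cannot be used speculatively out of range:
 *
 *	if (idx >= nr_entries)
 *		return -EINVAL;
 *	idx = array_index_nospec(idx, nr_entries);
 *	return entries[idx];
 *
 * 'idx', 'nr_entries' and 'entries' are hypothetical names used only for
 * illustration.
 */
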
#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)

#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_write(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)

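/*
 * Illustrative sketch (not part of the original header): the generic
 * smp_store_release() wrapper built on __smp_store_release() above is
 * typically used by a producer to publish data:
 *
 *	msg->payload = compute_payload();	// plain store, ordered before...
 *	smp_store_release(&msg->ready, 1);	// ...the release store
 *
 * 'msg', 'payload', 'ready' and compute_payload() are hypothetical names
 * used only for illustration.
 */
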
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_read(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	}								\
	__u.__val;							\
})

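/*
 * Illustrative sketch (not part of the original header): the matching
 * consumer uses smp_load_acquire(), built on __smp_load_acquire() above,
 * so that reads of the payload cannot be reordered before the flag check:
 *
 *	while (!smp_load_acquire(&msg->ready))
 *		cpu_relax();
 *	val = msg->payload;
 *
 * 'msg', 'ready' and 'payload' match the hypothetical producer sketch
 * above and are used only for illustration.
 */
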
#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})

#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})

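/*
 * Illustrative sketch (not part of the original header): in both macros
 * the most recently loaded value is available to cond_expr as VAL, so a
 * caller can wait for a lock word to be released and read it with acquire
 * semantics, idling in WFE via __cmpwait_relaxed() instead of spinning:
 *
 *	val = smp_cond_load_acquire(&lock->val, !VAL);
 *
 * 'lock->val' is a hypothetical field used only for illustration.
 */
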
#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */