/* arch/x86/include/asm/barrier.h — x86 memory-barrier and speculation-control definitions */
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>

/*
 * Force strict CPU ordering.
 * And yes, this might be required on UP too when we're talking
 * to devices.
 */

#ifdef CONFIG_X86_32
/*
 * 32-bit: the boot CPU may predate SSE2, so the fence instructions may not
 * exist.  ALTERNATIVE() patches in the real mfence/lfence/sfence when
 * X86_FEATURE_XMM2 (SSE2) is present; the fallback is a LOCKed add to a
 * dead stack slot, which is serializing on all x86 CPUs (hence the "cc"
 * clobber for the flags the ADD modifies).
 */
#define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#else
/* 64-bit always has SSE2, so the fence instructions can be used directly. */
#define mb()	asm volatile("mfence":::"memory")
#define rmb()	asm volatile("lfence":::"memory")
#define wmb()	asm volatile("sfence" ::: "memory")
#endif
26 | ||
c933d8c7 DW |
27 | /** |
28 | * array_index_mask_nospec() - generate a mask that is ~0UL when the | |
29 | * bounds check succeeds and 0 otherwise | |
30 | * @index: array element index | |
31 | * @size: number of elements in array | |
32 | * | |
33 | * Returns: | |
34 | * 0 - (index < size) | |
35 | */ | |
36 | static inline unsigned long array_index_mask_nospec(unsigned long index, | |
37 | unsigned long size) | |
38 | { | |
39 | unsigned long mask; | |
40 | ||
41 | asm ("cmp %1,%2; sbb %0,%0;" | |
42 | :"=r" (mask) | |
43 | :"r"(size),"r" (index) | |
44 | :"cc"); | |
45 | return mask; | |
46 | } | |
47 | ||
48 | /* Override the default implementation from linux/nospec.h. */ | |
49 | #define array_index_mask_nospec array_index_mask_nospec | |
50 | ||
/*
 * Barriers ordering CPU accesses against DMA-coherent memory.  Under
 * normal x86 ordering a compiler barrier is enough; the PPro-era
 * CONFIG_X86_PPRO_FENCE option needs a real read fence for dma_rmb().
 */
#ifdef CONFIG_X86_PPRO_FENCE
#define dma_rmb()	rmb()
#else
#define dma_rmb()	barrier()
#endif
#define dma_wmb()	barrier()
57 | ||
/*
 * SMP full barrier: a LOCKed add to a dead stack slot.  Cheaper than
 * MFENCE while still fully ordering ordinary loads and stores; only the
 * stack-pointer register name differs between 32- and 64-bit.
 */
#ifdef CONFIG_X86_32
#define __smp_mb()	asm volatile("lock; addl $0,-4(%%esp)" ::: "memory", "cc")
#else
#define __smp_mb()	asm volatile("lock; addl $0,-4(%%rsp)" ::: "memory", "cc")
#endif
/* x86 never reorders reads past reads, so the DMA read barrier suffices. */
#define __smp_rmb()	dma_rmb()
/* Stores are not reordered with other stores: compiler barrier only. */
#define __smp_wmb()	barrier()
/* Store + full barrier in one: XCHG with a memory operand is implicitly LOCKed. */
#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
47933ad4 | 66 | |
#if defined(CONFIG_X86_PPRO_FENCE)

/*
 * For this option x86 doesn't have a strong TSO memory
 * model and we should fall back to full barriers.
 */

/* Release store: full barrier before the plain WRITE_ONCE() store. */
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

/* Acquire load: READ_ONCE() followed by a full barrier. */
#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	___p1;								\
})

#else /* regular x86 TSO memory ordering */

/*
 * Under TSO, plain loads already have acquire semantics and plain stores
 * already have release semantics, so only the compiler must be stopped
 * from reordering — hence barrier() instead of a fence instruction.
 */
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})

#endif
107 | ||
/* Atomic operations are already serializing on x86 */
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()

/* Pull in generic fallbacks for anything not overridden above. */
#include <asm-generic/barrier.h>

#endif /* _ASM_X86_BARRIER_H */