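/*
 * arch/arm/include/asm/futex.h
 *
 * Futex helpers for ARM: atomic read-modify-write and cmpxchg on
 * user-space words.  On SMP they are built from ldrex/strex loops;
 * on UP, the lack of atomic ops is worked around by disabling
 * preemption around plain user accesses.
 */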
#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

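/*
 * Exception-table fixup shared by the inline assembly below: if the
 * user access at label 1 or 2 faults, the __ex_table entry redirects
 * execution to label 4, which loads err_reg (-EFAULT) into %0 and
 * branches back to label 3, just past the faulting sequence.
 */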
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .text.fixup,\"ax\"\n"		\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

#ifdef CONFIG_SMP

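/*
 * Atomic futex op for SMP: an ldrex/strex loop retried until the
 * exclusive store succeeds.  %0 holds the computed new value and is
 * cleared to 0 on success (or set to -EFAULT by the fixup on a fault);
 * the loaded old value is returned in 'oldval'.
 */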
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags;				\
	smp_mb();						\
	prefetchw(uaddr);					\
	__ua_flags = uaccess_save_and_enable();			\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})

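/*
 * cmpxchg for SMP: store 'newval' only if the word still holds
 * 'oldval', retrying if the exclusive store fails.  The smp_mb()
 * barriers provide full ordering; the value actually read is always
 * reported back through *uval.
 */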
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);
	smp_mb();

	*uval = val;
	return ret;
}

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

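/*
 * UP variant: non-exclusive user load/store via the TUSER() accessors;
 * atomicity relies on the caller disabling preemption (see
 * arch_futex_atomic_op_inuser below).
 */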
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags = uaccess_save_and_enable();	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})

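/*
 * UP cmpxchg: compare and conditionally store with preemption
 * disabled; ret stays 0 unless a faulting access reaches the fixup,
 * which sets it to -EFAULT.
 */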
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	preempt_disable();
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSER(streq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);

	*uval = val;
	preempt_enable();

	return ret;
}

#endif /* !SMP */

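/*
 * Dispatch a futex operation (set/add/or/andn/xor) on a user word,
 * with page faults disabled.  On !SMP, preemption is also disabled so
 * the non-exclusive __futex_atomic_op above is effectively atomic.
 * On success the previous value is returned through *oval.
 */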
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret, tmp;

#ifndef CONFIG_SMP
	preempt_disable();
#endif
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();
#ifndef CONFIG_SMP
	preempt_enable();
#endif

	if (!ret)
		*oval = oldval;

	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */