#ifndef _ASM_HEXAGON_FUTEX_H
#define _ASM_HEXAGON_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

/* XXX TODO-- need to add sync barriers! */

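/*
 * __futex_atomic_op() wraps one caller-supplied Hexagon instruction
 * (which computes the new value in %1 from the old value in %0 and/or
 * the operand in %4) in a load-locked/store-conditional retry loop on
 * the user word at uaddr.  On success, %1 (ret) is cleared to 0; if
 * either user access faults, the .fixup stub at label 4 loads -EFAULT
 * into %1 and the __ex_table entries route the fault there.
 */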
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
	__asm__ __volatile( \
	"1: %0 = memw_locked(%3);\n" \
	    /* For example: %1 = %4 */ \
	    insn \
	"2: memw_locked(%3,p2) = %1;\n" \
	"   if !p2 jump 1b;\n" \
	"   %1 = #0;\n" \
	"3:\n" \
	".section .fixup,\"ax\"\n" \
	"4: %1 = #%5;\n" \
	"   jump 3b\n" \
	".previous\n" \
	".section __ex_table,\"a\"\n" \
	".long 1b,4b,2b,4b\n" \
	".previous\n" \
	: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr) \
	: "r" (uaddr), "r" (oparg), "i" (-EFAULT) \
	: "p2", "memory")

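/*
 * Decode the packed futex operation word and apply it atomically to the
 * user-space futex at uaddr.  encoded_op packs the opcode in bits 30:28
 * (bit 31 selects a shift-count operand), the comparison in bits 27:24,
 * a sign-extended 12-bit operand in bits 23:12, and a sign-extended
 * 12-bit comparison argument in bits 11:0.  Page faults are disabled
 * around the atomic update; the return value is the result of the
 * requested comparison against the old value, or a negative errno.
 */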
static inline int
futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("%1 = %4\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("%1 = add(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("%1 = or(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("%1 = not(%4); %1 = and(%0,%1)\n", ret,
				  oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("%1 = xor(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

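	/* Update succeeded: report the requested comparison on the old value. */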
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}

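/*
 * Atomically compare-and-exchange the user word at uaddr: if it still
 * holds oldval, replace it with newval, retrying the store-conditional
 * until it is not interrupted.  The value found is returned through
 * *uval; a faulting user access takes the .fixup path and yields -EFAULT.
 */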
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
			      u32 newval)
{
	int prev;
	int ret = 0;	/* only the fault fixup in the asm below writes ret */

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
	"1: %1 = memw_locked(%3)\n"
	"   {\n"
	"      p2 = cmp.eq(%1,%4)\n"
	"      if !p2.new jump:NT 3f\n"
	"   }\n"
	"2: memw_locked(%3,p2) = %5\n"
	"   if !p2 jump 1b\n"
	"3:\n"
	".section .fixup,\"ax\"\n"
	"4: %0 = #%6\n"
	"   jump 3b\n"
	".previous\n"
	".section __ex_table,\"a\"\n"
	".long 1b,4b,2b,4b\n"
	".previous\n"
	: "+r" (ret), "=&r" (prev), "+m" (*uaddr)
	: "r" (uaddr), "r" (oldval), "r" (newval), "i"(-EFAULT)
	: "p2", "memory");

	*uval = prev;
	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_HEXAGON_FUTEX_H */