Commit | Line | Data |
---|---|---|
e589ed23 MP |
1 | #ifndef _ASM_ARM_FUTEX_H |
2 | #define _ASM_ARM_FUTEX_H | |
3 | ||
4 | #ifdef __KERNEL__ | |
5 | ||
c1b0db56 WD |
6 | #include <linux/futex.h> |
7 | #include <linux/uaccess.h> | |
8 | #include <asm/errno.h> | |
9 | ||
/*
 * Emit the exception-table entries and fixup code for the two user
 * accesses labelled "1:" and "2:" in the asm block that includes this.
 * On a fault at either label, execution resumes at "4:", which loads
 * -EFAULT (passed in via err_reg, an asm operand string such as "%5")
 * into output operand %0 and branches back to the in-line label "3:".
 */
#define __futex_atomic_ex_table(err_reg) \
"3:\n" \
" .pushsection __ex_table,\"a\"\n" \
" .align 3\n" \
" .long 1b, 4f, 2b, 4f\n" \
" .popsection\n" \
" .pushsection .text.fixup,\"ax\"\n" \
" .align 2\n" \
"4: mov %0, " err_reg "\n" \
" b 3b\n" \
" .popsection"
21 | ||
e589ed23 | 22 | #ifdef CONFIG_SMP |
4732efbe | 23 | |
/*
 * SMP variant: atomically read-modify-write the user word at uaddr.
 * "insn" is an asm instruction that reads the old value in %1 and the
 * operand in %4, and writes the result to %0; the ldrex/strex pair is
 * retried until the store-exclusive succeeds.  On success ret is 0 and
 * oldval holds the previous value; on a fault the exception-table
 * fixup sets ret to -EFAULT (operand %5).
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
	smp_mb(); \
	prefetchw(uaddr); \
	__asm__ __volatile__( \
	"1: ldrex %1, [%3]\n" \
	" " insn "\n" \
	"2: strex %2, %0, [%3]\n" \
	" teq %2, #0\n" \
	" bne 1b\n" \
	" mov %0, #0\n" \
	__futex_atomic_ex_table("%5") \
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
	: "cc", "memory")
38 | ||
/*
 * SMP variant of futex compare-and-exchange.
 *
 * If the user word at uaddr equals oldval, atomically replace it with
 * newval using ldrex/strexeq, retrying on exclusive-store failure.
 * The value actually read from user space is returned through *uval.
 *
 * Returns 0 on success (whether or not the exchange happened) or
 * -EFAULT if the user access failed.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1: ldrex %1, [%4]\n"
	" teq %1, %2\n"
	" ite eq @ explicit IT needed for the 2b label\n"
	"2: strexeq %0, %3, [%4]\n"
	" movne %0, #0\n"
	" teq %0, #0\n"
	" bne 1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	smp_mb();

	*uval = val;
	return ret;
}
4732efbe | 69 | |
e589ed23 MP |
70 | #else /* !SMP, we can work around lack of atomic ops by disabling preemption */ |
71 | ||
e589ed23 | 72 | #include <linux/preempt.h> |
247055aa | 73 | #include <asm/domain.h> |
e589ed23 | 74 | |
/*
 * UP variant: a plain TUSER(ldr)/TUSER(str) pair — not atomic by
 * itself; the caller must have preemption disabled (see the !SMP
 * comment above).  "insn" reads the old value in %1 and the operand
 * in %4 and writes the result to %0.  ret is 0 on success, oldval
 * holds the previous value; a fault sets ret to -EFAULT via the
 * exception-table fixup (operand %5).
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
	__asm__ __volatile__( \
	"1: " TUSER(ldr) " %1, [%3]\n" \
	" " insn "\n" \
	"2: " TUSER(str) " %0, [%3]\n" \
	" mov %0, #0\n" \
	__futex_atomic_ex_table("%5") \
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
	: "cc", "memory")
85 | ||
c1b0db56 WD |
/*
 * UP variant of futex compare-and-exchange.
 *
 * Without ldrex/strex this is a non-atomic load/compare/conditional
 * store, made effectively atomic by disabling preemption around the
 * sequence.  If the user word at uaddr equals oldval it is replaced
 * with newval; the value read is returned through *uval.
 *
 * Returns 0 on success or -EFAULT if the user access failed (set by
 * the exception-table fixup into the "+r" ret operand).
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	preempt_disable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1: " TUSER(ldr) " %1, [%4]\n"
	" teq %1, %2\n"
	" it eq @ explicit IT needed for the 2b label\n"
	"2: " TUSER(streq) " %3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");

	*uval = val;
	preempt_enable();

	return ret;
}
112 | ||
113 | #endif /* !SMP */ | |
114 | ||
/*
 * Perform the futex operation packed into encoded_op on the user word
 * at uaddr, then evaluate the encoded comparison against the word's
 * old value.
 *
 * encoded_op layout: bits 31-28 = op, bits 27-24 = cmp,
 * bits 23-12 = oparg, bits 11-0 = cmparg (the shift pairs below
 * sign-extend the two 12-bit arguments).
 *
 * Returns the boolean comparison result (0/1), or a negative errno:
 * -EFAULT on a bad user address, -ENOSYS for an unknown op or cmp.
 */
static inline int
futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;	/* arithmetic shift: sign-extend 12 bits */
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tmp;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_SMP
	/*
	 * On UP the __futex_atomic_op load/store pair is only atomic if
	 * nothing can run in between, so disable preemption explicitly.
	 */
	preempt_disable();
#endif
	pagefault_disable();

	/*
	 * The asm fragment reads the old value in %1 and the operand in
	 * %4, writing the result to %0 — numbering fixed by the macro's
	 * constraint list.
	 */
	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* AND-NOT: complement the operand, then plain AND */
		__futex_atomic_op("and %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();
#ifndef CONFIG_SMP
	preempt_enable();
#endif

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
173 | ||
e589ed23 MP |
174 | #endif /* __KERNEL__ */ |
175 | #endif /* _ASM_ARM_FUTEX_H */ |