Commit | Line | Data |
---|---|---|
e589ed23 MP |
1 | #ifndef _ASM_ARM_FUTEX_H |
2 | #define _ASM_ARM_FUTEX_H | |
3 | ||
4 | #ifdef __KERNEL__ | |
5 | ||
c1b0db56 WD |
6 | #include <linux/futex.h> |
7 | #include <linux/uaccess.h> | |
8 | #include <asm/errno.h> | |
9 | ||
/*
 * Shared exception-table/fixup boilerplate appended to the futex inline
 * asm below.
 *
 * Emits label "3:" (the resume point after a fixup), an __ex_table
 * entry mapping the potentially-faulting instructions at labels 1 and 2
 * to the fixup at label 4, and the fixup code itself, which moves the
 * operand named by err_reg (bound to -EFAULT by the callers) into %0
 * and branches back to label 3.
 */
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .fixup,\"ax\"\n"			\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"
21 | ||
e589ed23 | 22 | #ifdef CONFIG_SMP |
4732efbe | 23 | |
/*
 * SMP futex read-modify-write primitive.
 *
 * Exclusively loads the user word at uaddr (ldrex) into %1 (oldval),
 * applies "insn" — which computes the new value into %0 from %1 and
 * oparg in %4 — and stores it back with strex, retrying until the
 * exclusive store succeeds.  On success %0 (ret) is cleared to 0; a
 * fault on the load or store is fixed up to -EFAULT via
 * __futex_atomic_ex_table.  smp_mb() provides a full barrier before
 * the operation; prefetchw() primes the line for the exclusive store.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	smp_mb();						\
	prefetchw(uaddr);					\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")
38 | ||
/*
 * SMP compare-and-exchange on a user-space futex word:
 * if (*uaddr == oldval) *uaddr = newval, atomically.
 *
 * The value actually read is stored through *uval.  Returns 0 on
 * success (whether or not the store happened — the caller compares
 * *uval against oldval to tell) or -EFAULT if uaddr is inaccessible
 * or the access faults.  The "ite eq" is required so a Thumb-2
 * assembler accepts the conditional strexeq/movne pair with the "2:"
 * label in between.  smp_mb() before and after supply the barrier
 * semantics the futex core expects.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	smp_mb();

	*uval = val;
	return ret;
}
4732efbe | 69 | |
e589ed23 MP |
70 | #else /* !SMP, we can work around lack of atomic ops by disabling preemption */ |
71 | ||
e589ed23 | 72 | #include <linux/preempt.h> |
247055aa | 73 | #include <asm/domain.h> |
e589ed23 | 74 | |
/*
 * UP futex read-modify-write primitive: no exclusive monitors needed.
 * Atomicity w.r.t. other tasks on this CPU comes from the caller
 * (futex_atomic_op_inuser) running under pagefault_disable(), which
 * implies preempt_disable() — see the "!SMP" comment above.
 *
 * TUSER() wraps the mnemonic in its user-access form (presumably the
 * ldrt/strt or domain-access variant — defined in asm/domain.h).
 * Faults on either access are redirected to -EFAULT by
 * __futex_atomic_ex_table.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")
85 | ||
/*
 * UP compare-and-exchange on a user-space futex word, without
 * exclusive monitors; relies on the caller having preemption disabled
 * (see the "!SMP" comment above).
 *
 * Stores the value read through *uval.  Returns 0 on success or
 * -EFAULT on a bad/faulting uaddr; ret starts at 0 and is only
 * overwritten by the fault fixup (hence the "+r" read-write
 * constraint).  The "it eq" is needed so a Thumb-2 assembler accepts
 * the conditional store after the "2:" label.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSER(streq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");

	*uval = val;
	return ret;
}
109 | ||
110 | #endif /* !SMP */ | |
111 | ||
e589ed23 | 112 | static inline int |
8d7718aa | 113 | futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr) |
e589ed23 MP |
114 | { |
115 | int op = (encoded_op >> 28) & 7; | |
116 | int cmp = (encoded_op >> 24) & 15; | |
117 | int oparg = (encoded_op << 8) >> 20; | |
118 | int cmparg = (encoded_op << 20) >> 20; | |
df77abca | 119 | int oldval = 0, ret, tmp; |
e589ed23 MP |
120 | |
121 | if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28)) | |
122 | oparg = 1 << oparg; | |
123 | ||
8d7718aa | 124 | if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32))) |
e589ed23 MP |
125 | return -EFAULT; |
126 | ||
127 | pagefault_disable(); /* implies preempt_disable() */ | |
128 | ||
129 | switch (op) { | |
130 | case FUTEX_OP_SET: | |
df77abca | 131 | __futex_atomic_op("mov %0, %4", ret, oldval, tmp, uaddr, oparg); |
e589ed23 MP |
132 | break; |
133 | case FUTEX_OP_ADD: | |
df77abca | 134 | __futex_atomic_op("add %0, %1, %4", ret, oldval, tmp, uaddr, oparg); |
e589ed23 MP |
135 | break; |
136 | case FUTEX_OP_OR: | |
df77abca | 137 | __futex_atomic_op("orr %0, %1, %4", ret, oldval, tmp, uaddr, oparg); |
e589ed23 MP |
138 | break; |
139 | case FUTEX_OP_ANDN: | |
df77abca | 140 | __futex_atomic_op("and %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg); |
e589ed23 MP |
141 | break; |
142 | case FUTEX_OP_XOR: | |
df77abca | 143 | __futex_atomic_op("eor %0, %1, %4", ret, oldval, tmp, uaddr, oparg); |
e589ed23 MP |
144 | break; |
145 | default: | |
146 | ret = -ENOSYS; | |
147 | } | |
148 | ||
149 | pagefault_enable(); /* subsumes preempt_enable() */ | |
150 | ||
151 | if (!ret) { | |
152 | switch (cmp) { | |
153 | case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break; | |
154 | case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break; | |
155 | case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break; | |
156 | case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break; | |
157 | case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break; | |
158 | case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break; | |
159 | default: ret = -ENOSYS; | |
160 | } | |
161 | } | |
162 | return ret; | |
163 | } | |
164 | ||
e589ed23 MP |
165 | #endif /* __KERNEL__ */ |
166 | #endif /* _ASM_ARM_FUTEX_H */ |