#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

#define __futex_atomic_ex_table(err_reg) \
        "3:\n" \
        " .pushsection __ex_table,\"a\"\n" \
        " .align 3\n" \
        " .long 1b, 4f, 2b, 4f\n" \
        " .popsection\n" \
        " .pushsection .text.fixup,\"ax\"\n" \
        " .align 2\n" \
        "4: mov %0, " err_reg "\n" \
        " b 3b\n" \
        " .popsection"

#ifdef CONFIG_SMP

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
({ \
        unsigned int __ua_flags; \
        smp_mb(); \
        prefetchw(uaddr); \
        __ua_flags = uaccess_save_and_enable(); \
        __asm__ __volatile__( \
        "1: ldrex %1, [%3]\n" \
        " " insn "\n" \
        "2: strex %2, %0, [%3]\n" \
        " teq %2, #0\n" \
        " bne 1b\n" \
        " mov %0, #0\n" \
        __futex_atomic_ex_table("%5") \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
        : "cc", "memory"); \
        uaccess_restore(__ua_flags); \
})
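
/*
 * For illustration (not part of the original header): with insn set to
 * "add %0, %1, %4", as FUTEX_OP_ADD does below, the retry loop above
 * expands roughly to
 *
 *   1:  ldrex   oldval, [uaddr]        @ load-exclusive the futex word
 *       add     ret, oldval, oparg     @ compute the new value
 *   2:  strex   tmp, ret, [uaddr]      @ store it back, exclusively
 *       teq     tmp, #0                @ tmp != 0 means we lost the
 *       bne     1b                     @ reservation, so retry
 *       mov     ret, #0                @ success
 *
 * given the operand mapping %0=ret, %1=oldval, %2=tmp, %3=uaddr,
 * %4=oparg.
 */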

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        unsigned int __ua_flags;
        int ret;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        smp_mb();
        /* Prefetching cannot fault */
        prefetchw(uaddr);
        __ua_flags = uaccess_save_and_enable();
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1: ldrex %1, [%4]\n"
        " teq %1, %2\n"
        " ite eq @ explicit IT needed for the 2b label\n"
        "2: strexeq %0, %3, [%4]\n"
        " movne %0, #0\n"
        " teq %0, #0\n"
        " bne 1b\n"
        __futex_atomic_ex_table("%5")
        : "=&r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");
        uaccess_restore(__ua_flags);
        smp_mb();

        *uval = val;
        return ret;
}
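
/*
 * A minimal sketch of how the generic futex code consumes this helper
 * (the caller shown is modelled on kernel/futex.c and is illustrative,
 * not part of this header):
 *
 *   u32 curval;
 *   if (futex_atomic_cmpxchg_inatomic(&curval, uaddr, expected, desired))
 *           return -EFAULT;        // the user access faulted
 *   if (curval != expected)
 *           ...                    // lost a race; curval holds the value
 *                                  // actually observed at *uaddr
 */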

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
({ \
        unsigned int __ua_flags = uaccess_save_and_enable(); \
        __asm__ __volatile__( \
        "1: " TUSER(ldr) " %1, [%3]\n" \
        " " insn "\n" \
        "2: " TUSER(str) " %0, [%3]\n" \
        " mov %0, #0\n" \
        __futex_atomic_ex_table("%5") \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
        : "cc", "memory"); \
        uaccess_restore(__ua_flags); \
})
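
/*
 * Note: on UP the plain (non-exclusive) TUSER() load/store pair is
 * sufficient because the only interleaving to guard against comes from
 * preemption, which the callers disable. TUSER(ldr)/TUSER(str) expand
 * to the unprivileged ldrt/strt forms when CONFIG_CPU_USE_DOMAINS is
 * enabled, and to plain ldr/str otherwise (see asm/domain.h).
 */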

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        unsigned int __ua_flags;
        int ret = 0;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        preempt_disable();
        __ua_flags = uaccess_save_and_enable();
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1: " TUSER(ldr) " %1, [%4]\n"
        " teq %1, %2\n"
        " it eq @ explicit IT needed for the 2b label\n"
        "2: " TUSER(streq) " %3, [%4]\n"
        __futex_atomic_ex_table("%5")
        : "+r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");
        uaccess_restore(__ua_flags);

        *uval = val;
        preempt_enable();

        return ret;
}

#endif /* !SMP */

static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;
        int cmparg = (encoded_op << 20) >> 20;
        int oldval = 0, ret, tmp;

        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

#ifndef CONFIG_SMP
        preempt_disable();
#endif
        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mov %0, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("add %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("orr %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("and %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("eor %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();
#ifndef CONFIG_SMP
        preempt_enable();
#endif

        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
                case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
                case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
                case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
                case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
                default: ret = -ENOSYS;
                }
        }
        return ret;
}
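
/*
 * Worked example of the encoding decoded above (illustrative only):
 * FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_GT, 0) packs op=1 into bits
 * 28-31 (bit 31 doubles as the OPARG_SHIFT flag, hence the "& 7"),
 * cmp=5 into bits 24-27, oparg=1 into the signed 12-bit field at bits
 * 12-23 and cmparg=0 into bits 0-11, giving encoded_op == 0x15001000.
 * The shifts above recover op=FUTEX_OP_ADD, cmp=FUTEX_OP_CMP_GT,
 * oparg=1, cmparg=0, so the call atomically adds 1 to *uaddr and
 * returns whether the old value was greater than 0.
 */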

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */
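
/*
 * For context (an illustrative sketch, not part of this header): the
 * futex_atomic_op_inuser() path above services FUTEX_WAKE_OP requests,
 * which userspace issues along these lines:
 *
 *   #include <linux/futex.h>
 *   #include <sys/syscall.h>
 *   #include <unistd.h>
 *
 *   // Wake one waiter on f1; atomically set *f2 = 1 and, if the old
 *   // value of *f2 was nonzero, wake one waiter on f2 as well.
 *   syscall(SYS_futex, f1, FUTEX_WAKE_OP, 1, (void *)1UL, f2,
 *           FUTEX_OP(FUTEX_OP_SET, 1, FUTEX_OP_CMP_NE, 0));
 */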