/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_HEXAGON_FUTEX_H
#define _ASM_HEXAGON_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

/* XXX TODO-- need to add sync barriers! */

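/*
 * __futex_atomic_op() wraps one user-space read-modify-write in a
 * Hexagon LL/SC loop: memw_locked as a load takes a reservation on the
 * word at %3, and memw_locked as a store succeeds (setting predicate
 * p2) only if that reservation is still held, otherwise we retry from
 * 1:.  %0 receives the old value; %1 carries the computed new value
 * and, once the store succeeds, the return code (#0).  A fault at 1:
 * or 2: is steered by the __ex_table entries to the fixup at 4:, which
 * returns -EFAULT in %1.
 */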
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
	__asm__ __volatile( \
	"1: %0 = memw_locked(%3);\n" \
	    /* For example: %1 = %4 */ \
	    insn \
	"2: memw_locked(%3,p2) = %1;\n" \
	"   if (!p2) jump 1b;\n" \
	"   %1 = #0;\n" \
	"3:\n" \
	".section .fixup,\"ax\"\n" \
	"4: %1 = #%5;\n" \
	"   jump ##3b\n" \
	".previous\n" \
	".section __ex_table,\"a\"\n" \
	".long 1b,4b,2b,4b\n" \
	".previous\n" \
	: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr) \
	: "r" (uaddr), "r" (oparg), "i" (-EFAULT) \
	: "p2", "memory")

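/*
 * Atomically perform op (one of the FUTEX_OP_* codes) with oparg on the
 * user word at uaddr, returning the previous value through *oval.  This
 * is expected to run with page faults disabled (the generic futex code
 * arranges that), so after the access_ok() range check a faulting user
 * access lands in the fixup path and yields -EFAULT instead of faulting
 * the page in; unknown ops yield -ENOSYS.
 */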
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("%1 = %4\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("%1 = add(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("%1 = or(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("%1 = not(%4); %1 = and(%0,%1)\n", ret,
				  oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("%1 = xor(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}

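/*
 * Atomically compare the user word at uaddr with oldval and, only on a
 * match, replace it with newval; whatever value was found is returned
 * through *uval.  The packet below pairs cmp.eq with a .new predicate
 * so the branch can consume p2 in the same packet, skipping the store
 * on a mismatch, and a fault in either memw_locked access is steered
 * to the fixup that returns -EFAULT.
 */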
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
			      u32 newval)
{
	int prev;
	int ret = 0;	/* only overwritten by the fixup path on a fault */

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
	"1: %1 = memw_locked(%3)\n"
	"   {\n"
	"      p2 = cmp.eq(%1,%4)\n"
	"      if (!p2.new) jump:NT 3f\n"
	"   }\n"
	"2: memw_locked(%3,p2) = %5\n"
	"   if (!p2) jump 1b\n"
	"3:\n"
	".section .fixup,\"ax\"\n"
	"4: %0 = #%6\n"
	"   jump ##3b\n"
	".previous\n"
	".section __ex_table,\"a\"\n"
	".long 1b,4b,2b,4b\n"
	".previous\n"
	: "+r" (ret), "=&r" (prev), "+m" (*uaddr)
	: "r" (uaddr), "r" (oldval), "r" (newval), "i"(-EFAULT)
	: "p2", "memory");

	*uval = prev;
	return ret;
}

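/*
 * Usage sketch (illustrative only, not part of this header): the
 * generic futex code decodes op/oparg from the encoded futex opcode
 * and wraps the arch helper with pagefault_disable()/pagefault_enable(),
 * roughly:
 *
 *	int oldval, ret;
 *
 *	pagefault_disable();
 *	ret = arch_futex_atomic_op_inuser(FUTEX_OP_ADD, 1, &oldval, uaddr);
 *	pagefault_enable();
 *
 * On success, oldval holds the value *uaddr had before the add; on
 * -EFAULT the caller typically faults the page in and retries.
 */
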
#endif /* __KERNEL__ */
#endif /* _ASM_HEXAGON_FUTEX_H */