/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_OPENRISC_FUTEX_H
#define __ASM_OPENRISC_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

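/*
 * __futex_atomic_op() builds a load-linked/store-conditional style loop
 * from the OpenRISC l.lwa/l.swa pair: the old value is loaded into %0,
 * 'insn' computes the new value into %1, and l.swa succeeds only while
 * the atomic reservation taken by l.lwa is still valid; l.bnf branches
 * back to retry when the store fails.  A faulting user access is
 * redirected through the __ex_table/.fixup entries, which set 'ret'
 * (%1) to -EFAULT.
 */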
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
({								\
	__asm__ __volatile__ (					\
		"1:	l.lwa	%0, %2			\n"	\
			insn				"\n"	\
		"2:	l.swa	%2, %1			\n"	\
		"	l.bnf	1b			\n"	\
		"	 l.ori	%1, r0, 0		\n"	\
		"3:					\n"	\
		".section .fixup,\"ax\"			\n"	\
		"4:	l.j	3b			\n"	\
		"	 l.addi	%1, r0, %3		\n"	\
		".previous				\n"	\
		".section __ex_table,\"a\"		\n"	\
		".word	1b,4b,2b,4b			\n"	\
		".previous				\n"	\
		: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr)	\
		: "i" (-EFAULT), "r" (oparg)			\
		: "cc", "memory"				\
		);						\
})

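/*
 * arch_futex_atomic_op_inuser() atomically performs
 * "*uaddr = *uaddr <op> oparg" (FUTEX_OP_SET simply stores oparg;
 * "l.or %1,%4,%4" acts as a register move) and returns the previous
 * value through *oval.  It returns 0 on success, -EFAULT if the user
 * access faults, or -ENOSYS for an unrecognised op.
 */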
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("l.or %1,%4,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("l.add %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("l.or %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("l.and %1,%0,%4", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("l.xor %1,%0,%4", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}

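/*
 * futex_atomic_cmpxchg_inatomic() compares the user futex word against
 * oldval and, if it matches, atomically replaces it with newval,
 * retrying when the l.swa reservation is lost.  The value observed at
 * *uaddr is returned through *uval; the return value is 0 on success
 * or -EFAULT if the user access faults.
 */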
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 prev;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (				\
		"1:	l.lwa	%1, %2		\n"	\
		"	l.sfeq	%1, %3		\n"	\
		"	l.bnf	3f		\n"	\
		"	 l.nop			\n"	\
		"2:	l.swa	%2, %4		\n"	\
		"	l.bnf	1b		\n"	\
		"	 l.nop			\n"	\
		"3:				\n"	\
		".section .fixup,\"ax\"		\n"	\
		"4:	l.j	3b		\n"	\
		"	 l.addi	%0, r0, %5	\n"	\
		".previous			\n"	\
		".section __ex_table,\"a\"	\n"	\
		".word	1b,4b,2b,4b		\n"	\
		".previous			\n"	\
		: "+r" (ret), "=&r" (prev), "+m" (*uaddr)	\
		: "r" (oldval), "r" (newval), "i" (-EFAULT)	\
		: "cc", "memory"			\
		);

	*uval = prev;
	return ret;
}

#endif /* __KERNEL__ */

#endif /* __ASM_OPENRISC_FUTEX_H */