/* NOTE: this file contains two concatenated revisions of arch/sh futex.h
 * (the pre-arch_futex_atomic_op_inuser API, then the SPDX-era version). */
1#ifndef __ASM_SH_FUTEX_H
2#define __ASM_SH_FUTEX_H
3
4#ifdef __KERNEL__
5
6#include <linux/futex.h>
7#include <linux/uaccess.h>
8#include <asm/errno.h>
9
10/* XXX: UP variants, fix for SH-4A and SMP.. */
11#include <asm/futex-irq.h>
12
13static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
14{
15 int op = (encoded_op >> 28) & 7;
16 int cmp = (encoded_op >> 24) & 15;
17 int oparg = (encoded_op << 8) >> 20;
18 int cmparg = (encoded_op << 20) >> 20;
19 int oldval = 0, ret;
20
21 if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
22 oparg = 1 << oparg;
23
24 if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
25 return -EFAULT;
26
27 pagefault_disable();
28
29 switch (op) {
30 case FUTEX_OP_SET:
31 ret = atomic_futex_op_xchg_set(oparg, uaddr, &oldval);
32 break;
33 case FUTEX_OP_ADD:
34 ret = atomic_futex_op_xchg_add(oparg, uaddr, &oldval);
35 break;
36 case FUTEX_OP_OR:
37 ret = atomic_futex_op_xchg_or(oparg, uaddr, &oldval);
38 break;
39 case FUTEX_OP_ANDN:
40 ret = atomic_futex_op_xchg_and(~oparg, uaddr, &oldval);
41 break;
42 case FUTEX_OP_XOR:
43 ret = atomic_futex_op_xchg_xor(oparg, uaddr, &oldval);
44 break;
45 default:
46 ret = -ENOSYS;
47 break;
48 }
49
50 pagefault_enable();
51
52 if (!ret) {
53 switch (cmp) {
54 case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
55 case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
56 case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
57 case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
58 case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
59 case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
60 default: ret = -ENOSYS;
61 }
62 }
63
64 return ret;
65}
66
67static inline int
68futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
69 u32 oldval, u32 newval)
70{
71 if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
72 return -EFAULT;
73
74 return atomic_futex_op_cmpxchg_inatomic(uval, uaddr, oldval, newval);
75}
76
77#endif /* __KERNEL__ */
78#endif /* __ASM_SH_FUTEX_H */
1/* SPDX-License-Identifier: GPL-2.0 */
2#ifndef __ASM_SH_FUTEX_H
3#define __ASM_SH_FUTEX_H
4
5#include <linux/futex.h>
6#include <linux/uaccess.h>
7#include <asm/errno.h>
8
9#if !defined(CONFIG_SMP)
10#include <asm/futex-irq.h>
11#elif defined(CONFIG_CPU_J2)
12#include <asm/futex-cas.h>
13#elif defined(CONFIG_CPU_SH4A)
14#include <asm/futex-llsc.h>
15#else
16#error SMP not supported on this configuration.
17#endif
18
19static inline int
20futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
21 u32 oldval, u32 newval)
22{
23 if (!access_ok(uaddr, sizeof(u32)))
24 return -EFAULT;
25
26 return atomic_futex_op_cmpxchg_inatomic(uval, uaddr, oldval, newval);
27}
28
29static inline int arch_futex_atomic_op_inuser(int op, u32 oparg, int *oval,
30 u32 __user *uaddr)
31{
32 u32 oldval, newval, prev;
33 int ret;
34
35 do {
36 ret = get_user(oldval, uaddr);
37
38 if (ret) break;
39
40 switch (op) {
41 case FUTEX_OP_SET:
42 newval = oparg;
43 break;
44 case FUTEX_OP_ADD:
45 newval = oldval + oparg;
46 break;
47 case FUTEX_OP_OR:
48 newval = oldval | oparg;
49 break;
50 case FUTEX_OP_ANDN:
51 newval = oldval & ~oparg;
52 break;
53 case FUTEX_OP_XOR:
54 newval = oldval ^ oparg;
55 break;
56 default:
57 ret = -ENOSYS;
58 break;
59 }
60
61 if (ret) break;
62
63 ret = futex_atomic_cmpxchg_inatomic(&prev, uaddr, oldval, newval);
64 } while (!ret && prev != oldval);
65
66 if (!ret)
67 *oval = oldval;
68
69 return ret;
70}
71
72#endif /* __ASM_SH_FUTEX_H */