Linux kernel source listing: arch/alpha/include/asm/futex.h
(the leading "N" numbers embedded in the code lines below are artifacts of
the source-browser extraction, not part of the file)

Version v3.1:
 
  1#ifndef _ASM_ALPHA_FUTEX_H
  2#define _ASM_ALPHA_FUTEX_H
  3
  4#ifdef __KERNEL__
  5
  6#include <linux/futex.h>
  7#include <linux/uaccess.h>
  8#include <asm/errno.h>
  9#include <asm/barrier.h>
 10
/*
 * __futex_atomic_op(insn, ret, oldval, uaddr, oparg)
 *
 * Emit one atomic read-modify-write on the 32-bit user word at uaddr,
 * built from Alpha's load-locked/store-conditional pair:
 *   1: ldl_l loads the current value into %0 (oldval);
 *      "insn" computes the new value into %1 from %0 and %3 (oparg);
 *   2: stl_c conditionally stores %1 — a lost reservation branches to
 *      4: (placed out of line in .subsection 2) which retries from 1:.
 * On success ret (%1) is cleared via "mov $31,%1" ($31 is the Alpha
 * always-zero register).  __ASM_SMP_MB orders the access on SMP builds.
 *
 * The two __ex_table entries cover the user accesses at 1b and 2b: on a
 * fault execution resumes at 3: with an error code placed in the register
 * named by the lda pseudo-entry (%1 == ret) — NOTE(review): this follows
 * the arch/alpha exception-fixup encoding convention; confirm against
 * arch/alpha/mm/fault.c if modifying.
 */
  11#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
  12	__asm__ __volatile__(					\
  13		__ASM_SMP_MB					\
  14	"1:	ldl_l	%0,0(%2)\n"				\
  15		insn						\
  16	"2:	stl_c	%1,0(%2)\n"				\
  17	"	beq	%1,4f\n"				\
  18	"	mov	$31,%1\n"				\
  19	"3:	.subsection 2\n"				\
  20	"4:	br	1b\n"					\
  21	"	.previous\n"					\
  22	"	.section __ex_table,\"a\"\n"			\
  23	"	.long	1b-.\n"					\
  24	"	lda	$31,3b-1b(%1)\n"			\
  25	"	.long	2b-.\n"					\
  26	"	lda	$31,3b-2b(%1)\n"			\
  27	"	.previous\n"					\
  28	:	"=&r" (oldval), "=&r"(ret)			\
  29	:	"r" (uaddr), "r"(oparg)				\
  30	:	"memory")
 31
/*
 * futex_atomic_op_inuser - perform one FUTEX_WAKE_OP-style atomic
 * operation on the user futex word and report how the old value
 * compares against cmparg.
 *
 * encoded_op packs four fields (see linux/futex.h):
 *   bits 28-31: op     (SET/ADD/OR/ANDN/XOR, plus the OPARG_SHIFT flag)
 *   bits 24-27: cmp    (EQ/NE/LT/GE/LE/GT)
 *   bits 12-23: oparg  (12-bit signed operand)
 *   bits  0-11: cmparg (12-bit signed comparison argument)
 *
 * Returns the boolean result of "oldval <cmp> cmparg" on success,
 * -EFAULT if uaddr is not writable, or -ENOSYS for an unknown op/cmp.
 */
  32static inline int futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
 
  33{
  34	int op = (encoded_op >> 28) & 7;
  35	int cmp = (encoded_op >> 24) & 15;
	/* the shift pairs sign-extend the two 12-bit operand fields */
  36	int oparg = (encoded_op << 8) >> 20;
  37	int cmparg = (encoded_op << 20) >> 20;
  38	int oldval = 0, ret;
	/* OPARG_SHIFT flag: operand is a shift count, not a value */
  39	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
  40		oparg = 1 << oparg;
  41
  42	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
  43		return -EFAULT;
  44
	/* the ll/sc sequences below must not sleep on a page fault */
  45	pagefault_disable();
  46
  47	switch (op) {
  48	case FUTEX_OP_SET:
  49		__futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
  50		break;
  51	case FUTEX_OP_ADD:
  52		__futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
  53		break;
  54	case FUTEX_OP_OR:
  55		__futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
  56		break;
  57	case FUTEX_OP_ANDN:
  58		__futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
  59		break;
  60	case FUTEX_OP_XOR:
  61		__futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
  62		break;
  63	default:
  64		ret = -ENOSYS;
  65	}
  66
  67	pagefault_enable();
 
  68
	/* op succeeded: evaluate the requested comparison on the old value */
  69	if (!ret) {
  70		switch (cmp) {
  71		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
  72		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
  73		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
  74		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
  75		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
  76		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
  77		default: ret = -ENOSYS;
  78		}
  79	}
  80	return ret;
  81}
 82
 83static inline int
 84futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 85			      u32 oldval, u32 newval)
 86{
 87	int ret = 0, cmp;
 88	u32 prev;
 89
 90	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
 91		return -EFAULT;
 92
 93	__asm__ __volatile__ (
 94		__ASM_SMP_MB
 95	"1:	ldl_l	%1,0(%3)\n"
 96	"	cmpeq	%1,%4,%2\n"
 97	"	beq	%2,3f\n"
 98	"	mov	%5,%2\n"
 99	"2:	stl_c	%2,0(%3)\n"
100	"	beq	%2,4f\n"
101	"3:	.subsection 2\n"
102	"4:	br	1b\n"
103	"	.previous\n"
104	"	.section __ex_table,\"a\"\n"
105	"	.long	1b-.\n"
106	"	lda	$31,3b-1b(%0)\n"
107	"	.long	2b-.\n"
108	"	lda	$31,3b-2b(%0)\n"
109	"	.previous\n"
110	:	"+r"(ret), "=&r"(prev), "=&r"(cmp)
111	:	"r"(uaddr), "r"((long)oldval), "r"(newval)
112	:	"memory");
113
114	*uval = prev;
115	return ret;
116}
117
118#endif /* __KERNEL__ */
119#endif /* _ASM_ALPHA_FUTEX_H */
Version v5.9 of the same file (note the EXC() exception-table helper and the sign-extension fix in futex_atomic_cmpxchg_inatomic):
 1/* SPDX-License-Identifier: GPL-2.0 */
 2#ifndef _ASM_ALPHA_FUTEX_H
 3#define _ASM_ALPHA_FUTEX_H
 4
 5#ifdef __KERNEL__
 6
 7#include <linux/futex.h>
 8#include <linux/uaccess.h>
 9#include <asm/errno.h>
10#include <asm/barrier.h>
11
/*
 * __futex_atomic_op(insn, ret, oldval, uaddr, oparg)
 *
 * Emit one atomic read-modify-write on the 32-bit user word at uaddr,
 * built from Alpha's load-locked/store-conditional pair:
 *   1: ldl_l loads the current value into %0 (oldval);
 *      "insn" computes the new value into %1 from %0 and %3 (oparg);
 *   2: stl_c conditionally stores %1 — a lost reservation branches to
 *      4: (out of line in .subsection 2) which retries from 1:.
 * On success ret (%1) is cleared via "mov $31,%1" ($31 is the Alpha
 * always-zero register).  __ASM_SMP_MB orders the access on SMP builds.
 *
 * EXC(src, dst, reg, err) emits an __ex_table entry so a fault at src
 * resumes at dst with the error placed in err (here: ret/%1) — see
 * asm/asm-prototypes / the EXC definition elsewhere in arch/alpha.
 */
12#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
13	__asm__ __volatile__(					\
14		__ASM_SMP_MB					\
15	"1:	ldl_l	%0,0(%2)\n"				\
16		insn						\
17	"2:	stl_c	%1,0(%2)\n"				\
18	"	beq	%1,4f\n"				\
19	"	mov	$31,%1\n"				\
20	"3:	.subsection 2\n"				\
21	"4:	br	1b\n"					\
22	"	.previous\n"					\
23	EXC(1b,3b,$31,%1)					\
24	EXC(2b,3b,$31,%1)					\
 
 
 
 
25	:	"=&r" (oldval), "=&r"(ret)			\
26	:	"r" (uaddr), "r"(oparg)				\
27	:	"memory")
28
/*
 * arch_futex_atomic_op_inuser - perform one futex atomic op on the user
 * word at uaddr and return the previous value through *oval.
 *
 * Unlike the pre-4.x futex_atomic_op_inuser, the op/oparg decoding, the
 * comparison against cmparg, and pagefault_disable() are all handled by
 * the generic futex code; this arch hook only runs the atomic op.
 *
 * Returns 0 on success (with *oval set), -EFAULT if uaddr is not
 * accessible, or -ENOSYS for an unknown op.
 */
29static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
30		u32 __user *uaddr)
31{
 
 
 
 
32	int oldval = 0, ret;
 
 
33
34	if (!access_ok(uaddr, sizeof(u32)))
35		return -EFAULT;
36
 
 
37	switch (op) {
38	case FUTEX_OP_SET:
39		__futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
40		break;
41	case FUTEX_OP_ADD:
42		__futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
43		break;
44	case FUTEX_OP_OR:
45		__futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
46		break;
47	case FUTEX_OP_ANDN:
48		__futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
49		break;
50	case FUTEX_OP_XOR:
51		__futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
52		break;
53	default:
54		ret = -ENOSYS;
55	}
56
	/* hand the pre-op value back only when the op itself succeeded */
57	if (!ret)
58		*oval = oldval;
59
 
 
 
 
 
 
 
 
 
 
 
60	return ret;
61}
62
/*
 * futex_atomic_cmpxchg_inatomic - atomically replace *uaddr with newval
 * iff it currently equals oldval; the observed value is stored in *uval.
 *
 * Returns 0 on success (the caller compares *uval against oldval to see
 * whether the exchange happened) or -EFAULT on an inaccessible/faulting
 * user address.
 *
 * Sequence:
 *   1: ldl_l  load-locked the current value into prev (%1);
 *      cmpeq  compare with oldval — on mismatch fall through to 3:,
 *             leaving ret == 0 and prev holding the observed value;
 *   2: stl_c  conditionally store newval; a lost reservation branches
 *             to 4: (out of line in .subsection 2) and retries from 1:.
 * EXC() routes user-access faults at 1b/2b to 3: with -EFAULT in ret.
 *
 * The "(long)(int)oldval" cast sign-extends oldval to match the
 * sign-extended longword that ldl_l produces — a plain (long) cast on a
 * u32 would zero-extend and cmpeq would never match values with bit 31
 * set (this was a real bug in older kernels, cf. the v3.1 copy above).
 */
63static inline int
64futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
65			      u32 oldval, u32 newval)
66{
67	int ret = 0, cmp;
68	u32 prev;
69
70	if (!access_ok(uaddr, sizeof(u32)))
71		return -EFAULT;
72
73	__asm__ __volatile__ (
74		__ASM_SMP_MB
75	"1:	ldl_l	%1,0(%3)\n"
76	"	cmpeq	%1,%4,%2\n"
77	"	beq	%2,3f\n"
78	"	mov	%5,%2\n"
79	"2:	stl_c	%2,0(%3)\n"
80	"	beq	%2,4f\n"
81	"3:	.subsection 2\n"
82	"4:	br	1b\n"
83	"	.previous\n"
84	EXC(1b,3b,$31,%0)
85	EXC(2b,3b,$31,%0)
 
 
 
 
86	:	"+r"(ret), "=&r"(prev), "=&r"(cmp)
87	:	"r"(uaddr), "r"((long)(int)oldval), "r"(newval)
88	:	"memory");
89
90	*uval = prev;
91	return ret;
92}
93
94#endif /* __KERNEL__ */
95#endif /* _ASM_ALPHA_FUTEX_H */