arch/alpha/include/asm/futex.h — Alpha futex primitives.
Two kernel versions of this header are shown below for comparison.

Version v3.1:
  1#ifndef _ASM_ALPHA_FUTEX_H
  2#define _ASM_ALPHA_FUTEX_H
  3
  4#ifdef __KERNEL__
  5
  6#include <linux/futex.h>
  7#include <linux/uaccess.h>
  8#include <asm/errno.h>
  9#include <asm/barrier.h>
 10
/*
 * __futex_atomic_op -- atomically read-modify-write the 32-bit user word
 * at @uaddr using an Alpha load-locked/store-conditional sequence.
 *
 * @insn:   one asm instruction computing the new value into %1 from the
 *          old value (%0) and @oparg (%3)
 * @ret:    out: 0 on success ("mov $31,%1" -- $31 is the Alpha register
 *          that always reads as zero), or -EFAULT if a fault fixup fires
 * @oldval: out: value the user word held before the update
 *
 * "1:" ldl_l loads-locked the word, "2:" stl_c conditionally stores the
 * new value; stl_c clears %1 if the lock was lost, in which case the
 * out-of-line stub at "4:" (kept in .subsection 2 so the hot path stays
 * straight-line) branches back to retry from "1:".
 *
 * The hand-written __ex_table entries direct a fault at either user
 * access (1b or 2b) to the continuation at 3b; the "lda $31,3b-Nb(%1)"
 * pseudo-instructions encode the fixup displacement and name %1 as the
 * register the Alpha fault handler loads with -EFAULT.
 * __ASM_SMP_MB expands to a memory barrier on SMP builds.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile__(					\
		__ASM_SMP_MB					\
	"1:	ldl_l	%0,0(%2)\n"				\
		insn						\
	"2:	stl_c	%1,0(%2)\n"				\
	"	beq	%1,4f\n"				\
	"	mov	$31,%1\n"				\
	"3:	.subsection 2\n"				\
	"4:	br	1b\n"					\
	"	.previous\n"					\
	"	.section __ex_table,\"a\"\n"			\
	"	.long	1b-.\n"					\
	"	lda	$31,3b-1b(%1)\n"			\
	"	.long	2b-.\n"					\
	"	lda	$31,3b-2b(%1)\n"			\
	"	.previous\n"					\
	:	"=&r" (oldval), "=&r"(ret)			\
	:	"r" (uaddr), "r"(oparg)				\
	:	"memory")
 31
/*
 * futex_atomic_op_inuser -- perform the arithmetic half of FUTEX_WAKE_OP
 * on the user word at @uaddr, then evaluate the encoded comparison
 * against the value the word held before the update.
 *
 * @encoded_op packs four fields:
 *   op     - bits 28..31: which atomic operation to apply
 *   cmp    - bits 24..27: which comparison to run on the old value
 *   oparg  - bits 12..23: operand, sign-extended by the <<8 then >>20 pair
 *   cmparg - bits  0..11: operand, sign-extended by the <<20 then >>20 pair
 * (the sign extension relies on arithmetic right shift of signed int,
 * which the kernel assumes of its supported compilers)
 *
 * Returns the 0/1 comparison result on success, -EFAULT if @uaddr is not
 * writable user memory or the access faults, -ENOSYS for an unrecognized
 * op or cmp code.
 */
static inline int futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;
	/* FUTEX_OP_OPARG_SHIFT: oparg is a shift count, not a value. */
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * The ll/sc sequence in __futex_atomic_op must not sleep, so user
	 * page faults are disabled; a fault becomes -EFAULT via the fixup.
	 */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	/* Success: translate the old value through the requested comparison. */
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
 82
 83static inline int
 84futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 85			      u32 oldval, u32 newval)
 86{
 87	int ret = 0, cmp;
 88	u32 prev;
 89
 90	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
 91		return -EFAULT;
 92
 93	__asm__ __volatile__ (
 94		__ASM_SMP_MB
 95	"1:	ldl_l	%1,0(%3)\n"
 96	"	cmpeq	%1,%4,%2\n"
 97	"	beq	%2,3f\n"
 98	"	mov	%5,%2\n"
 99	"2:	stl_c	%2,0(%3)\n"
100	"	beq	%2,4f\n"
101	"3:	.subsection 2\n"
102	"4:	br	1b\n"
103	"	.previous\n"
104	"	.section __ex_table,\"a\"\n"
105	"	.long	1b-.\n"
106	"	lda	$31,3b-1b(%0)\n"
107	"	.long	2b-.\n"
108	"	lda	$31,3b-2b(%0)\n"
109	"	.previous\n"
110	:	"+r"(ret), "=&r"(prev), "=&r"(cmp)
111	:	"r"(uaddr), "r"((long)oldval), "r"(newval)
112	:	"memory");
113
114	*uval = prev;
115	return ret;
116}
117
118#endif /* __KERNEL__ */
119#endif /* _ASM_ALPHA_FUTEX_H */
Version v5.4:
 1/* SPDX-License-Identifier: GPL-2.0 */
 2#ifndef _ASM_ALPHA_FUTEX_H
 3#define _ASM_ALPHA_FUTEX_H
 4
 5#ifdef __KERNEL__
 6
 7#include <linux/futex.h>
 8#include <linux/uaccess.h>
 9#include <asm/errno.h>
10#include <asm/barrier.h>
11
/*
 * __futex_atomic_op -- atomically read-modify-write the 32-bit user word
 * at @uaddr using an Alpha load-locked/store-conditional sequence.
 *
 * @insn:   one asm instruction computing the new value into %1 from the
 *          old value (%0) and @oparg (%3)
 * @ret:    out: 0 on success ("mov $31,%1" -- $31 is the Alpha register
 *          that always reads as zero), or -EFAULT if a fault fixup fires
 * @oldval: out: value the user word held before the update
 *
 * "1:" ldl_l loads-locked the word, "2:" stl_c conditionally stores the
 * new value; stl_c clears %1 if the lock was lost, in which case the
 * out-of-line stub at "4:" (kept in .subsection 2 so the hot path stays
 * straight-line) branches back to retry from "1:".
 *
 * EXC(from, to, $31, %1) emits an exception-table entry redirecting a
 * fault at `from' to the continuation at `to', with %1 named as the
 * register the fault handler loads with -EFAULT (replaces the open-coded
 * __ex_table directives of older kernels).  __ASM_SMP_MB expands to a
 * memory barrier on SMP builds.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
	__asm__ __volatile__(					\
		__ASM_SMP_MB					\
	"1:	ldl_l	%0,0(%2)\n"				\
		insn						\
	"2:	stl_c	%1,0(%2)\n"				\
	"	beq	%1,4f\n"				\
	"	mov	$31,%1\n"				\
	"3:	.subsection 2\n"				\
	"4:	br	1b\n"					\
	"	.previous\n"					\
	EXC(1b,3b,$31,%1)					\
	EXC(2b,3b,$31,%1)					\
	:	"=&r" (oldval), "=&r"(ret)			\
	:	"r" (uaddr), "r"(oparg)				\
	:	"memory")
28
/*
 * arch_futex_atomic_op_inuser -- apply @op with operand @oparg atomically
 * to the user word at @uaddr; on success store the word's previous value
 * through @oval.
 *
 * Unlike the pre-4.x interface, @op and @oparg arrive pre-decoded and no
 * access_ok() check is done here -- presumably performed by the generic
 * futex code before calling in (NOTE(review): confirm against
 * kernel/futex.c for this kernel version).
 *
 * Returns 0 on success, -EFAULT on a faulting user access (via the EXC()
 * fixups in __futex_atomic_op), -ENOSYS for an unknown @op.
 */
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	int oldval = 0, ret;

	/*
	 * The ll/sc sequence in __futex_atomic_op must not sleep, so user
	 * page faults are disabled; a fault becomes -EFAULT via the fixup.
	 */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	/* The comparison half of FUTEX_WAKE_OP is now done by the caller. */
	if (!ret)
		*oval = oldval;

	return ret;
}
63
/*
 * futex_atomic_cmpxchg_inatomic -- if the user word at @uaddr equals
 * @oldval, atomically replace it with @newval.  The value found in the
 * word is always stored through @uval so the caller can see what was
 * there.
 *
 * Returns 0 on success (whether or not the exchange happened) and
 * -EFAULT if @uaddr is not writable user memory or the access faults
 * (the EXC() fixups route a fault to 3b and deposit -EFAULT in ret).
 *
 * ldl_l sign-extends the loaded 32-bit word into the 64-bit register,
 * so @oldval is pushed through (long)(int) to sign-extend it the same
 * way before the cmpeq; a plain (long) cast of the u32 would
 * zero-extend and make the comparison spuriously fail for values with
 * bit 31 set.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0, cmp;
	u32 prev;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * 1: load-locked the current value; if it differs from oldval,
	 * skip to 3 (done).  Otherwise try to store newval; stl_c clears
	 * %2 if the lock was lost, in which case the out-of-line stub at
	 * 4 (.subsection 2) retries from 1.
	 */
	__asm__ __volatile__ (
		__ASM_SMP_MB
	"1:	ldl_l	%1,0(%3)\n"
	"	cmpeq	%1,%4,%2\n"
	"	beq	%2,3f\n"
	"	mov	%5,%2\n"
	"2:	stl_c	%2,0(%3)\n"
	"	beq	%2,4f\n"
	"3:	.subsection 2\n"
	"4:	br	1b\n"
	"	.previous\n"
	EXC(1b,3b,$31,%0)
	EXC(2b,3b,$31,%0)
	:	"+r"(ret), "=&r"(prev), "=&r"(cmp)
	:	"r"(uaddr), "r"((long)(int)oldval), "r"(newval)
	:	"memory");

	*uval = prev;
	return ret;
}
94
95#endif /* __KERNEL__ */
96#endif /* _ASM_ALPHA_FUTEX_H */