v3.15: arch/arm/include/asm/futex.h

#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .fixup,\"ax\"\n"			\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

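/*
 * How the fixup above works: the ".long 1b, 4f, 2b, 4f" directive emits
 * two (faulting-instruction, fixup) address pairs into the kernel's
 * __ex_table, one for each user access at labels 1 and 2.  If either
 * access faults, the exception handler resumes at label 4, which copies
 * -EFAULT (passed in as err_reg) into the result register and branches
 * back to label 3, just past the access sequence.
 */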
#ifdef CONFIG_SMP

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	smp_mb();						\
	prefetchw(uaddr);					\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")

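/*
 * The macro above is the usual load-exclusive/store-exclusive retry
 * loop: ldrex marks uaddr in the exclusive monitor, strex succeeds only
 * if the reservation is still held (writing 0 to the tmp operand %2),
 * and teq/bne restarts the sequence otherwise.  prefetchw() first pulls
 * the cache line in exclusively, so the initial strex is less likely to
 * fail.
 */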
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	smp_mb();

	*uval = val;
	return ret;
}

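/*
 * A sketch of the generic caller (kernel/futex.c, not part of this
 * file), for orientation:
 *
 *	pagefault_disable();
 *	ret = futex_atomic_cmpxchg_inatomic(&curval, uaddr, uval, newval);
 *	pagefault_enable();
 *
 * On success ret is 0 and *uval holds the value that was actually found
 * at uaddr; the caller compares it against the expected value to decide
 * whether the store took place.
 */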
#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSER(streq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");

	*uval = val;
	return ret;
}

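/*
 * Note that the !SMP variants above provide no atomicity of their own;
 * they rely on the caller running with preemption disabled.  In this
 * kernel that is the pagefault_disable() call in futex_atomic_op_inuser()
 * below (which also bumps the preempt count), and the generic futex code
 * likewise wraps futex_atomic_cmpxchg_inatomic() in pagefault_disable().
 */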
#endif /* !SMP */

static inline int
futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
{
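	/*
	 * encoded_op packs four fields (see FUTEX_OP() in the uapi
	 * <linux/futex.h>):
	 *   bits 31-28: op      (FUTEX_OP_SET/ADD/OR/ANDN/XOR, plus the
	 *                        FUTEX_OP_OPARG_SHIFT flag)
	 *   bits 27-24: cmp     (FUTEX_OP_CMP_*)
	 *   bits 23-12: oparg   (12 bits, sign-extended below)
	 *   bits 11-0:  cmparg  (12 bits, sign-extended below)
	 * For example, FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_GT, 0)
	 * decodes to op = ADD, oparg = 1, cmp = GT, cmparg = 0.
	 */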
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tmp;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();	/* implies preempt_disable() */

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();	/* subsumes preempt_enable() */

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */
v6.13.7: arch/arm/include/asm/futex.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .text.fixup,\"ax\"\n"		\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

#ifdef CONFIG_SMP

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags;				\
	smp_mb();						\
	prefetchw(uaddr);					\
	__ua_flags = uaccess_save_and_enable();			\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})

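/*
 * uaccess_save_and_enable()/uaccess_restore() bracket the user access
 * with a temporary kernel-to-user access window.  Depending on the
 * kernel configuration this toggles the software-PAN domain state (or
 * a similar access protection); where no such protection is configured
 * it collapses to a no-op.
 */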
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);
	smp_mb();

	*uval = val;
	return ret;
}

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({								\
	unsigned int __ua_flags = uaccess_save_and_enable();	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory");					\
	uaccess_restore(__ua_flags);				\
})

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret = 0;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	preempt_disable();
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"	.syntax unified\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSERCOND(str, eq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);

	*uval = val;
	preempt_enable();

	return ret;
}

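/*
 * Unlike the SMP version, the routine above has no hardware atomicity
 * to lean on: the explicit preempt_disable()/preempt_enable() pair
 * ensures no other task can run between the user load at label 1 and
 * the conditional store at label 2, which is sufficient on a single
 * CPU.
 */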
#endif /* !SMP */

static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret, tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_SMP
	preempt_disable();
#endif

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

#ifndef CONFIG_SMP
	preempt_enable();
#endif

	/*
	 * Store unconditionally. If ret != 0 the extra store is the least
	 * of the worries but GCC cannot figure out that __futex_atomic_op()
	 * is either setting ret to -EFAULT or storing the old value in
	 * oldval which results in an uninitialized warning at the call site.
	 */
	*oval = oldval;

	return ret;
}
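/*
 * For orientation: the encoded-op decoding and the FUTEX_OP_CMP_*
 * comparison that earlier kernels did here now live in the generic
 * futex code, which roughly calls
 *
 *	arch_futex_atomic_op_inuser(op, oparg, &oldval, uaddr);
 *
 * and then applies the comparison to the returned oldval itself, so
 * the per-architecture function only performs the atomic
 * read-modify-write and reports the old value.
 */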

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */
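For context, the op word these functions handle is supplied by userspace via the FUTEX_WAKE_OP futex operation. A minimal userspace sketch (not from the kernel tree; assumes Linux with glibc and uses the raw syscall, since glibc provides no futex wrapper):

/* wake_op_demo.c: exercises the arch_futex_atomic_op_inuser() path. */
#include <linux/futex.h>
#include <stdint.h>
#include <stdio.h>
#include <sys/syscall.h>
#include <unistd.h>

static uint32_t futex1, futex2 = 1;

int main(void)
{
	/* Atomically set futex2 = 0; wake one waiter on futex1, and one
	 * waiter on futex2 if the old futex2 value compared equal to 1. */
	int op = FUTEX_OP(FUTEX_OP_SET, 0, FUTEX_OP_CMP_EQ, 1);
	long woken = syscall(SYS_futex, &futex1, FUTEX_WAKE_OP, 1,
			     (void *)1UL /* val2: wake limit for futex2 */,
			     &futex2, op);

	/* With no waiters nothing is woken, but the atomic op still ran. */
	printf("woken=%ld futex2=%u\n", woken, futex2);
	return 0;
}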