v5.4
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
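
/*
 * Every primitive below follows the same ll/sc retry shape (sketch only,
 * not emitted code; register names are illustrative):
 *
 *	1:	ll	%0, (m)		# load-linked: begin the link
 *		...			# compute the new value in $1
 *		sc	$1, (m)		# store-conditional: $1 := success
 *	__scbeqz	$1, 1b		# link broken by another writer: retry
 */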

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})

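/*
 * Illustrative use (sketch; lock_word is hypothetical, not part of this
 * header): because xchg() returns the previous value, a crude test-and-set
 * lock can be built from it:
 *
 *	while (xchg(&lock_word, 1))	# 1 == "held": spin on contention
 *		cpu_relax();
 *	... critical section ...
 *	smp_store_release(&lock_word, 0);
 */
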
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: __LLSC_CLOBBER);					\
		loongson_llsc_mb();					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
			unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})

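/*
 * Illustrative use (sketch; not part of this header): cmpxchg() returns
 * the value found at *p, so callers retry until that value matches the
 * one the update was derived from:
 *
 *	old = READ_ONCE(*p);
 *	for (;;) {
 *		seen = cmpxchg(p, old, old + 1);
 *		if (seen == old)
 *			break;		# our increment was stored
 *		old = seen;		# lost a race: retry
 *	}
 */
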
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values into a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	loongson_llsc_mb();
	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
#  if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
#  else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
#  endif
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __scbeqz "	%L1, 1b				\n"
	"	.set	pop				\n"
	"2:						\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");
	loongson_llsc_mb();

	local_irq_restore(flags);
	return ret;
}

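/*
 * The register gymnastics above, as plain C (sketch only): on a 32-bit
 * kernel a 64-bit value occupies an %M (most significant) / %L (least
 * significant) register pair, so __cmpxchg64() splits and recombines by
 * hand:
 *
 *	hi = (u32)(v >> 32);		# dsra	%M0, %L0, 32
 *	lo = (u32)v;			# sll	%L0, %L0, 0
 *	v  = ((u64)hi << 32) | lo;	# dsll/dsrl/or (dins on MIPS r2+)
 */
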
#  define cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only use cmpxchg64 if we know that the CPU supports	\
	 * 64-bits, i.e. lld & scd. Our call to __cmpxchg64_unsupported	\
	 * will cause a build error unless cpu_has_64bits is a		\
	 * compile-time constant 1.					\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc) {			\
		smp_mb__before_llsc();					\
		__res = __cmpxchg64((ptr), __old, __new);		\
		smp_llsc_mb();						\
	} else {							\
		__res = __cmpxchg64_unsupported();			\
	}								\
									\
	__res;								\
})

# else /* !CONFIG_SMP */
#  define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */
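
How a caller typically uses this interface: cmpxchg() returns the value it
observed at the location, so the classic pattern is a retry loop that keeps
going until the observed value is the one the update was computed from. The
sketch below is illustrative only (the function and variable names are
hypothetical, not part of this header), assuming <linux/atomic.h> supplies
READ_ONCE() and the cmpxchg() defined above:

#include <linux/atomic.h>

/* Hypothetical example, not part of cmpxchg.h: add @a to *@v unless the
 * current value is @u; returns nonzero if the add was performed. */
static inline int example_add_unless(unsigned int *v, unsigned int a,
				     unsigned int u)
{
	unsigned int old = READ_ONCE(*v);	/* snapshot current value */

	while (old != u) {
		/* cmpxchg() returns the value it found at *v */
		unsigned int seen = cmpxchg(v, old, old + a);

		if (seen == old)		/* our store went in */
			return 1;
		old = seen;			/* raced: retry from new value */
	}
	return 0;
}
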
v5.9
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/llsc.h>
#include <asm/sync.h>
#include <asm/war.h>

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" __SYNC(full, loongson3_war) "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __SC_BEQZ	"$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * In the Loongson3 workaround case __xchg_asm() already	\
	 * contains a completion barrier prior to the LL, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (!__SYNC_loongson3_war)					\
		smp_mb__before_llsc();					\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})

#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" __SYNC(full, loongson3_war) "		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __SC_BEQZ	"$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:	" __SYNC(full, loongson3_war) "		\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
			unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * In the Loongson3 workaround case __cmpxchg_asm() already	\
	 * contains a completion barrier prior to the LL, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (!__SYNC_loongson3_war)					\
		smp_mb__before_llsc();					\
									\
	__res = cmpxchg_local((ptr), (old), (new));			\
									\
	/*								\
	 * In the Loongson3 workaround case __cmpxchg_asm() already	\
	 * contains a completion barrier after the SC, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (!__SYNC_loongson3_war)					\
		smp_llsc_mb();						\
									\
	__res;								\
})

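/*
 * Net barrier placement of the cmpxchg() wrapper above (sketch,
 * simplified; __SYNC_loongson3_war is a compile-time constant from
 * asm/sync.h):
 *
 *	workaround off:	smp_mb__before_llsc(); ll/sc loop; smp_llsc_mb();
 *	workaround on:	sync; ll/sc loop; sync	(emitted inside the asm)
 */
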
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values into a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"	" __SYNC(full, loongson3_war) "		\n"
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
#  if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
#  else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
#  endif
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __SC_BEQZ "%L1, 1b				\n"
	"	.set	pop				\n"
	"2:	" __SYNC(full, loongson3_war) "		\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");

	local_irq_restore(flags);
	return ret;
}

#  define cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only use cmpxchg64 if we know that the CPU supports	\
	 * 64-bits, i.e. lld & scd. Our call to __cmpxchg64_unsupported	\
	 * will cause a build error unless cpu_has_64bits is a		\
	 * compile-time constant 1.					\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc) {			\
		smp_mb__before_llsc();					\
		__res = __cmpxchg64((ptr), __old, __new);		\
		smp_llsc_mb();						\
	} else {							\
		__res = __cmpxchg64_unsupported();			\
	}								\
									\
	__res;								\
})

# else /* !CONFIG_SMP */
#  define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#endif /* __ASM_CMPXCHG_H */
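
The v5.9 rework changes barrier placement, not caller-visible semantics: the
Loongson3 ordering barrier is now emitted inside the asm via
__SYNC(full, loongson3_war), and the C-level barriers before (and, for
cmpxchg(), after) the ll/sc sequence are skipped when that workaround already
provides a full barrier. Callers are unaffected; for instance, a 64-bit
counter on a 32-bit SMP kernel can still be updated through cmpxchg64(),
backed by the lld/scd-based __cmpxchg64() above whenever cpu_has_64bits
holds. A minimal sketch (hypothetical function, not part of the kernel):

#include <linux/atomic.h>

/* Hypothetical example, not part of cmpxchg.h: atomically add @delta to a
 * 64-bit counter using only cmpxchg64(). */
static inline void example_add64(u64 *p, u64 delta)
{
	u64 old = READ_ONCE(*p);
	u64 seen;

	for (;;) {
		seen = cmpxchg64(p, old, old + delta);
		if (seen == old)	/* stored old + delta */
			break;
		old = seen;		/* lost a race: retry with fresh value */
	}
}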