/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
	unsigned long addr, old;
	int shift;

	switch (size) {
	case 1:
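		/*
		 * CS only operates on an aligned word, so compute the bit
		 * position of the byte within its (big-endian) word and
		 * round the address down to that word. For example, a byte
		 * at word offset 1 gives shift = (3 ^ 1) << 3 = 16, i.e.
		 * bits 16..23 of the word, and ~(0xff << 16) masks out
		 * exactly that byte.
		 */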
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 2:
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 4:
		asm volatile(
			"	l	%0,%3\n"
			"0:	cs	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) ptr)
			: "d" (x), "Q" (*(int *) ptr)
			: "memory", "cc");
		return old;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	lg	%0,%3\n"
			"0:	csg	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=m" (*(long *) ptr)
			: "d" (x), "Q" (*(long *) ptr)
			: "memory", "cc");
		return old;
#endif /* CONFIG_64BIT */
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr, x)							  \
({									  \
	__typeof__(*(ptr)) __ret;					  \
	__ret = (__typeof__(*(ptr)))					  \
		__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
	__ret;								  \
})
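
/*
 * Illustrative use of xchg() (a sketch, not part of this header): swap in a
 * new value and obtain the previous one in a single atomic operation. The
 * variable and helper below are made up for the example:
 *
 *	static unsigned int pending_flags;
 *
 *	static unsigned int take_pending_flags(void)
 *	{
 *		return xchg(&pending_flags, 0);
 *	}
 */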

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long addr, prev, tmp;
	int shift;

	switch (size) {
	case 1:
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
		asm volatile(
			"	cs	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(int *) ptr)
			: "0" (old), "d" (new), "Q" (*(int *) ptr)
			: "memory", "cc");
		return prev;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	csg	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(long *) ptr)
			: "0" (old), "d" (new), "Q" (*(long *) ptr)
			: "memory", "cc");
		return prev;
#endif /* CONFIG_64BIT */
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, o, n)						 \
({									 \
	__typeof__(*(ptr)) __ret;					 \
	__ret = (__typeof__(*(ptr)))					 \
		__cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
			  sizeof(*(ptr)));				 \
	__ret;								 \
})
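
/*
 * Illustrative use of cmpxchg() (a sketch, not part of this header): the
 * usual read/compute/compare-and-swap retry loop. The counter and helper
 * below are made up for the example:
 *
 *	static unsigned int counter;
 *
 *	static unsigned int counter_inc(void)
 *	{
 *		unsigned int old, new;
 *
 *		do {
 *			old = counter;
 *			new = old + 1;
 *		} while (cmpxchg(&counter, old, new) != old);
 *		return new;
 *	}
 */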

#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)						\
({									\
	cmpxchg((ptr), (o), (n));					\
})
#else /* CONFIG_64BIT */
static inline unsigned long long __cmpxchg64(void *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};
	unsigned long long *ullptr = ptr;

	asm volatile(
		"	cds	%0,%2,%1"
		: "+d" (rp_old), "+Q" (*ullptr)
		: "d" (rp_new)
		: "memory", "cc");
	return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)				\
({							\
	__typeof__(*(ptr)) __ret;			\
	__ret = (__typeof__(*(ptr)))			\
		__cmpxchg64((ptr),			\
			    (unsigned long long)(o),	\
			    (unsigned long long)(n));	\
	__ret;						\
})
#endif /* CONFIG_64BIT */
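
/*
 * Illustrative use of cmpxchg64() (a sketch, not part of this header): the
 * same retry loop as above on a 64-bit quantity. On 64-bit kernels this is
 * plain cmpxchg(); on 31-bit kernels __cmpxchg64() uses CDS on a register
 * pair. The sequence counter below is made up for the example:
 *
 *	static u64 event_seq;
 *
 *	static u64 event_seq_next(void)
 *	{
 *		u64 old, new;
 *
 *		do {
 *			old = event_seq;
 *			new = old + 1;
 *		} while (cmpxchg64(&event_seq, old, new) != old);
 *		return new;
 *	}
 */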

#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)		\
({									\
	register __typeof__(*(p1)) __old1 asm("2") = (o1);		\
	register __typeof__(*(p2)) __old2 asm("3") = (o2);		\
	register __typeof__(*(p1)) __new1 asm("4") = (n1);		\
	register __typeof__(*(p2)) __new2 asm("5") = (n2);		\
	int cc;								\
	asm volatile(							\
			insn   " %[old],%[new],%[ptr]\n"		\
		"	ipm	%[cc]\n"				\
		"	srl	%[cc],28"				\
		: [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2)	\
		: [new] "d" (__new1), "d" (__new2),			\
		  [ptr] "Q" (*(p1)), "Q" (*(p2))			\
		: "memory", "cc");					\
	!cc;								\
})

#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")

#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")

extern void __cmpxchg_double_called_with_bad_pointer(void);

#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({									\
	int __ret;							\
	switch (sizeof(*(p1))) {					\
	case 4:								\
		__ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2);	\
		break;							\
	case 8:								\
		__ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2);	\
		break;							\
	default:							\
		__cmpxchg_double_called_with_bad_pointer();		\
	}								\
	__ret;								\
})

#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	__typeof__(p1) __p1 = (p1);					\
	__typeof__(p2) __p2 = (p2);					\
	int __ret;							\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	if (sizeof(long) == 4)						\
		__ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2);	\
	else								\
		__ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2);	\
	__ret;								\
})
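
/*
 * Illustrative use of cmpxchg_double() (a sketch, not part of this header):
 * atomically replace two adjacent longs, e.g. a pointer plus a generation
 * tag, in one CDS/CDSG. The two words must be contiguous (checked by the
 * VM_BUG_ON above) and the pair suitably aligned for the instruction. The
 * structure and helper below are made up for the example:
 *
 *	struct tagged_ptr {
 *		unsigned long ptr;
 *		unsigned long tag;
 *	} __attribute__((aligned(2 * sizeof(unsigned long))));
 *
 *	static int tagged_ptr_update(struct tagged_ptr *tp,
 *				     unsigned long old_ptr, unsigned long old_tag,
 *				     unsigned long new_ptr, unsigned long new_tag)
 *	{
 *		return cmpxchg_double(&tp->ptr, &tp->tag,
 *				      old_ptr, old_tag, new_ptr, new_tag);
 *	}
 */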

#define system_has_cmpxchg_double()	1

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 1:
	case 2:
	case 4:
#ifdef CONFIG_64BIT
	case 8:
#endif
		return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg_local((ptr), (unsigned long)(o),		\
				(unsigned long)(n), sizeof(*(ptr)));	\
	__ret;								\
})
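
/*
 * Illustrative use of cmpxchg_local() (a sketch, not part of this header):
 * same calling convention as cmpxchg(), but only guaranteed atomic with
 * respect to the current CPU, which is enough for data never touched from
 * other CPUs. The per-cpu counter below is made up for the example:
 *
 *	static DEFINE_PER_CPU(unsigned long, local_hits);
 *
 *	static void local_hit(void)
 *	{
 *		unsigned long *p = &__get_cpu_var(local_hits);
 *		unsigned long old;
 *
 *		do {
 *			old = *p;
 *		} while (cmpxchg_local(p, old, old + 1) != old);
 *	}
 */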

#define cmpxchg64_local(ptr, o, n)	cmpxchg64((ptr), (o), (n))

#endif /* __ASM_CMPXCHG_H */