SPARC64 atomic operations (atomic.S), shown at two kernel releases.

Linux v6.2

/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

	.text

	/* Three versions of the atomic routines: one that
	 * does not return a value and does not perform
	 * memory barriers, and two which return
	 * a value (the new and the old value, respectively)
	 * and do the barriers.
	 */

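In C terms, the three flavors differ only in what they hand back and in their ordering guarantees: the plain op returns nothing and implies no barrier, the _return form returns the new value, and the fetch_ form returns the old value, the latter two with full ordering. A rough userspace sketch of those semantics using C11 atomics (function names here are illustrative, not the kernel's):

#include <stdatomic.h>

/* Semantics only; the kernel implements these as the CAS loops below. */
static void atomic_add_sketch(atomic_int *v, int i)	/* no return, no barrier */
{
	atomic_fetch_add_explicit(v, i, memory_order_relaxed);
}

static int atomic_add_return_sketch(atomic_int *v, int i)	/* new value, barriers */
{
	return atomic_fetch_add_explicit(v, i, memory_order_seq_cst) + i;
}

static int atomic_fetch_add_sketch(atomic_int *v, int i)	/* old value, barriers */
{
	return atomic_fetch_add_explicit(v, i, memory_order_seq_cst);
}
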
#define ATOMIC_OP(op)							\
ENTRY(arch_atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_##op);						\
EXPORT_SYMBOL(arch_atomic_##op);

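Each generated body is the classic compare-and-swap retry loop: lduw loads the current value, op computes old op increment into %g7, cas publishes it only if memory still holds the old value, and on contention the code branches out to spin before retrying. A minimal C sketch of that loop, assuming a simple capped exponential backoff in place of the real BACKOFF_SETUP/BACKOFF_SPIN macros (whose actual details live in asm/backoff.h):

#include <stdatomic.h>

static void backoff_spin_sketch(unsigned spins)
{
	volatile unsigned n = spins;	/* crude delay standing in for BACKOFF_SPIN */
	while (n--)
		;
}

static void atomic_op_sketch(atomic_int *v, int i)
{
	unsigned backoff = 1;		/* BACKOFF_SETUP(%o2) */
	int old = atomic_load_explicit(v, memory_order_relaxed);	/* lduw */

	/* cas [%o1], %g1, %g7: store old+i only if *v still equals old;
	 * on failure, old is refreshed with the value actually seen.   */
	while (!atomic_compare_exchange_weak_explicit(v, &old, old + i,
						      memory_order_relaxed,
						      memory_order_relaxed)) {
		backoff_spin_sketch(backoff);	/* BACKOFF_SPIN(%o2, %o3, 1b) */
		if (backoff < (1u << 12))	/* cap assumed; the real cap is in asm/backoff.h */
			backoff <<= 1;
	}
}
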
#define ATOMIC_OP_RETURN(op)						\
ENTRY(arch_atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_##op##_return);					\
EXPORT_SYMBOL(arch_atomic_##op##_return);

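Two details in ATOMIC_OP_RETURN are easy to miss: the delay slot of the failed branch recomputes the new value into %g1 (harmless on retry, since %g1 is reloaded at 1:), and sra %g1, 0, %o0 on the return path is an arithmetic shift by zero bits, i.e. a sign extension of the 32-bit result into the 64-bit return register. The C equivalent of that final step is just a widening cast (sketch):

/* What "sra %g1, 0, %o0" does: return the 32-bit result sign-extended. */
static long return_32bit_result(int new_value)
{
	return (long)new_value;		/* implicit sign extension, as sra by 0 performs */
}
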
#define ATOMIC_FETCH_OP(op)						\
ENTRY(arch_atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_fetch_##op);					\
EXPORT_SYMBOL(arch_atomic_fetch_##op);

ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define ATOMIC64_OP(op)							\
ENTRY(arch_atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_##op);						\
EXPORT_SYMBOL(arch_atomic64_##op);

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(arch_atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_##op##_return);					\
EXPORT_SYMBOL(arch_atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)						\
ENTRY(arch_atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_fetch_##op);					\
EXPORT_SYMBOL(arch_atomic64_fetch_##op);

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

ENTRY(arch_atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(arch_atomic64_dec_if_positive)
EXPORT_SYMBOL(arch_atomic64_dec_if_positive)
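
arch_atomic64_dec_if_positive decrements the counter only if the result stays non-negative, but returns the decremented value either way, so a negative return tells the caller that nothing was stored. A C sketch of the same contract (hypothetical name, C11 atomics standing in for the casx loop):

#include <stdatomic.h>

static long long dec_if_positive_sketch(_Atomic long long *v)
{
	long long old = atomic_load_explicit(v, memory_order_relaxed);	/* ldx */

	do {
		if (old <= 0)		/* brlez,pn %g1, 3f: skip the store */
			break;
	} while (!atomic_compare_exchange_weak_explicit(v, &old, old - 1,
							memory_order_seq_cst,
							memory_order_relaxed));
	return old - 1;			/* returned whether or not it was stored */
}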

Linux v4.6

/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */

#define ATOMIC_OP(op)							\
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op);							\

#define ATOMIC_OP_RETURN(op)						\
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic_##op##_return);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

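ATOMIC_OPS here is plain token-pasting composition: ATOMIC_OPS(add) expands to both generators, while and/or/xor get only the void variant in this release (the fetch_* forms appear in later kernels, as the v6.2 listing above shows). A tiny standalone illustration of the same ##-pasting pattern, with names invented for the demo:

#include <stdio.h>

#define OP(name)	void name##_op(void)	   { puts(#name ": no return"); }
#define OP_RETURN(name)	int  name##_op_return(void) { puts(#name ": returns"); return 0; }
#define OPS(name)	OP(name) OP_RETURN(name)

OPS(add)	/* emits add_op() and add_op_return() */
OP(and)		/* emits only and_op(), like ATOMIC_OP(and) above */

int main(void)
{
	add_op();
	add_op_return();
	and_op();
	return 0;
}
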
#define ATOMIC64_OP(op)							\
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op);							\

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(atomic64_##op##_return);

#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)
ATOMIC64_OP(and)
ATOMIC64_OP(or)
ATOMIC64_OP(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)