v3.15
 
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
ENTRY(atomic_add) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_add)
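
The routine above is the classic compare-and-swap retry loop: load the old value, compute old + increment into a scratch register, and cas it back, retrying (with exponential backoff on SMP, via BACKOFF_LABEL/BACKOFF_SPIN) whenever another CPU changed the word in between. A rough C rendering of that logic, as a sketch using the GCC __atomic builtins rather than anything from this file:

#include <stdint.h>

/* Hypothetical helper mirroring atomic_add: no return value and
 * relaxed ordering, matching the "no barriers" variant above. */
static void sketch_atomic_add(int32_t inc, int32_t *ptr)
{
	int32_t old = __atomic_load_n(ptr, __ATOMIC_RELAXED);

	/* lduw/add/cas/cmp/bne: retry until the CAS observes an
	 * unchanged value; a failed CAS reloads 'old' for us. */
	while (!__atomic_compare_exchange_n(ptr, &old, old + inc,
					    1, __ATOMIC_RELAXED,
					    __ATOMIC_RELAXED))
		;
}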

ENTRY(atomic_sub) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_sub)

ENTRY(atomic_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 add	%g1, %o0, %g1
	retl
	 sra	%g1, 0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_add_ret)
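
atomic_add_ret runs the same loop but recomputes the new value in the branch delay slot and returns it; the final sra %g1, 0, %o0 sign-extends the 32-bit result into the 64-bit return register. In a C sketch (hypothetical helper, GCC builtins) the sign extension is simply the int32_t return type, and the "does the barriers" behaviour is approximated with a seq_cst success ordering:

#include <stdint.h>

static int32_t sketch_atomic_add_ret(int32_t inc, int32_t *ptr)
{
	int32_t old = __atomic_load_n(ptr, __ATOMIC_RELAXED);

	while (!__atomic_compare_exchange_n(ptr, &old, old + inc,
					    1, __ATOMIC_SEQ_CST,
					    __ATOMIC_RELAXED))
		;
	return old + inc;	/* the new value, as the _ret variants return */
}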

ENTRY(atomic_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 sub	%g1, %o0, %g1
	retl
	 sra	%g1, 0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_sub_ret)

ENTRY(atomic64_add) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_add)

ENTRY(atomic64_sub) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_sub)

ENTRY(atomic64_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 add	%g1, %o0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_add_ret)

ENTRY(atomic64_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 sub	%g1, %o0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_sub_ret)
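
The atomic64_* routines are the same loops with 64-bit loads (ldx), 64-bit CAS (casx) and the %xcc condition codes; note the _ret variants need no sra, since the result already fills the register. A one-line C sketch of atomic64_sub_ret using a GCC builtin that returns the post-operation value (hypothetical helper name):

#include <stdint.h>

static int64_t sketch_atomic64_sub_ret(int64_t dec, int64_t *ptr)
{
	/* __atomic_sub_fetch returns the value *after* subtraction. */
	return __atomic_sub_fetch(ptr, dec, __ATOMIC_SEQ_CST);
}
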
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
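
atomic64_dec_if_positive only stores when the current value is positive (the brlez,pn early exit skips the casx) but always returns old - 1, so a non-positive return tells the caller nothing was decremented. A C sketch of those semantics (hypothetical helper, GCC builtins):

#include <stdint.h>

static int64_t sketch_atomic64_dec_if_positive(int64_t *ptr)
{
	int64_t old = __atomic_load_n(ptr, __ATOMIC_RELAXED);

	do {
		if (old <= 0)
			break;	/* like brlez: return without storing */
	} while (!__atomic_compare_exchange_n(ptr, &old, old - 1,
					      1, __ATOMIC_SEQ_CST,
					      __ATOMIC_RELAXED));
	return old - 1;
}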
 
v6.13.7
/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/export.h>
#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Three versions of the atomic routines: one that
	 * does not return a value and does not perform
	 * memory barriers, and two which return a value,
	 * the new and the old value respectively, and do
	 * the barriers.
	 */
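
The three flavours the macros below stamp out differ only in what reaches %o0: arch_atomic_##op returns nothing, arch_atomic_##op##_return yields the post-operation value, and arch_atomic_fetch_##op yields the pre-operation value. A C sketch of that contrast for add (hypothetical helpers, GCC builtins):

#include <stdint.h>

static int32_t sketch_add_return(int32_t inc, int32_t *ptr)
{
	/* _return: the value *after* the addition. */
	return __atomic_add_fetch(ptr, inc, __ATOMIC_SEQ_CST);
}

static int32_t sketch_fetch_add(int32_t inc, int32_t *ptr)
{
	/* fetch_: the value *before* the addition. */
	return __atomic_fetch_add(ptr, inc, __ATOMIC_SEQ_CST);
}
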
#define ATOMIC_OP(op)							\
ENTRY(arch_atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_##op);						\
EXPORT_SYMBOL(arch_atomic_##op);

#define ATOMIC_OP_RETURN(op)						\
ENTRY(arch_atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_##op##_return);					\
EXPORT_SYMBOL(arch_atomic_##op##_return);

#define ATOMIC_FETCH_OP(op)						\
ENTRY(arch_atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_fetch_##op);					\
EXPORT_SYMBOL(arch_atomic_fetch_##op);

ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
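
Each instantiation line above token-pastes the operation name into one exported routine, so ATOMIC_OP(add) emits arch_atomic_add, ATOMIC_OP_RETURN(sub) emits arch_atomic_sub_return, and so on; and/or/xor get no _return flavour. The same ## pattern in a minimal C sketch (hypothetical names):

#include <stdint.h>

#define SKETCH_OP(op, expr)					\
static int32_t sketch_##op(int32_t a, int32_t b)		\
{								\
	return (expr);						\
}

SKETCH_OP(add, a + b)	/* defines sketch_add() */
SKETCH_OP(sub, a - b)	/* defines sketch_sub() */
#undef SKETCH_OP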

#define ATOMIC64_OP(op)							\
ENTRY(arch_atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_##op);						\
EXPORT_SYMBOL(arch_atomic64_##op);

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(arch_atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_##op##_return);					\
EXPORT_SYMBOL(arch_atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)						\
ENTRY(arch_atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_fetch_##op);					\
EXPORT_SYMBOL(arch_atomic64_fetch_##op);

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

ENTRY(arch_atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(arch_atomic64_dec_if_positive)
EXPORT_SYMBOL(arch_atomic64_dec_if_positive)