Linux Audio

Check our new training course

Loading...
v3.1
  1/* atomic.S: These things are too big to do inline.
  2 *
  3 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
  4 */
  5
 
  6#include <asm/asi.h>
  7#include <asm/backoff.h>
  8
  9	.text
 10
 11	/* Two versions of the atomic routines, one that
 12	 * does not return a value and does not perform
 13	 * memory barriers, and a second which returns
 14	 * a value and does the barriers.
 15	 */
  16	.globl	atomic_add
  17	.type	atomic_add,#function
  18atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
	/* 32-bit atomic add, no return value, no memory barriers.
	 * CAS loop: load, add, compare-and-swap; on contention retry
	 * through the exponential-backoff path at 2:.
	 */
  19	BACKOFF_SETUP(%o2)			/* %o2 = backoff state (see asm/backoff.h) */
  201:	lduw	[%o1], %g1			/* %g1 = current counter value */
  21	add	%g1, %o0, %g7			/* %g7 = old + increment */
  22	cas	[%o1], %g1, %g7			/* *ptr = %g7 iff *ptr == %g1; %g7 = prior *ptr */
  23	cmp	%g1, %g7			/* did the CAS lose a race? */
  24	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)	/* predicted not-taken: retry path */
  25	 nop					/* branch delay slot */
  26	retl
  27	 nop					/* delay slot: nothing to return */
  282:	BACKOFF_SPIN(%o2, %o3, 1b)		/* spin (scratch %o3), then retry at 1b */
  29	.size	atomic_add, .-atomic_add
 30
  31	.globl	atomic_sub
  32	.type	atomic_sub,#function
  33atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	/* 32-bit atomic subtract, no return value, no memory barriers.
	 * Identical CAS-loop shape to atomic_add with sub in place of add.
	 */
  34	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  351:	lduw	[%o1], %g1			/* %g1 = current counter value */
  36	sub	%g1, %o0, %g7			/* %g7 = old - decrement */
  37	cas	[%o1], %g1, %g7			/* *ptr = %g7 iff *ptr == %g1; %g7 = prior *ptr */
  38	cmp	%g1, %g7			/* CAS failed if values differ */
  39	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  40	 nop					/* branch delay slot */
  41	retl
  42	 nop					/* delay slot: nothing to return */
  432:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  44	.size	atomic_sub, .-atomic_sub
 45
  46	.globl	atomic_add_ret
  47	.type	atomic_add_ret,#function
  48atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	/* 32-bit atomic add that returns the new value in %o0.
	 * The branch delay slot computes the result; it also executes on the
	 * retry path, but that is harmless since 1: reloads %g1.
	 */
  49	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  501:	lduw	[%o1], %g1			/* %g1 = current counter value */
  51	add	%g1, %o0, %g7			/* %g7 = old + increment */
  52	cas	[%o1], %g1, %g7			/* *ptr = %g7 iff *ptr == %g1; %g7 = prior *ptr */
  53	cmp	%g1, %g7			/* CAS failed if values differ */
  54	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  55	 add	%g1, %o0, %g1			/* delay slot: %g1 = new value */
  56	retl
  57	 sra	%g1, 0, %o0			/* delay slot: sign-extend 32-bit result to 64-bit %o0 */
  582:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  59	.size	atomic_add_ret, .-atomic_add_ret
 60
  61	.globl	atomic_sub_ret
  62	.type	atomic_sub_ret,#function
  63atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	/* 32-bit atomic subtract that returns the new value in %o0.
	 * Mirror image of atomic_add_ret with sub in place of add.
	 */
  64	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  651:	lduw	[%o1], %g1			/* %g1 = current counter value */
  66	sub	%g1, %o0, %g7			/* %g7 = old - decrement */
  67	cas	[%o1], %g1, %g7			/* *ptr = %g7 iff *ptr == %g1; %g7 = prior *ptr */
  68	cmp	%g1, %g7			/* CAS failed if values differ */
  69	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  70	 sub	%g1, %o0, %g1			/* delay slot: %g1 = new value (reload at 1: makes this safe on retry) */
  71	retl
  72	 sra	%g1, 0, %o0			/* delay slot: sign-extend 32-bit result to 64-bit %o0 */
  732:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  74	.size	atomic_sub_ret, .-atomic_sub_ret
 75
  76	.globl	atomic64_add
  77	.type	atomic64_add,#function
  78atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
	/* 64-bit atomic add, no return value, no memory barriers.
	 * Same CAS loop as atomic_add but with ldx/casx and %xcc (64-bit)
	 * condition codes.
	 */
  79	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  801:	ldx	[%o1], %g1			/* %g1 = current 64-bit value */
  81	add	%g1, %o0, %g7			/* %g7 = old + increment */
  82	casx	[%o1], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
  83	cmp	%g1, %g7			/* CAS failed if values differ */
  84	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  85	 nop					/* branch delay slot */
  86	retl
  87	 nop					/* delay slot: nothing to return */
  882:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  89	.size	atomic64_add, .-atomic64_add
 90
  91	.globl	atomic64_sub
  92	.type	atomic64_sub,#function
  93atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	/* 64-bit atomic subtract, no return value, no memory barriers. */
  94	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  951:	ldx	[%o1], %g1			/* %g1 = current 64-bit value */
  96	sub	%g1, %o0, %g7			/* %g7 = old - decrement */
  97	casx	[%o1], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
  98	cmp	%g1, %g7			/* CAS failed if values differ */
  99	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
 100	 nop					/* branch delay slot */
 101	retl
 102	 nop					/* delay slot: nothing to return */
 1032:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
 104	.size	atomic64_sub, .-atomic64_sub
105
 106	.globl	atomic64_add_ret
 107	.type	atomic64_add_ret,#function
 108atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	/* 64-bit atomic add returning the new value in %o0.
	 * No sign-extension step needed (full 64-bit result), so the new
	 * value is computed directly in the retl delay slot.
	 */
 109	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
 1101:	ldx	[%o1], %g1			/* %g1 = current 64-bit value */
 111	add	%g1, %o0, %g7			/* %g7 = old + increment */
 112	casx	[%o1], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
 113	cmp	%g1, %g7			/* CAS failed if values differ */
 114	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
 115	 nop					/* branch delay slot */
 116	retl
 117	 add	%g1, %o0, %o0			/* delay slot: %o0 = new value */
 1182:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
 119	.size	atomic64_add_ret, .-atomic64_add_ret
120
 121	.globl	atomic64_sub_ret
 122	.type	atomic64_sub_ret,#function
 123atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	/* 64-bit atomic subtract returning the new value in %o0. */
 124	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
 1251:	ldx	[%o1], %g1			/* %g1 = current 64-bit value */
 126	sub	%g1, %o0, %g7			/* %g7 = old - decrement */
 127	casx	[%o1], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
 128	cmp	%g1, %g7			/* CAS failed if values differ */
 129	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
 130	 nop					/* branch delay slot */
 131	retl
 132	 sub	%g1, %o0, %o0			/* delay slot: %o0 = new value */
 1332:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
 134	.size	atomic64_sub_ret, .-atomic64_sub_ret
 
 
 
 
 
 
 
 
 
 
 
 
 
 
v3.15
  1/* atomic.S: These things are too big to do inline.
  2 *
  3 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
  4 */
  5
  6#include <linux/linkage.h>
  7#include <asm/asi.h>
  8#include <asm/backoff.h>
  9
 10	.text
 11
 12	/* Two versions of the atomic routines, one that
 13	 * does not return a value and does not perform
 14	 * memory barriers, and a second which returns
 15	 * a value and does the barriers.
 16	 */
  17ENTRY(atomic_add) /* %o0 = increment, %o1 = atomic_ptr */
	/* 32-bit atomic add, no return value, no memory barriers.
	 * v3.15 form of the same routine: ENTRY/ENDPROC (linux/linkage.h)
	 * replace the open-coded .globl/.type/.size directives.
	 */


  18	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  191:	lduw	[%o1], %g1			/* %g1 = current counter value */
  20	add	%g1, %o0, %g7			/* %g7 = old + increment */
  21	cas	[%o1], %g1, %g7			/* *ptr = %g7 iff *ptr == %g1; %g7 = prior *ptr */
  22	cmp	%g1, %g7			/* CAS failed if values differ */
  23	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  24	 nop					/* branch delay slot */
  25	retl
  26	 nop					/* delay slot: nothing to return */
  272:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  28ENDPROC(atomic_add)
 29
  30ENTRY(atomic_sub) /* %o0 = decrement, %o1 = atomic_ptr */
	/* 32-bit atomic subtract, no return value, no memory barriers. */


  31	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  321:	lduw	[%o1], %g1			/* %g1 = current counter value */
  33	sub	%g1, %o0, %g7			/* %g7 = old - decrement */
  34	cas	[%o1], %g1, %g7			/* *ptr = %g7 iff *ptr == %g1; %g7 = prior *ptr */
  35	cmp	%g1, %g7			/* CAS failed if values differ */
  36	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  37	 nop					/* branch delay slot */
  38	retl
  39	 nop					/* delay slot: nothing to return */
  402:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  41ENDPROC(atomic_sub)
 42
  43ENTRY(atomic_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
	/* 32-bit atomic add returning the new value in %o0.
	 * The failing-branch delay slot computes the result; on the retry
	 * path the clobber of %g1 is harmless since 1: reloads it.
	 */


  44	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  451:	lduw	[%o1], %g1			/* %g1 = current counter value */
  46	add	%g1, %o0, %g7			/* %g7 = old + increment */
  47	cas	[%o1], %g1, %g7			/* *ptr = %g7 iff *ptr == %g1; %g7 = prior *ptr */
  48	cmp	%g1, %g7			/* CAS failed if values differ */
  49	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  50	 add	%g1, %o0, %g1			/* delay slot: %g1 = new value */
  51	retl
  52	 sra	%g1, 0, %o0			/* delay slot: sign-extend 32-bit result to 64-bit %o0 */
  532:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  54ENDPROC(atomic_add_ret)
 55
  56ENTRY(atomic_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
	/* 32-bit atomic subtract returning the new value in %o0. */


  57	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  581:	lduw	[%o1], %g1			/* %g1 = current counter value */
  59	sub	%g1, %o0, %g7			/* %g7 = old - decrement */
  60	cas	[%o1], %g1, %g7			/* *ptr = %g7 iff *ptr == %g1; %g7 = prior *ptr */
  61	cmp	%g1, %g7			/* CAS failed if values differ */
  62	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  63	 sub	%g1, %o0, %g1			/* delay slot: %g1 = new value (safe: 1: reloads %g1) */
  64	retl
  65	 sra	%g1, 0, %o0			/* delay slot: sign-extend 32-bit result to 64-bit %o0 */
  662:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  67ENDPROC(atomic_sub_ret)
 68
  69ENTRY(atomic64_add) /* %o0 = increment, %o1 = atomic_ptr */
	/* 64-bit atomic add, no return value, no memory barriers.
	 * Uses ldx/casx and %xcc (64-bit) condition codes.
	 */


  70	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  711:	ldx	[%o1], %g1			/* %g1 = current 64-bit value */
  72	add	%g1, %o0, %g7			/* %g7 = old + increment */
  73	casx	[%o1], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
  74	cmp	%g1, %g7			/* CAS failed if values differ */
  75	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  76	 nop					/* branch delay slot */
  77	retl
  78	 nop					/* delay slot: nothing to return */
  792:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  80ENDPROC(atomic64_add)
 81
  82ENTRY(atomic64_sub) /* %o0 = decrement, %o1 = atomic_ptr */
	/* 64-bit atomic subtract, no return value, no memory barriers. */


  83	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  841:	ldx	[%o1], %g1			/* %g1 = current 64-bit value */
  85	sub	%g1, %o0, %g7			/* %g7 = old - decrement */
  86	casx	[%o1], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
  87	cmp	%g1, %g7			/* CAS failed if values differ */
  88	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
  89	 nop					/* branch delay slot */
  90	retl
  91	 nop					/* delay slot: nothing to return */
  922:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
  93ENDPROC(atomic64_sub)
 94
  95ENTRY(atomic64_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
	/* 64-bit atomic add returning the new value in %o0.
	 * Full 64-bit result, so no sign-extension step is needed.
	 */


  96	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
  971:	ldx	[%o1], %g1			/* %g1 = current 64-bit value */
  98	add	%g1, %o0, %g7			/* %g7 = old + increment */
  99	casx	[%o1], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
 100	cmp	%g1, %g7			/* CAS failed if values differ */
 101	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
 102	 nop					/* branch delay slot */
 103	retl
 104	 add	%g1, %o0, %o0			/* delay slot: %o0 = new value */
 1052:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
 106ENDPROC(atomic64_add_ret)
107
 108ENTRY(atomic64_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
	/* 64-bit atomic subtract returning the new value in %o0. */


 109	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
 1101:	ldx	[%o1], %g1			/* %g1 = current 64-bit value */
 111	sub	%g1, %o0, %g7			/* %g7 = old - decrement */
 112	casx	[%o1], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
 113	cmp	%g1, %g7			/* CAS failed if values differ */
 114	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
 115	 nop					/* branch delay slot */
 116	retl
 117	 sub	%g1, %o0, %o0			/* delay slot: %o0 = new value */
 1182:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
 119ENDPROC(atomic64_sub_ret)
120
 121ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	/* Atomically decrement *%o0 only if the current value is > 0.
	 * Returns (old value - 1) in %o0 in every case: when old <= 0 the
	 * branch to 3: skips the casx, so no store happens and the caller
	 * sees a non-positive result signalling "not decremented".
	 */
 122	BACKOFF_SETUP(%o2)			/* %o2 = backoff state */
 1231:	ldx	[%o0], %g1			/* %g1 = current 64-bit value */
 124	brlez,pn %g1, 3f			/* old <= 0: skip the store entirely */
 125	 sub	%g1, 1, %g7			/* delay slot: %g7 = old - 1 */
 126	casx	[%o0], %g1, %g7			/* 64-bit CAS; %g7 = prior *ptr */
 127	cmp	%g1, %g7			/* CAS failed if values differ */
 128	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)	/* retry on contention */
 129	 nop					/* branch delay slot */
 1303:	retl
 131	 sub	%g1, 1, %o0			/* delay slot: %o0 = old - 1 (taken or skipped path) */
 1322:	BACKOFF_SPIN(%o2, %o3, 1b)		/* back off, then retry at 1b */
 133ENDPROC(atomic64_dec_if_positive)