/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

        .text

        /* Three versions of the atomic routines: one that
         * does not return a value and does not perform
         * memory barriers, and two which return
         * a value (the new and the old value, respectively)
         * and do the barriers.
         */

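        /* ATOMIC_OP builds the non-returning 32-bit operations.  Each one
         * is a compare-and-swap retry loop: load the current value, apply
         * the operation into a scratch register, and try to install the
         * result with cas.  If another CPU changed the word in the
         * meantime, cas returns a value different from the one we loaded
         * and we either retry immediately or, on SMP, spin in the
         * exponential backoff code at 2: (see BACKOFF_LABEL/BACKOFF_SPIN
         * in <asm/backoff.h>) before retrying.
         */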
#define ATOMIC_OP(op) \
ENTRY(arch_atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
        BACKOFF_SETUP(%o2); \
1:      lduw    [%o1], %g1; \
        op      %g1, %o0, %g7; \
        cas     [%o1], %g1, %g7; \
        cmp     %g1, %g7; \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b); \
         nop; \
        retl; \
         nop; \
2:      BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(arch_atomic_##op); \
EXPORT_SYMBOL(arch_atomic_##op);

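        /* ATOMIC_OP_RETURN is the same loop, but on success the new value
         * is recomputed into %g1 in the branch delay slot and returned in
         * %o0, sign-extended from 32 bits by the sra in the retl delay
         * slot.
         */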
#define ATOMIC_OP_RETURN(op) \
ENTRY(arch_atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
        BACKOFF_SETUP(%o2); \
1:      lduw    [%o1], %g1; \
        op      %g1, %o0, %g7; \
        cas     [%o1], %g1, %g7; \
        cmp     %g1, %g7; \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b); \
         op     %g1, %o0, %g1; \
        retl; \
         sra    %g1, 0, %o0; \
2:      BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(arch_atomic_##op##_return); \
EXPORT_SYMBOL(arch_atomic_##op##_return);

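        /* ATOMIC_FETCH_OP returns the value that was in memory before the
         * operation: %g1 still holds the old value that cas matched, so it
         * is simply sign-extended into %o0 on the way out.
         */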
#define ATOMIC_FETCH_OP(op) \
ENTRY(arch_atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
        BACKOFF_SETUP(%o2); \
1:      lduw    [%o1], %g1; \
        op      %g1, %o0, %g7; \
        cas     [%o1], %g1, %g7; \
        cmp     %g1, %g7; \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b); \
         nop; \
        retl; \
         sra    %g1, 0, %o0; \
2:      BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(arch_atomic_fetch_##op); \
EXPORT_SYMBOL(arch_atomic_fetch_##op);

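        /* Instantiate the 32-bit operations.  Only add and sub get
         * _return variants; the kernel atomic API defines no
         * "return new value" forms for and/or/xor, only the fetch forms.
         */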
ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

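        /* The 64-bit versions mirror the 32-bit macros above, using
         * ldx/casx and the %xcc condition codes, and returning full
         * 64-bit values with no sign-extension step.
         */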
#define ATOMIC64_OP(op) \
ENTRY(arch_atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
        BACKOFF_SETUP(%o2); \
1:      ldx     [%o1], %g1; \
        op      %g1, %o0, %g7; \
        casx    [%o1], %g1, %g7; \
        cmp     %g1, %g7; \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b); \
         nop; \
        retl; \
         nop; \
2:      BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(arch_atomic64_##op); \
EXPORT_SYMBOL(arch_atomic64_##op);

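        /* ATOMIC64_OP_RETURN returns the new value, computed straight
         * into %o0 in the retl delay slot.
         */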
#define ATOMIC64_OP_RETURN(op) \
ENTRY(arch_atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
        BACKOFF_SETUP(%o2); \
1:      ldx     [%o1], %g1; \
        op      %g1, %o0, %g7; \
        casx    [%o1], %g1, %g7; \
        cmp     %g1, %g7; \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b); \
         nop; \
        retl; \
         op     %g1, %o0, %o0; \
2:      BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(arch_atomic64_##op##_return); \
EXPORT_SYMBOL(arch_atomic64_##op##_return);

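        /* ATOMIC64_FETCH_OP returns the old value: %g1 is copied to %o0
         * in the retl delay slot.
         */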
#define ATOMIC64_FETCH_OP(op) \
ENTRY(arch_atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
        BACKOFF_SETUP(%o2); \
1:      ldx     [%o1], %g1; \
        op      %g1, %o0, %g7; \
        casx    [%o1], %g1, %g7; \
        cmp     %g1, %g7; \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b); \
         nop; \
        retl; \
         mov    %g1, %o0; \
2:      BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(arch_atomic64_fetch_##op); \
EXPORT_SYMBOL(arch_atomic64_fetch_##op);

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

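        /* Decrement the 64-bit counter at %o0 only if its current value
         * is greater than zero.  The return value is always the old value
         * minus one, so a negative result tells the caller that no
         * decrement was performed.
         */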
ENTRY(arch_atomic64_dec_if_positive) /* %o0 = atomic_ptr */
        BACKOFF_SETUP(%o2)
1:      ldx     [%o0], %g1
        brlez,pn %g1, 3f
         sub    %g1, 1, %g7
        casx    [%o0], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b)
         nop
3:      retl
         sub    %g1, 1, %o0
2:      BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(arch_atomic64_dec_if_positive)
EXPORT_SYMBOL(arch_atomic64_dec_if_positive)