/* NOTE(review): stray "Loading..." page artifact replaced; what follows are
 * two revisions of the same x86-32 atomic64 cmpxchg8b implementation. */
1/*
2 * atomic64_t for 586+
3 *
4 * Copyright © 2010 Luca Barbieri
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 */
11
12#include <linux/linkage.h>
13#include <asm/alternative-asm.h>
14
/*
 * read64 reg: atomically load the 64-bit value at (\reg) into %edx:%eax.
 *
 * cmpxchg8b compares %edx:%eax against the memory operand and, on a match,
 * stores %ecx:%ebx.  By copying %ebx/%ecx into %eax/%edx first, the "new"
 * value equals the "expected" value, so memory is left unchanged whether
 * the compare hits or misses, and %edx:%eax ends up with the current
 * 64-bit contents either way.  Clobbers %eax, %edx and flags.
 */
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
22
/*
 * atomic64_read_cx8: atomic 64-bit load.
 * In:  %ecx = pointer to the 64-bit variable
 * Out: %edx:%eax = current value (read atomically via locked cmpxchg8b)
 */
ENTRY(atomic64_read_cx8)
	read64 %ecx
	ret
ENDPROC(atomic64_read_cx8)
27
/*
 * atomic64_set_cx8: atomic 64-bit store.
 * In: %esi = pointer, %ecx:%ebx = new value (high:low).
 * %edx:%eax hold a guess of the old value (whatever the caller left
 * there); cmpxchg8b reloads them on a miss, so the loop retries until
 * the store of %ecx:%ebx succeeds.
 */
ENTRY(atomic64_set_cx8)
1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	ret
ENDPROC(atomic64_set_cx8)
37
/*
 * atomic64_xchg_cx8: atomically exchange a 64-bit value.
 * In:  %esi = pointer, %ecx:%ebx = new value (high:low)
 * Out: %edx:%eax = value previously in memory.
 * %edx:%eax start as whatever the caller left there; on a compare miss
 * cmpxchg8b reloads them with the current memory contents, so each
 * retry uses an up-to-date expected value.
 */
ENTRY(atomic64_xchg_cx8)
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	ret
ENDPROC(atomic64_xchg_cx8)
46
/*
 * addsub_return func ins insc: emit atomic64_\func\()_return_cx8,
 * which performs *v = *v <op> delta and returns the new value.
 * In:  %eax:%edx = delta (low:high), %ecx = pointer to the variable
 * Out: %edx:%eax = resulting value
 * \ins / \insc are the low-/high-word instructions (add/adc, sub/sbb)
 * so the carry or borrow propagates across the 64-bit operation.
 */
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi

	/* free %eax/%edx/%ebx/%ecx for the cmpxchg8b loop */
	movl %eax, %esi			/* delta low */
	movl %edx, %edi			/* delta high */
	movl %ecx, %ebp			/* pointer */

	read64 %ecx			/* %edx:%eax = current value */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx		/* new low  = old low  <op> delta low */
	\insc\()l %edi, %ecx		/* new high, with carry/borrow */
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b				/* raced: %edx:%eax reloaded, retry */

10:					/* NOTE(review): label appears unused */
	movl %ebx, %eax
	movl %ecx, %edx
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
ENDPROC(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb
81
/*
 * incdec_return func ins insc: emit atomic64_\func\()_return_cx8,
 * which performs *v = *v +/- 1 and returns the new value.
 * In:  %esi = pointer to the 64-bit variable
 * Out: %edx:%eax = resulting value
 * \ins / \insc are add/adc (inc) or sub/sbb (dec) so the carry or
 * borrow from the low word reaches the high word.
 */
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	pushl %ebx

	read64 %esi			/* %edx:%eax = current value */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx		/* low word +/- 1 */
	\insc\()l $0, %ecx		/* propagate carry/borrow to high word */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: retry with reloaded value */

10:					/* NOTE(review): label appears unused */
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	ret
ENDPROC(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb
106
/*
 * atomic64_dec_if_positive_cx8: decrement *v only if the result does
 * not go negative.
 * In:  %esi = pointer to the 64-bit variable
 * Out: %edx:%eax = decremented value; it is stored back to memory only
 *      when its sign bit (high word) stayed clear.
 * Fix: use the explicit-suffix form sbbl (same encoding as the bare
 * sbb with a 32-bit register operand) for consistency with the subl/
 * adcl/sbbl style used everywhere else in this file.
 */
ENTRY(atomic64_dec_if_positive_cx8)
	pushl %ebx

	read64 %esi			/* %edx:%eax = current value */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx			/* 64-bit decrement ... */
	sbbl $0, %ecx			/* ... borrow into the high word */
	js 2f				/* result negative: skip the store */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: retry with reloaded value */

2:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	ret
ENDPROC(atomic64_dec_if_positive_cx8)
127
/*
 * atomic64_add_unless_cx8: add a value to *v unless *v equals u.
 * In:  %esi = pointer, %eax:%edx = addend (low:high),
 *      %ecx:%edi = "unless" value u (low:high)
 * Out: %eax = 1 if the add was performed, 0 if *v already equalled u
 */
ENTRY(atomic64_add_unless_cx8)
	pushl %ebp
	pushl %ebx
/* these just push these two parameters on the stack */
	pushl %edi
	pushl %ecx

	/* free %eax/%edx for the cmpxchg8b loop */
	movl %eax, %ebp			/* addend low */
	movl %edx, %edi			/* addend high */

	read64 %esi			/* %edx:%eax = current value */
1:
	cmpl %eax, 0(%esp)		/* low word equals u's low word? */
	je 4f				/* maybe equal: check the high word */
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %ebp, %ebx			/* new value = old + addend ... */
	adcl %edi, %ecx			/* ... with carry into high word */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: retry with reloaded value */

	movl $1, %eax
3:
	addl $8, %esp			/* drop the two pushed parameters */
	popl %ebx
	popl %ebp
	ret
4:
	cmpl %edx, 4(%esp)		/* high word equals u's high word? */
	jne 2b				/* no: value != u, do the add */
	xorl %eax, %eax			/* yes: nothing added, return 0 */
	jmp 3b
ENDPROC(atomic64_add_unless_cx8)
163
/*
 * atomic64_inc_not_zero_cx8: increment *v unless it is zero.
 * In:  %esi = pointer to the 64-bit variable
 * Out: %eax = 1 if incremented, 0 if *v was zero
 */
ENTRY(atomic64_inc_not_zero_cx8)
	pushl %ebx

	read64 %esi			/* %edx:%eax = current value */
1:
	movl %eax, %ecx
	orl %edx, %ecx			/* whole 64-bit value zero? */
	jz 3f				/* yes: %eax is 0 here, return it */
	movl %eax, %ebx
	xorl %ecx, %ecx
	addl $1, %ebx			/* 64-bit increment ... */
	adcl %edx, %ecx			/* ... %ecx = high word + carry */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: retry with reloaded value */

	movl $1, %eax
3:
	popl %ebx
	ret
ENDPROC(atomic64_inc_not_zero_cx8)
1/*
2 * atomic64_t for 586+
3 *
4 * Copyright © 2010 Luca Barbieri
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 */
11
12#include <linux/linkage.h>
13#include <asm/alternative-asm.h>
14#include <asm/dwarf2.h>
15
/*
 * SAVE/RESTORE reg: push/pop a register while keeping the DWARF CFI
 * annotations (CFA offset and register save slot) in step, so stack
 * unwinding through these functions stays accurate.
 */
.macro SAVE reg
	pushl_cfi %\reg
	CFI_REL_OFFSET \reg, 0
.endm

.macro RESTORE reg
	popl_cfi %\reg
	CFI_RESTORE \reg
.endm
25
/*
 * read64 reg: atomically load the 64-bit value at (\reg) into %edx:%eax.
 * Copying %ebx/%ecx into %eax/%edx first makes cmpxchg8b's "new" value
 * equal to its "expected" value, so memory is unchanged whether the
 * compare hits or misses, and %edx:%eax receive the current contents.
 * Clobbers %eax, %edx and flags.
 */
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
33
/*
 * atomic64_read_cx8: atomic 64-bit load.
 * In:  %ecx = pointer to the 64-bit variable
 * Out: %edx:%eax = current value (read atomically via locked cmpxchg8b)
 */
ENTRY(atomic64_read_cx8)
	CFI_STARTPROC

	read64 %ecx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_read_cx8)
41
/*
 * atomic64_set_cx8: atomic 64-bit store.
 * In: %esi = pointer, %ecx:%ebx = new value (high:low).
 * %edx:%eax hold a guess of the old value; cmpxchg8b reloads them on
 * a miss, so the loop retries until the store of %ecx:%ebx succeeds.
 */
ENTRY(atomic64_set_cx8)
	CFI_STARTPROC

1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_set_cx8)
54
/*
 * atomic64_xchg_cx8: atomically exchange a 64-bit value.
 * In:  %esi = pointer, %ecx:%ebx = new value (high:low)
 * Out: %edx:%eax = value previously in memory.
 */
ENTRY(atomic64_xchg_cx8)
	CFI_STARTPROC

	/* seed %edx:%eax with the new value as the first "expected" guess */
	movl %ebx, %eax
	movl %ecx, %edx
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: retry with reloaded value */

	ret
	CFI_ENDPROC
ENDPROC(atomic64_xchg_cx8)
68
/*
 * addsub_return func ins insc: emit atomic64_\func\()_return_cx8,
 * which performs *v = *v <op> delta and returns the new value.
 * In:  %eax:%edx = delta (low:high), %ecx = pointer to the variable
 * Out: %edx:%eax = resulting value
 * \ins / \insc are the low-/high-word instructions (add/adc, sub/sbb)
 * so the carry or borrow propagates across the 64-bit operation.
 */
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
	SAVE esi
	SAVE edi

	/* free %eax/%edx/%ebx/%ecx for the cmpxchg8b loop */
	movl %eax, %esi			/* delta low */
	movl %edx, %edi			/* delta high */
	movl %ecx, %ebp			/* pointer */

	read64 %ebp			/* %edx:%eax = current value */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx		/* new low  = old low  <op> delta low */
	\insc\()l %edi, %ecx		/* new high, with carry/borrow */
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b				/* raced: %edx:%eax reloaded, retry */

10:					/* NOTE(review): label appears unused */
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE edi
	RESTORE esi
	RESTORE ebx
	RESTORE ebp
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb
105
/*
 * incdec_return func ins insc: emit atomic64_\func\()_return_cx8,
 * which performs *v = *v +/- 1 and returns the new value.
 * In:  %esi = pointer to the 64-bit variable
 * Out: %edx:%eax = resulting value
 * \ins / \insc are add/adc (inc) or sub/sbb (dec) so the carry or
 * borrow from the low word reaches the high word.
 */
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi			/* %edx:%eax = current value */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx		/* low word +/- 1 */
	\insc\()l $0, %ecx		/* propagate carry/borrow to high word */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: retry with reloaded value */

10:					/* NOTE(review): label appears unused */
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb
132
/*
 * atomic64_dec_if_positive_cx8: decrement *v only if the result does
 * not go negative.
 * In:  %esi = pointer to the 64-bit variable
 * Out: %edx:%eax = decremented value; it is stored back to memory only
 *      when its sign bit (high word) stayed clear.
 * Fix: use the explicit-suffix form sbbl (same encoding as the bare
 * sbb with a 32-bit register operand) for consistency with the subl/
 * adcl/sbbl style used everywhere else in this file.
 */
ENTRY(atomic64_dec_if_positive_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi			/* %edx:%eax = current value */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx			/* 64-bit decrement ... */
	sbbl $0, %ecx			/* ... borrow into the high word */
	js 2f				/* result negative: skip the store */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: retry with reloaded value */

2:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_dec_if_positive_cx8)
155
/*
 * atomic64_add_unless_cx8: add a value to *v unless *v equals u.
 * In:  %ecx = pointer, %eax:%edx = addend (low:high),
 *      %esi:%edi = "unless" value u (low:high)
 * Out: %eax = 1 if the add was performed, 0 if *v already equalled u
 */
ENTRY(atomic64_add_unless_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
/* these just push these two parameters on the stack */
	SAVE edi
	SAVE esi

	/* free %eax/%edx/%esi/%edi for the loop */
	movl %ecx, %ebp			/* pointer */
	movl %eax, %esi			/* addend low */
	movl %edx, %edi			/* addend high */

	read64 %ebp			/* %edx:%eax = current value */
1:
	cmpl %eax, 0(%esp)		/* low word equals u's low word? */
	je 4f				/* maybe equal: check the high word */
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %esi, %ebx			/* new value = old + addend ... */
	adcl %edi, %ecx			/* ... with carry into high word */
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b				/* raced: retry with reloaded value */

	movl $1, %eax
3:
	addl $8, %esp			/* drop the two pushed parameters */
	CFI_ADJUST_CFA_OFFSET -8
	RESTORE ebx
	RESTORE ebp
	ret
4:
	cmpl %edx, 4(%esp)		/* high word equals u's high word? */
	jne 2b				/* no: value != u, do the add */
	xorl %eax, %eax			/* yes: nothing added, return 0 */
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_add_unless_cx8)
195
/*
 * atomic64_inc_not_zero_cx8: increment *v unless it is zero.
 * In:  %esi = pointer to the 64-bit variable
 * Out: %eax = 1 if incremented, 0 if *v was zero
 */
ENTRY(atomic64_inc_not_zero_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi			/* %edx:%eax = current value */
1:
	testl %eax, %eax		/* low word zero? */
	je 4f				/* maybe zero: check the high word */
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl $1, %ebx			/* 64-bit increment ... */
	adcl $0, %ecx			/* ... carry into the high word */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: retry with reloaded value */

	movl $1, %eax
3:
	RESTORE ebx
	ret
4:
	testl %edx, %edx		/* high word zero too? */
	jne 2b				/* no: value != 0, do the increment */
	jmp 3b				/* yes: %eax is 0 here, return it */
	CFI_ENDPROC
ENDPROC(atomic64_inc_not_zero_cx8)