/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/war.h>

static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
	__u32 retval;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long dummy;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%0, %3			# xchg_u32	\n"
		"	.set	mips0					\n"
		"	move	%2, %z4					\n"
		"	.set	arch=r4000				\n"
		"	sc	%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
		: "R" (*m), "Jr" (val)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long dummy;

		do {
			__asm__ __volatile__(
			"	.set	arch=r4000			\n"
			"	ll	%0, %3		# xchg_u32	\n"
			"	.set	mips0				\n"
			"	move	%2, %z4				\n"
			"	.set	arch=r4000			\n"
			"	sc	%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (retval), "=m" (*m), "=&r" (dummy)
			: "R" (*m), "Jr" (val)
			: "memory");
		} while (unlikely(!dummy));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		retval = *m;
		*m = val;
		raw_local_irq_restore(flags);	/* implies memory barrier */
	}

	smp_llsc_mb();

	return retval;
}

#ifdef CONFIG_64BIT
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
	__u64 retval;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long dummy;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%0, %3			# xchg_u64	\n"
		"	move	%2, %z4					\n"
		"	scd	%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
		: "R" (*m), "Jr" (val)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long dummy;

		do {
			__asm__ __volatile__(
			"	.set	arch=r4000			\n"
			"	lld	%0, %3		# xchg_u64	\n"
			"	move	%2, %z4				\n"
			"	scd	%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (retval), "=m" (*m), "=&r" (dummy)
			: "R" (*m), "Jr" (val)
			: "memory");
		} while (unlikely(!dummy));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		retval = *m;
		*m = val;
		raw_local_irq_restore(flags);	/* implies memory barrier */
	}

	smp_llsc_mb();

	return retval;
}
#else
extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
#endif

static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 4:
		return __xchg_u32(ptr, x);
	case 8:
		return __xchg_u64(ptr, x);
	}

	return x;
}

#define xchg(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);				\
									\
	((__typeof__(*(ptr)))						\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
})
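
/*
 * Illustrative sketch (not part of the original header): a typical caller
 * uses xchg() to atomically publish a new value while observing the old one
 * in a single step. The helper and its flag word below are hypothetical.
 */
static inline unsigned int example_test_and_set_flag(volatile unsigned int *flag)
{
	/* Atomically store 1 in *flag and return the previous value. */
	return xchg(flag, 1);
}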

#define __HAVE_ARCH_CMPXCHG 1

#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	arch=r4000			\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqzl	$1, 1b				\n"	\
		"2:						\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=R" (*m)				\
		: "R" (*m), "Jr" (old), "Jr" (new)			\
		: "memory");						\
	} else if (kernel_uses_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	arch=r4000			\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqz	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=R" (*m)				\
		: "R" (*m), "Jr" (old), "Jr" (new)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __res = 0;					\
									\
	pre_barrier;							\
									\
	switch (sizeof(*(__ptr))) {					\
	case 4:								\
		__res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
		break;							\
	case 8:								\
		if (sizeof(long) == 8) {				\
			__res = __cmpxchg_asm("lld", "scd", __ptr,	\
					      __old, __new);		\
			break;						\
		}							\
	default:							\
		__cmpxchg_called_with_bad_pointer();			\
		break;							\
	}								\
									\
	post_barrier;							\
									\
	__res;								\
})

#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, , )
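
/*
 * Illustrative sketch (not part of the original header): cmpxchg() is the
 * usual building block for lock-free read-modify-write loops. The
 * hypothetical helper below adds to a counter, retrying whenever another
 * CPU changes the value between the load and the cmpxchg().
 */
static inline unsigned int example_atomic_add(volatile unsigned int *p,
					      unsigned int inc)
{
	unsigned int old, new;

	do {
		old = *p;
		new = old + inc;
	} while (cmpxchg(p, old, new) != old);

	return new;
}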

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* __ASM_CMPXCHG_H */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/llsc.h>
#include <asm/sync.h>
#include <asm/war.h>

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" __SYNC(full, loongson3_war) "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __SC_BEQZ "$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * In the Loongson3 workaround case __xchg_asm() already	\
	 * contains a completion barrier prior to the LL, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (__SYNC_loongson3_war == 0)					\
		smp_mb__before_llsc();					\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})
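
/*
 * Illustrative sketch (not part of the original header): callers normally
 * reach arch_xchg() through the generic xchg() wrapper. The hypothetical
 * helper below swaps a native machine word and returns the previous value.
 */
static inline unsigned long example_swap_word(volatile unsigned long *slot,
					      unsigned long newval)
{
	/* Atomically store newval in *slot and return what was there. */
	return arch_xchg(slot, newval);
}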

#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" __SYNC(full, loongson3_war) "		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __SC_BEQZ "$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:	" __SYNC(full, loongson3_war) "		\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
			unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define arch_cmpxchg_local(ptr, old, new)				\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define arch_cmpxchg(ptr, old, new)					\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * In the Loongson3 workaround case __cmpxchg_asm() already	\
	 * contains a completion barrier prior to the LL, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (__SYNC_loongson3_war == 0)					\
		smp_mb__before_llsc();					\
									\
	__res = arch_cmpxchg_local((ptr), (old), (new));		\
									\
	/*								\
	 * In the Loongson3 workaround case __cmpxchg_asm() already	\
	 * contains a completion barrier after the SC, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (__SYNC_loongson3_war == 0)					\
		smp_llsc_mb();						\
									\
	__res;								\
})
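
/*
 * Illustrative sketch (not part of the original header): a classic
 * compare-and-swap retry loop built on arch_cmpxchg(). This hypothetical
 * helper atomically raises a word to at least val, returning the value
 * it last observed.
 */
static inline unsigned int example_atomic_max(volatile unsigned int *p,
					      unsigned int val)
{
	unsigned int old;

	do {
		old = *p;
		if (old >= val)
			break;	/* already at least val, nothing to do */
	} while (arch_cmpxchg(p, old, val) != old);

	return old;
}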

#ifdef CONFIG_64BIT
#define arch_cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
  })

#define arch_cmpxchg64(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values into a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"	" __SYNC(full, loongson3_war) "		\n"
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
# if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
# else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
# endif
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __SC_BEQZ "%L1, 1b				\n"
	"	.set	pop				\n"
	"2:	" __SYNC(full, loongson3_war) "		\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");

	local_irq_restore(flags);
	return ret;
}

# define arch_cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only use cmpxchg64 if we know that the CPU supports	\
	 * 64-bits, i.e. lld & scd. Our call to			\
	 * __cmpxchg64_unsupported will cause a build error unless	\
	 * cpu_has_64bits is a compile-time constant 1.			\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc) {			\
		smp_mb__before_llsc();					\
		__res = __cmpxchg64((ptr), __old, __new);		\
		smp_llsc_mb();						\
	} else {							\
		__res = __cmpxchg64_unsupported();			\
	}								\
									\
	__res;								\
})

# else /* !CONFIG_SMP */
# define arch_cmpxchg64(ptr, o, n) arch_cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#endif /* __ASM_CMPXCHG_H */