/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

extern void __xchg_called_with_bad_pointer(void);

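/*
 * There is no byte or halfword exchange instruction, so sizes 1 and 2 are
 * emulated with a word-sized COMPARE AND SWAP (cs) on the 4-byte aligned
 * word that contains the operand: "shift" is the operand's bit position
 * within that word (s390 is big-endian), scratch register 0 holds the word
 * with the new byte/halfword spliced in, and cs is retried until it
 * succeeds.
 */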
static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
	unsigned long addr, old;
	int shift;

	switch (size) {
	case 1:
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 2:
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 4:
		asm volatile(
			"	l	%0,%3\n"
			"0:	cs	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) ptr)
			: "d" (x), "Q" (*(int *) ptr)
			: "memory", "cc");
		return old;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	lg	%0,%3\n"
			"0:	csg	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=m" (*(long *) ptr)
			: "d" (x), "Q" (*(long *) ptr)
			: "memory", "cc");
		return old;
#endif /* CONFIG_64BIT */
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr, x)						\
({								\
	__typeof__(*(ptr)) __ret;				\
	__ret = (__typeof__(*(ptr)))				\
		__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
	__ret;							\
})

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */

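/*
 * Typical use (an illustrative sketch only; "lock" and the values used
 * here are hypothetical, not part of this header):
 *
 *	static unsigned int lock;
 *
 *	if (cmpxchg(&lock, 0, 1) == 0)
 *		... the value was 0 and has atomically become 1 ...
 *	else
 *		... another CPU changed it first, nothing was stored ...
 */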
#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

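/*
 * As in __xchg(), the 1- and 2-byte cases are emulated with a word-sized
 * cs on the containing aligned word. When cs fails, the xr/nr sequence
 * checks what caused the mismatch: if only the neighbouring bytes changed,
 * the loop retries; if the byte/halfword of interest itself differs from
 * "old", the current value is returned so the caller sees the failure.
 */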
static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long addr, prev, tmp;
	int shift;

	switch (size) {
	case 1:
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
		asm volatile(
			"	cs	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(int *) ptr)
			: "0" (old), "d" (new), "Q" (*(int *) ptr)
			: "memory", "cc");
		return prev;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	csg	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(long *) ptr)
			: "0" (old), "d" (new), "Q" (*(long *) ptr)
			: "memory", "cc");
		return prev;
#endif /* CONFIG_64BIT */
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, o, n)					\
({								\
	__typeof__(*(ptr)) __ret;				\
	__ret = (__typeof__(*(ptr)))				\
		__cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
			  sizeof(*(ptr)));			\
	__ret;							\
})

#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)					\
({								\
	cmpxchg((ptr), (o), (n));				\
})
#else /* CONFIG_64BIT */
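/*
 * On 31-bit there is no 64-bit compare-and-swap of a single register;
 * COMPARE DOUBLE AND SWAP (cds) operates on an even/odd register pair
 * instead, so the 64-bit operands are passed as register_pair values.
 */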
static inline unsigned long long __cmpxchg64(void *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};
	unsigned long long *ullptr = ptr;

	asm volatile(
		"	cds	%0,%2,%1"
		: "+d" (rp_old), "+Q" (*ullptr)
		: "d" (rp_new)
		: "memory", "cc");
	return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)				\
({							\
	__typeof__(*(ptr)) __ret;			\
	__ret = (__typeof__(*(ptr)))			\
		__cmpxchg64((ptr),			\
			    (unsigned long long)(o),	\
			    (unsigned long long)(n));	\
	__ret;						\
})
#endif /* CONFIG_64BIT */

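/*
 * cds/cdsg require their first and third operands to be even/odd register
 * pairs, hence the explicit register variables: the old pair is pinned to
 * r2/r3 and the new pair to r4/r5.
 */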
#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)		\
({									\
	register __typeof__(*(p1)) __old1 asm("2") = (o1);		\
	register __typeof__(*(p2)) __old2 asm("3") = (o2);		\
	register __typeof__(*(p1)) __new1 asm("4") = (n1);		\
	register __typeof__(*(p2)) __new2 asm("5") = (n2);		\
	int cc;								\
	asm volatile(							\
			insn   " %[old],%[new],%[ptr]\n"		\
		"	ipm	%[cc]\n"				\
		"	srl	%[cc],28"				\
		: [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2)	\
		: [new] "d" (__new1), "d" (__new2),			\
		  [ptr] "Q" (*(p1)), "Q" (*(p2))			\
		: "memory", "cc");					\
	!cc;								\
})

#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2)	\
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")

#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2)	\
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")

extern void __cmpxchg_double_called_with_bad_pointer(void);

#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)		\
({								\
	int __ret;						\
	switch (sizeof(*(p1))) {				\
	case 4:							\
		__ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2); \
		break;						\
	case 8:							\
		__ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2); \
		break;						\
	default:						\
		__cmpxchg_double_called_with_bad_pointer();	\
	}							\
	__ret;							\
})

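/*
 * cmpxchg_double() atomically updates two adjacent long-sized values as a
 * single unit: the BUILD_BUG_ONs require long-sized operands and the
 * VM_BUG_ON requires that p2 immediately follows p1 in memory, since
 * cds/cdsg operate on one contiguous double-width operand.
 */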
#define cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({								\
	__typeof__(p1) __p1 = (p1);				\
	__typeof__(p2) __p2 = (p2);				\
	int __ret;						\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));		\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));		\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	if (sizeof(long) == 4)					\
		__ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2); \
	else							\
		__ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2); \
	__ret;							\
})

#define system_has_cmpxchg_double()	1

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 1:
	case 2:
	case 4:
#ifdef CONFIG_64BIT
	case 8:
#endif
		return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) __ret;				\
	__ret = (__typeof__(*(ptr)))				\
		__cmpxchg_local((ptr), (unsigned long)(o),	\
				(unsigned long)(n), sizeof(*(ptr))); \
	__ret;							\
})

#define cmpxchg64_local(ptr, o, n)	cmpxchg64((ptr), (o), (n))

#endif /* __ASM_CMPXCHG_H */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long __xchg(unsigned long x,
					    unsigned long address, int size)
{
	unsigned long old;
	int shift;

	switch (size) {
	case 1:
		shift = (3 ^ (address & 3)) << 3;
		address ^= address & 3;
		asm volatile(
			"	l	%0,%1\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%1\n"
			"	jl	0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift))
			: "memory", "cc", "0");
		return old >> shift;
	case 2:
		shift = (2 ^ (address & 2)) << 3;
		address ^= address & 2;
		asm volatile(
			"	l	%0,%1\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%1\n"
			"	jl	0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift))
			: "memory", "cc", "0");
		return old >> shift;
	case 4:
		asm volatile(
			"	l	%0,%1\n"
			"0:	cs	%0,%2,%1\n"
			"	jl	0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" (x)
			: "memory", "cc");
		return old;
	case 8:
		asm volatile(
			"	lg	%0,%1\n"
			"0:	csg	%0,%2,%1\n"
			"	jl	0b\n"
			: "=&d" (old), "+QS" (*(long *) address)
			: "d" (x)
			: "memory", "cc");
		return old;
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define arch_xchg(ptr, x)					\
({								\
	__typeof__(*(ptr)) __ret;				\
								\
	__ret = (__typeof__(*(ptr)))				\
		__xchg((unsigned long)(x), (unsigned long)(ptr), \
		       sizeof(*(ptr)));				\
	__ret;							\
})

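/*
 * Illustrative use of arch_xchg() (a sketch only; callers normally reach
 * this through the generic xchg() wrapper, and "flag" is a hypothetical
 * variable, not part of this header):
 *
 *	static unsigned int flag = 1;
 *	unsigned int was_set;
 *
 *	was_set = arch_xchg(&flag, 0);
 *
 * The new value is stored and the previous contents are returned in one
 * atomic operation.
 */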
void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long __cmpxchg(unsigned long address,
					       unsigned long old,
					       unsigned long new, int size)
{
	unsigned long prev, tmp;
	int shift;

	switch (size) {
	case 1:
		shift = (3 ^ (address & 3)) << 3;
		address ^= address & 3;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
		shift = (2 ^ (address & 2)) << 3;
		address ^= address & 2;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
		asm volatile(
			"	cs	%0,%3,%1\n"
			: "=&d" (prev), "+Q" (*(int *) address)
			: "0" (old), "d" (new)
			: "memory", "cc");
		return prev;
	case 8:
		asm volatile(
			"	csg	%0,%3,%1\n"
			: "=&d" (prev), "+QS" (*(long *) address)
			: "0" (old), "d" (new)
			: "memory", "cc");
		return prev;
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define arch_cmpxchg(ptr, o, n)					\
({								\
	__typeof__(*(ptr)) __ret;				\
								\
	__ret = (__typeof__(*(ptr)))				\
		__cmpxchg((unsigned long)(ptr), (unsigned long)(o), \
			  (unsigned long)(n), sizeof(*(ptr)));	\
	__ret;							\
})

#define arch_cmpxchg64		arch_cmpxchg
#define arch_cmpxchg_local	arch_cmpxchg
#define arch_cmpxchg64_local	arch_cmpxchg

#define system_has_cmpxchg_double()	1

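/*
 * cdsg compares the 128-bit value held in an even/odd register pair with
 * the quadword at the target location and, if they match, replaces it with
 * the new register pair. The condition code is extracted with ipm/srl and
 * inverted, so a successful swap returns 1.
 */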
static __always_inline int __cmpxchg_double(unsigned long p1, unsigned long p2,
					    unsigned long o1, unsigned long o2,
					    unsigned long n1, unsigned long n2)
{
	union register_pair old = { .even = o1, .odd = o2, };
	union register_pair new = { .even = n1, .odd = n2, };
	int cc;

	asm volatile(
		"	cdsg	%[old],%[new],%[ptr]\n"
		"	ipm	%[cc]\n"
		"	srl	%[cc],28\n"
		: [cc] "=&d" (cc), [old] "+&d" (old.pair)
		: [new] "d" (new.pair),
		  [ptr] "QS" (*(unsigned long *)p1), "Q" (*(unsigned long *)p2)
		: "memory", "cc");
	return !cc;
}

#define arch_cmpxchg_double(p1, p2, o1, o2, n1, n2)		\
({								\
	typeof(p1) __p1 = (p1);					\
	typeof(p2) __p2 = (p2);					\
								\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));		\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));		\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	__cmpxchg_double((unsigned long)__p1, (unsigned long)__p2,	\
			 (unsigned long)(o1), (unsigned long)(o2),	\
			 (unsigned long)(n1), (unsigned long)(n2));	\
})

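/*
 * Illustrative use of arch_cmpxchg_double() (a sketch only; the structure
 * and variable names are hypothetical, not part of this header). Two
 * adjacent, suitably aligned longs are updated as one atomic unit; on
 * mismatch the macro returns 0 and the caller retries:
 *
 *	static struct { unsigned long lo; unsigned long hi; } pair __aligned(16);
 *	unsigned long old_lo, old_hi;
 *
 *	do {
 *		old_lo = pair.lo;
 *		old_hi = pair.hi;
 *	} while (!arch_cmpxchg_double(&pair.lo, &pair.hi,
 *				      old_lo, old_hi,
 *				      old_lo + 1, old_hi + 1));
 */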
#endif /* __ASM_CMPXCHG_H */