/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i) { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v) READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) WRITE_ONCE((v)->counter, (i))
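
/*
 * Illustrative usage (not part of this header): an atomic_t is declared
 * and initialised with ATOMIC_INIT and then accessed through the helpers
 * above, e.g.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 0);
 *	pr_info("events: %d\n", atomic_read(&nr_events));
 *
 * Note that atomic_read()/atomic_set() are plain (single-copy atomic)
 * accesses and imply no memory barriers.
 */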

#define ATOMIC_OP(op, c_op, asm_op) \
static __inline__ void atomic_##op(int i, atomic_t * v) \
{ \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		int temp; \
 \
		__asm__ __volatile__( \
		" .set arch=r4000 \n" \
		"1: ll %0, %1 # atomic_" #op " \n" \
		" " #asm_op " %0, %2 \n" \
		" sc %0, %1 \n" \
		" beqzl %0, 1b \n" \
		" .set mips0 \n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		int temp; \
 \
		do { \
			__asm__ __volatile__( \
			" .set "MIPS_ISA_LEVEL" \n" \
			" ll %0, %1 # atomic_" #op "\n" \
			" " #asm_op " %0, %2 \n" \
			" sc %0, %1 \n" \
			" .set mips0 \n" \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!temp)); \
	} else { \
		unsigned long flags; \
 \
		raw_local_irq_save(flags); \
		v->counter c_op i; \
		raw_local_irq_restore(flags); \
	} \
}
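
/*
 * Descriptive note: each operation generated by ATOMIC_OP() picks one of
 * three strategies at run time:
 *
 *  - R10000_LLSC_WAR: an LL/SC sequence that retries with the branch-likely
 *    instruction (beqzl), the form required to work around the early R10000
 *    LL/SC errata.
 *  - plain LL/SC: the SC result is tested in C and the whole sequence is
 *    retried until the store-conditional succeeds.
 *  - neither: interrupts are disabled around an ordinary read-modify-write,
 *    which assumes CPUs without LL/SC are uniprocessor.
 */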

#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static __inline__ int atomic_##op##_return(int i, atomic_t * v) \
{ \
	int result; \
 \
	smp_mb__before_llsc(); \
 \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		int temp; \
 \
		__asm__ __volatile__( \
		" .set arch=r4000 \n" \
		"1: ll %1, %2 # atomic_" #op "_return \n" \
		" " #asm_op " %0, %1, %3 \n" \
		" sc %0, %2 \n" \
		" beqzl %0, 1b \n" \
		" " #asm_op " %0, %1, %3 \n" \
		" .set mips0 \n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		int temp; \
 \
		do { \
			__asm__ __volatile__( \
			" .set "MIPS_ISA_LEVEL" \n" \
			" ll %1, %2 # atomic_" #op "_return \n" \
			" " #asm_op " %0, %1, %3 \n" \
			" sc %0, %2 \n" \
			" .set mips0 \n" \
			: "=&r" (result), "=&r" (temp), \
			  "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!result)); \
 \
		result = temp; result c_op i; \
	} else { \
		unsigned long flags; \
 \
		raw_local_irq_save(flags); \
		result = v->counter; \
		result c_op i; \
		v->counter = result; \
		raw_local_irq_restore(flags); \
	} \
 \
	smp_llsc_mb(); \
 \
	return result; \
}
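
/*
 * Note that only the value-returning operations are fully ordered:
 * smp_mb__before_llsc() and smp_llsc_mb() bracket atomic_<op>_return(),
 * making it a full memory barrier, while the plain atomic_<op>()
 * operations above imply no ordering.
 */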

#define ATOMIC_OPS(op, c_op, asm_op) \
	ATOMIC_OP(op, c_op, asm_op) \
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

ATOMIC_OP(and, &=, and)
ATOMIC_OP(or, |=, or)
ATOMIC_OP(xor, ^=, xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
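
/*
 * The instantiations above generate:
 *
 *	void atomic_add(int i, atomic_t *v);
 *	int  atomic_add_return(int i, atomic_t *v);
 *	void atomic_sub(int i, atomic_t *v);
 *	int  atomic_sub_return(int i, atomic_t *v);
 *	void atomic_and(int i, atomic_t *v);
 *	void atomic_or(int i, atomic_t *v);
 *	void atomic_xor(int i, atomic_t *v);
 */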

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically tests @v and subtracts @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i; if the result would be
 * negative, @v is left unchanged and the negative difference is returned.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		" .set arch=r4000 \n"
		"1: ll %1, %2 # atomic_sub_if_positive\n"
		" subu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" sc %0, %2 \n"
		" .set noreorder \n"
		" beqzl %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		" .set "MIPS_ISA_LEVEL" \n"
		"1: ll %1, %2 # atomic_sub_if_positive\n"
		" subu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" sc %0, %2 \n"
		" .set noreorder \n"
		" beqz %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
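
/*
 * Illustrative only: the cmpxchg loop above is the usual building block for
 * conditional updates; generic code wraps it roughly as
 *
 *	if (__atomic_add_unless(v, a, u) != u)
 *		... the add took place ...
 *
 * which is how atomic_add_unless()/atomic_inc_not_zero() are built in
 * <linux/atomic.h>.
 */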

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)
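
/*
 * Example (illustrative only): a "take one if any are left" check built on
 * atomic_dec_if_positive(); when the counter is already zero it is left
 * unchanged and a negative value is returned:
 *
 *	if (atomic_dec_if_positive(&available) < 0)
 *		return -EBUSY;
 */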

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
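
/*
 * Typical reference-count usage of the helpers above (illustrative only;
 * "obj" and free_obj() are hypothetical):
 *
 *	atomic_inc(&obj->refcount);
 *	...
 *	if (atomic_dec_and_test(&obj->refcount))
 *		free_obj(obj);
 */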

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i) { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v) READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i) WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, c_op, asm_op) \
static __inline__ void atomic64_##op(long i, atomic64_t * v) \
{ \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		long temp; \
 \
		__asm__ __volatile__( \
		" .set arch=r4000 \n" \
		"1: lld %0, %1 # atomic64_" #op " \n" \
		" " #asm_op " %0, %2 \n" \
		" scd %0, %1 \n" \
		" beqzl %0, 1b \n" \
		" .set mips0 \n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		long temp; \
 \
		do { \
			__asm__ __volatile__( \
			" .set "MIPS_ISA_LEVEL" \n" \
			" lld %0, %1 # atomic64_" #op "\n" \
			" " #asm_op " %0, %2 \n" \
			" scd %0, %1 \n" \
			" .set mips0 \n" \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!temp)); \
	} else { \
		unsigned long flags; \
 \
		raw_local_irq_save(flags); \
		v->counter c_op i; \
		raw_local_irq_restore(flags); \
	} \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op) \
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
{ \
	long result; \
 \
	smp_mb__before_llsc(); \
 \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		long temp; \
 \
		__asm__ __volatile__( \
		" .set arch=r4000 \n" \
		"1: lld %1, %2 # atomic64_" #op "_return\n" \
		" " #asm_op " %0, %1, %3 \n" \
		" scd %0, %2 \n" \
		" beqzl %0, 1b \n" \
		" " #asm_op " %0, %1, %3 \n" \
		" .set mips0 \n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		long temp; \
 \
		do { \
			__asm__ __volatile__( \
			" .set "MIPS_ISA_LEVEL" \n" \
			" lld %1, %2 # atomic64_" #op "_return\n" \
			" " #asm_op " %0, %1, %3 \n" \
			" scd %0, %2 \n" \
			" .set mips0 \n" \
			: "=&r" (result), "=&r" (temp), \
			  "=" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
			: "memory"); \
		} while (unlikely(!result)); \
 \
		result = temp; result c_op i; \
	} else { \
		unsigned long flags; \
 \
		raw_local_irq_save(flags); \
		result = v->counter; \
		result c_op i; \
		v->counter = result; \
		raw_local_irq_restore(flags); \
	} \
 \
	smp_llsc_mb(); \
 \
	return result; \
}

#define ATOMIC64_OPS(op, c_op, asm_op) \
	ATOMIC64_OP(op, c_op, asm_op) \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)
ATOMIC64_OP(and, &=, and)
ATOMIC64_OP(or, |=, or)
ATOMIC64_OP(xor, ^=, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
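
/*
 * As with the 32-bit versions, the instantiations above generate
 * atomic64_add(), atomic64_sub(), atomic64_add_return(),
 * atomic64_sub_return(), atomic64_and(), atomic64_or() and atomic64_xor(),
 * all operating on the long-sized counter of an atomic64_t.
 */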

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 * variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically tests @v and subtracts @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i; if the result would be
 * negative, @v is left unchanged and the negative difference is returned.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		" .set arch=r4000 \n"
		"1: lld %1, %2 # atomic64_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" scd %0, %2 \n"
		" .set noreorder \n"
		" beqzl %0, 1b \n"
		" dsubu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp),
		  "=" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		" .set "MIPS_ISA_LEVEL" \n"
		"1: lld %1, %2 # atomic64_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" scd %0, %2 \n"
		" .set noreorder \n"
		" beqz %0, 1b \n"
		" dsubu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
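
/*
 * Example (illustrative only; "obj" is hypothetical): take a reference on a
 * 64-bit counter only while it is still non-zero:
 *
 *	if (!atomic64_inc_not_zero(&obj->uses))
 *		return NULL;
 */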

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */