/* atomic.h: Thankfully the V9 is at least reasonable for this
 * stuff.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

#define atomic_set(v, i)	(((v)->counter) = i)
#define atomic64_set(v, i)	(((v)->counter) = i)

/* The arithmetic primitives below are implemented out of line (in the
 * sparc64 lib code), using the V9 cas/casx instructions.
 */
extern void atomic_add(int, atomic_t *);
extern void atomic64_add(long, atomic64_t *);
extern void atomic_sub(int, atomic_t *);
extern void atomic64_sub(long, atomic64_t *);

extern int atomic_add_ret(int, atomic_t *);
extern long atomic64_add_ret(long, atomic64_t *);
extern int atomic_sub_ret(int, atomic_t *);
extern long atomic64_sub_ret(long, atomic64_t *);

#define atomic_dec_return(v)		atomic_sub_ret(1, v)
#define atomic64_dec_return(v)		atomic64_sub_ret(1, v)

#define atomic_inc_return(v)		atomic_add_ret(1, v)
#define atomic64_inc_return(v)		atomic64_add_ret(1, v)

#define atomic_sub_return(i, v)		atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v)	atomic64_sub_ret(i, v)

#define atomic_add_return(i, v)		atomic_add_ret(i, v)
#define atomic64_add_return(i, v)	atomic64_add_ret(i, v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v)	(atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v)		(atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_ret(1, v) == 0)
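
/*
 * Illustrative sketch (not part of the original header): the classic
 * reference-count release pattern built on atomic_dec_and_test().  The
 * names "obj", "refcnt" and "free_obj" are hypothetical.
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		free_obj(obj);
 *
 * Only the caller that drops the count to zero sees a true return, so
 * exactly one path frees the object.
 */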

#define atomic_inc(v)			atomic_add(1, v)
#define atomic64_inc(v)			atomic64_add(1, v)

#define atomic_dec(v)			atomic_sub(1, v)
#define atomic64_dec(v)			atomic64_sub(1, v)

#define atomic_add_negative(i, v)	(atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v)	(atomic64_add_ret(i, v) < 0)

#define atomic_cmpxchg(v, o, n)		(cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))

/* Add @a to @v, unless @v is currently @u; return the value @v had
 * before any update (so a return of @u means nothing was added).
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		/* Try to install c + a; fails if someone else changed @v. */
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		/* Lost the race: retry with the value we actually saw. */
		c = old;
	}
	return c;
}
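
/*
 * For context (a sketch, not part of the original file): callers normally
 * go through the generic atomic_add_unless()/atomic_inc_not_zero() wrappers
 * in <linux/atomic.h>, which at this point in history were built on the
 * helper above roughly as
 *
 *	static inline int atomic_add_unless(atomic_t *v, int a, int u)
 *	{
 *		return __atomic_add_unless(v, a, u) != u;
 *	}
 *
 * i.e. add @a to @v unless @v is @u, and report whether the add happened.
 */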

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new)		(xchg(&((v)->counter), new))

/* Same retry loop as above, but note the different return convention:
 * this returns non-zero iff the add was actually performed.
 */
static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
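
/*
 * Illustrative sketch (not part of the original header): the usual
 * "take a reference only if the object is still live" idiom.  "obj" and
 * "refcnt" are hypothetical names.
 *
 *	if (!atomic64_inc_not_zero(&obj->refcnt))
 *		return NULL;
 *
 * The increment is refused once the count has already dropped to zero,
 * so a dying object is never resurrected.
 */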

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* !(__ARCH_SPARC64_ATOMIC__) */

/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.h: Thankfully the V9 is at least reasonable for this
 * stuff.
 *
 * Copyright (C) 1996, 1997, 2000, 2012 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC64_INIT(i)	{ (i) }

#define arch_atomic_read(v)	READ_ONCE((v)->counter)
#define arch_atomic64_read(v)	READ_ONCE((v)->counter)

#define arch_atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))
#define arch_atomic64_set(v, i)	WRITE_ONCE(((v)->counter), (i))

#define ATOMIC_OP(op) \
void arch_atomic_##op(int, atomic_t *); \
void arch_atomic64_##op(s64, atomic64_t *);

#define ATOMIC_OP_RETURN(op) \
int arch_atomic_##op##_return(int, atomic_t *); \
s64 arch_atomic64_##op##_return(s64, atomic64_t *);

#define ATOMIC_FETCH_OP(op) \
int arch_atomic_fetch_##op(int, atomic_t *); \
s64 arch_atomic64_fetch_##op(s64, atomic64_t *);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
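
/*
 * For readability (this expansion is not spelled out in the original file):
 * ATOMIC_OPS(add) above declares the following out-of-line functions, and
 * ATOMIC_OPS(sub) does the same with "sub":
 *
 *	void arch_atomic_add(int, atomic_t *);
 *	void arch_atomic64_add(s64, atomic64_t *);
 *	int arch_atomic_add_return(int, atomic_t *);
 *	s64 arch_atomic64_add_return(s64, atomic64_t *);
 *	int arch_atomic_fetch_add(int, atomic_t *);
 *	s64 arch_atomic64_fetch_add(s64, atomic64_t *);
 */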

#define arch_atomic_add_return		arch_atomic_add_return
#define arch_atomic_sub_return		arch_atomic_sub_return
#define arch_atomic_fetch_add		arch_atomic_fetch_add
#define arch_atomic_fetch_sub		arch_atomic_fetch_sub

#define arch_atomic64_add_return	arch_atomic64_add_return
#define arch_atomic64_sub_return	arch_atomic64_sub_return
#define arch_atomic64_fetch_add		arch_atomic64_fetch_add
#define arch_atomic64_fetch_sub		arch_atomic64_fetch_sub

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#define arch_atomic_fetch_and		arch_atomic_fetch_and
#define arch_atomic_fetch_or		arch_atomic_fetch_or
#define arch_atomic_fetch_xor		arch_atomic_fetch_xor

#define arch_atomic64_fetch_and		arch_atomic64_fetch_and
#define arch_atomic64_fetch_or		arch_atomic64_fetch_or
#define arch_atomic64_fetch_xor		arch_atomic64_fetch_xor

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

s64 arch_atomic64_dec_if_positive(atomic64_t *v);
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
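
/*
 * Illustrative sketch (not part of the original header; callers would
 * normally use the generic atomic64_dec_if_positive() wrapper rather than
 * the arch_ entry point): decrement a count only while it stays
 * non-negative, e.g. for a semaphore-style "try to take one" operation.
 * "available" is a hypothetical counter.
 *
 *	if (atomic64_dec_if_positive(&available) < 0)
 *		return -EAGAIN;
 *
 * The helper returns the decremented value; a negative return means the
 * counter was already zero (or negative) and was left unchanged.
 */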

#endif /* !(__ARCH_SPARC64_ATOMIC__) */