/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.h: Thankfully the V9 is at least reasonable for this
 * stuff.
 *
 * Copyright (C) 1996, 1997, 2000, 2012 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC64_INIT(i) { (i) }

#define arch_atomic_read(v) READ_ONCE((v)->counter)
#define arch_atomic64_read(v) READ_ONCE((v)->counter)

#define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
#define arch_atomic64_set(v, i) WRITE_ONCE(((v)->counter), (i))

#define ATOMIC_OP(op) \
void arch_atomic_##op(int, atomic_t *); \
void arch_atomic64_##op(s64, atomic64_t *);

#define ATOMIC_OP_RETURN(op) \
int arch_atomic_##op##_return(int, atomic_t *); \
s64 arch_atomic64_##op##_return(s64, atomic64_t *);

#define ATOMIC_FETCH_OP(op) \
int arch_atomic_fetch_##op(int, atomic_t *); \
s64 arch_atomic64_fetch_##op(s64, atomic64_t *);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
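
/*
 * ATOMIC_OPS(add), for instance, expands to the prototypes
 *
 *	void arch_atomic_add(int, atomic_t *);
 *	void arch_atomic64_add(s64, atomic64_t *);
 *	int arch_atomic_add_return(int, atomic_t *);
 *	s64 arch_atomic64_add_return(s64, atomic64_t *);
 *	int arch_atomic_fetch_add(int, atomic_t *);
 *	s64 arch_atomic64_fetch_add(s64, atomic64_t *);
 *
 * with the implementations living out of line in arch assembly
 * (arch/sparc/lib/atomic_64.S).  The self-referencing #defines below
 * tell the generic atomic layer that this architecture supplies these
 * ops, so no fallback versions are generated for them.
 */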

#define arch_atomic_add_return arch_atomic_add_return
#define arch_atomic_sub_return arch_atomic_sub_return
#define arch_atomic_fetch_add arch_atomic_fetch_add
#define arch_atomic_fetch_sub arch_atomic_fetch_sub

#define arch_atomic64_add_return arch_atomic64_add_return
#define arch_atomic64_sub_return arch_atomic64_sub_return
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub

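/*
 * The bitwise ops only exist in the plain and fetch_ flavours; the
 * atomic API has no value-returning and/or/xor variants, so ATOMIC_OPS()
 * is redefined without ATOMIC_OP_RETURN before declaring them.
 */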
#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#define arch_atomic_fetch_and arch_atomic_fetch_and
#define arch_atomic_fetch_or arch_atomic_fetch_or
#define arch_atomic_fetch_xor arch_atomic_fetch_xor

#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

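/*
 * Atomically decrement @v, but only if the result does not go negative;
 * also implemented out of line.  Per the usual atomic API convention the
 * returned value is the old value minus one whether or not @v was
 * updated, so a negative return means no decrement took place.
 */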
s64 arch_atomic64_dec_if_positive(atomic64_t *v);
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

#endif /* !(__ARCH_SPARC64_ATOMIC__) */
/* atomic.h: Thankfully the V9 is at least reasonable for this
 * stuff.
 *
 * Copyright (C) 1996, 1997, 2000, 2012 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i) { (i) }
#define ATOMIC64_INIT(i) { (i) }

#define atomic_read(v) READ_ONCE((v)->counter)
#define atomic64_read(v) READ_ONCE((v)->counter)

#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v, i) WRITE_ONCE(((v)->counter), (i))

#define ATOMIC_OP(op) \
void atomic_##op(int, atomic_t *); \
void atomic64_##op(long, atomic64_t *);

#define ATOMIC_OP_RETURN(op) \
int atomic_##op##_return(int, atomic_t *); \
long atomic64_##op##_return(long, atomic64_t *);

#define ATOMIC_FETCH_OP(op) \
int atomic_fetch_##op(int, atomic_t *); \
long atomic64_fetch_##op(long, atomic64_t *);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_dec_return(v) atomic_sub_return(1, v)
#define atomic64_dec_return(v) atomic64_sub_return(1, v)

#define atomic_inc_return(v) atomic_add_return(1, v)
#define atomic64_inc_return(v) atomic64_add_return(1, v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_return(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, v) == 0)
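
/*
 * Illustrative usage only (obj and its refcnt member are hypothetical):
 * drop a reference and free the object on the final put.
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 */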

#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

#define atomic_add_negative(i, v) (atomic_add_return(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_return(i, v) < 0)

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

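/*
 * Add @a to @v, unless @v is already @u; returns the value of @v that
 * was observed.  Implemented as a cmpxchg() retry loop: if another CPU
 * changed the counter between the read and the cmpxchg(), re-read and
 * try again.
 */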
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

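/*
 * Same cmpxchg() retry loop for the 64-bit counter, but note the
 * different return convention: this returns whether the add happened
 * (i.e. @v was not @u), not the old value.
 */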
static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

long atomic64_dec_if_positive(atomic64_t *v);

#endif /* !(__ARCH_SPARC64_ATOMIC__) */