/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.h: Thankfully the V9 is at least reasonable for this
 * stuff.
 *
 * Copyright (C) 1996, 1997, 2000, 2012 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v, i)	WRITE_ONCE(((v)->counter), (i))
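
/* Note: the counter accessors above are plain READ_ONCE()/WRITE_ONCE()
 * accesses; they are single-copy atomic but imply no memory barriers.
 */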

#define ATOMIC_OP(op) \
void atomic_##op(int, atomic_t *); \
void atomic64_##op(s64, atomic64_t *);

#define ATOMIC_OP_RETURN(op) \
int atomic_##op##_return(int, atomic_t *); \
s64 atomic64_##op##_return(s64, atomic64_t *);

#define ATOMIC_FETCH_OP(op) \
int atomic_fetch_##op(int, atomic_t *); \
s64 atomic64_fetch_##op(s64, atomic64_t *);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
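
/* These macros only declare the ops; the definitions live out of line,
 * in sparc64 assembly (arch/sparc/lib/atomic_64.S). For reference,
 * ATOMIC_OPS(add) above expands to the declarations:
 *
 *	void atomic_add(int, atomic_t *);
 *	void atomic64_add(s64, atomic64_t *);
 *	int atomic_add_return(int, atomic_t *);
 *	s64 atomic64_add_return(s64, atomic64_t *);
 *	int atomic_fetch_add(int, atomic_t *);
 *	s64 atomic64_fetch_add(s64, atomic64_t *);
 */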

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)
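
/* The bitwise ops get only the void and fetch_##op forms; the kernel's
 * atomic API defines no _return variants for and/or/xor, hence the
 * narrower ATOMIC_OPS() above.
 */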

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
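
/* atomic_cmpxchg() returns the value that was in *v before the operation,
 * whether or not the swap took place; callers compare it against the
 * expected old value to tell.
 */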

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
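
/* Illustrative only: generic helpers such as atomic_add_unless() are
 * built from the primitives above with a cmpxchg loop along these lines
 * (add @a to @v unless @v == @u):
 *
 *	int c = atomic_read(v);
 *	while (c != u) {
 *		int old = atomic_cmpxchg(v, c, c + a);
 *		if (old == c)
 *			break;
 *		c = old;
 *	}
 */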

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

s64 atomic64_dec_if_positive(atomic64_t *v);
#define atomic64_dec_if_positive atomic64_dec_if_positive
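
/* atomic64_dec_if_positive() decrements *v unless the result would be
 * negative, and returns the old value minus one either way. The
 * self-referential #define above tells the generic atomic headers that
 * this architecture supplies its own implementation, so no fallback is
 * generated.
 */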

#endif /* !(__ARCH_SPARC64_ATOMIC__) */