/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Do not include in
 * machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
									\
	return c c_op i;						\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)	\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
									\
	return c;							\
}
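
/*
 * For illustration (not part of the upstream header): expanding
 * ATOMIC_FETCH_OP(add, +) above yields roughly the following. The loop
 * re-reads the counter whenever another CPU changed it between the plain
 * read and the arch_cmpxchg(), retrying until the update lands atomically.
 * The value returned is the one observed just before the add:
 *
 *	static inline int generic_atomic_fetch_add(int i, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = v->counter;
 *		while ((old = arch_cmpxchg(&v->counter, c, c + i)) != c)
 *			c = old;
 *
 *		return c;
 *	}
 */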

#else

#include <linux/irqflags.h>

#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}
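
/*
 * For illustration (not part of the upstream header): on UP,
 * ATOMIC_OP(add, +) expands to roughly the following. Masking local
 * interrupts is enough to make the read-modify-write atomic, because a
 * uniprocessor system has no other CPU that could touch v->counter
 * concurrently:
 *
 *	static inline void generic_atomic_add(int i, atomic_t *v)
 *	{
 *		unsigned long flags;
 *
 *		raw_local_irq_save(flags);
 *		v->counter = v->counter + i;
 *		raw_local_irq_restore(flags);
 *	}
 */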

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)	\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_SMP */

ATOMIC_OP_RETURN(add, +)
ATOMIC_OP_RETURN(sub, -)

ATOMIC_FETCH_OP(add, +)
ATOMIC_FETCH_OP(sub, -)
ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)
ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define arch_atomic_add_return			generic_atomic_add_return
#define arch_atomic_sub_return			generic_atomic_sub_return

#define arch_atomic_fetch_add			generic_atomic_fetch_add
#define arch_atomic_fetch_sub			generic_atomic_fetch_sub
#define arch_atomic_fetch_and			generic_atomic_fetch_and
#define arch_atomic_fetch_or			generic_atomic_fetch_or
#define arch_atomic_fetch_xor			generic_atomic_fetch_xor

#define arch_atomic_add				generic_atomic_add
#define arch_atomic_sub				generic_atomic_sub
#define arch_atomic_and				generic_atomic_and
#define arch_atomic_or				generic_atomic_or
#define arch_atomic_xor				generic_atomic_xor

#define arch_atomic_read(v)			READ_ONCE((v)->counter)
#define arch_atomic_set(v, i)			WRITE_ONCE(((v)->counter), (i))

#define arch_atomic_xchg(ptr, v)		(arch_xchg(&(ptr)->counter, (v)))
#define arch_atomic_cmpxchg(v, old, new)	(arch_cmpxchg(&((v)->counter), (old), (new)))
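
/*
 * Illustrative use of the cmpxchg primitive above (hypothetical "obj" with
 * a "refs" counter, not part of this header): take a reference only if the
 * counter is currently non-zero, retrying when another CPU changes it
 * between the read and the compare-and-exchange:
 *
 *	int old, seen;
 *
 *	old = arch_atomic_read(&obj->refs);
 *	while (old != 0 &&
 *	       (seen = arch_atomic_cmpxchg(&obj->refs, old, old + 1)) != old)
 *		old = seen;
 */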

#endif /* __ASM_GENERIC_ATOMIC_H */
/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not imply a memory barrier; use
 * smp_mb__{before,after}_atomic() where ordering is required.
 */

/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v and returns the result. Implies a full memory
 * barrier.
 */

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c;							\
}

#else

#include <linux/irqflags.h>

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_SMP */

#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#endif

#ifndef atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#endif

#ifndef atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#endif

#ifndef atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#endif

#ifndef atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)
#endif

#ifndef atomic_and
ATOMIC_OP(and, &)
#endif

#ifndef atomic_or
ATOMIC_OP(or, |)
#endif

#ifndef atomic_xor
ATOMIC_OP(xor, ^)
#endif

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

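/*
 * Illustrative static initializer (hypothetical variable name, not part
 * of this header):
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 */
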
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)	READ_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))

#include <linux/irqflags.h>

static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
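
/*
 * Illustrative use (hypothetical "obj" with a "refs" field, not part of
 * this header): release an object once the last reference is dropped:
 *
 *	if (atomic_dec_and_test(&obj->refs))
 *		kfree(obj);
 */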

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))

#ifndef __atomic_add_unless
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c;
}
#endif
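
/*
 * Illustrative use (hypothetical "obj"/"refs", not part of this header):
 * take a reference only if the count has not already dropped to zero.
 * __atomic_add_unless() returns the value it observed, so a return of 0
 * here means no reference was taken:
 *
 *	if (__atomic_add_unless(&obj->refs, 1, 0) == 0)
 *		return NULL;
 */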

#endif /* __ASM_GENERIC_ATOMIC_H */