/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __READ_ONCE((v)->counter);
}

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__WRITE_ONCE(v->counter, i);
}

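/*
 * Illustrative usage sketch, not part of this header: a per-device byte
 * counter. Kernel code would normally go through the atomic64_*()
 * wrappers generated on top of these arch_*() primitives; the variable
 * and function names below are made up.
 *
 *	static atomic64_t bytes_written = ATOMIC64_INIT(0);
 *
 *	static void stats_reset(void)
 *	{
 *		arch_atomic64_set(&bytes_written, 0);
 *	}
 *
 *	static s64 stats_snapshot(void)
 *	{
 *		return arch_atomic64_read(&bytes_written);
 *	}
 */
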
/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

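/*
 * Illustrative sketch, not part of this header: tracking pages in flight
 * from many CPUs without a lock (the variable name is made up).
 *
 *	static atomic64_t nr_inflight = ATOMIC64_INIT(0);
 *
 *	arch_atomic64_add(nr_pages, &nr_inflight);
 *	...submit and complete the I/O...
 *	arch_atomic64_sub(nr_pages, &nr_inflight);
 */
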
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test

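/*
 * Illustrative sketch, not part of this header: dropping @nr references
 * in one step and freeing on the last one (obj and obj_free() are
 * hypothetical).
 *
 *	if (arch_atomic64_sub_and_test(nr, &obj->refs))
 *		obj_free(obj);
 */
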
/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test

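/*
 * Illustrative sketch, not part of this header: the put() side of a
 * plain reference count (struct obj is hypothetical; kfree() is the
 * usual slab free routine).
 *
 *	struct obj {
 *		atomic64_t refs;
 *	};
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (arch_atomic64_dec_and_test(&o->refs))
 *			kfree(o);
 *	}
 */
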
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
}
#define arch_atomic64_add_negative arch_atomic64_add_negative

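/*
 * Illustrative sketch, not part of this header: charging a signed budget
 * and reacting when the running total dips below zero (budget and
 * throttle() are hypothetical).
 *
 *	if (arch_atomic64_add_negative(-cost, &budget))
 *		throttle();
 */
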
/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}
#define arch_atomic64_add_return arch_atomic64_add_return

static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(-i, v);
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static inline s64 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub

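/*
 * Return-value convention, with a small made-up sketch: *_add_return()
 * yields the value *after* the addition, while *_fetch_add() yields the
 * value *before* it. Both compile to a single lock;xadd.
 *
 *	static atomic64_t next_id = ATOMIC64_INIT(0);
 *
 *	s64 id_new = arch_atomic64_add_return(1, &next_id);
 *	s64 id_old = arch_atomic64_fetch_add(1, &next_id);
 */
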
static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

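/*
 * Illustrative sketch, not part of this header: a lock-free "record the
 * maximum" loop on top of arch_atomic64_try_cmpxchg(). On failure,
 * try_cmpxchg() writes the current counter value back into @old, so the
 * loop does not need to re-read it.
 *
 *	static void hypothetical_atomic64_max(atomic64_t *v, s64 new)
 *	{
 *		s64 old = arch_atomic64_read(v);
 *
 *		do {
 *			if (old >= new)
 *				break;
 *		} while (!arch_atomic64_try_cmpxchg(v, &old, new));
 *	}
 */
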
static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic64_xchg arch_atomic64_xchg

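/*
 * Illustrative sketch, not part of this header: snapshot-and-reset of a
 * statistics counter in one atomic exchange (the variable is made up).
 *
 *	s64 delta = arch_atomic64_xchg(&bytes_since_poll, 0);
 */
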
static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

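/*
 * Unlike add/sub, x86 has no single instruction that both performs a
 * bitwise op and returns the old value, so the fetch_and/or/xor variants
 * here are cmpxchg loops. Illustrative sketch, not part of this header
 * (the FLAG mask and helper are hypothetical): atomically clear pending
 * bits and act on whichever were set.
 *
 *	s64 was = arch_atomic64_fetch_and(~FLAGS_PENDING, &st->flags);
 *	if (was & FLAGS_PENDING)
 *		process_pending(st);
 */
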
static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#endif /* _ASM_X86_ATOMIC64_64_H */