v4.6: arch/x86/include/asm/atomic64_32.h
 
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>

/* A 64-bit atomic type */

typedef struct {
	u64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }
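
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * ATOMIC64_INIT() gives compile-time initialization of an atomic64_t;
 * the variable name is hypothetical.
 */
static atomic64_t example_event_count = ATOMIC64_INIT(0);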

#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)
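
/*
 * Illustrative expansion (editor's sketch, assuming CONFIG_X86_CMPXCHG64):
 * alternative_atomic64(read, "=&A" (r), "c" (v) : "memory") becomes roughly
 *
 *	asm volatile("call %P[func]"
 *		     : "=&A" (r)
 *		     : [func] "i" (atomic64_read_cx8), "c" (v)
 *		     : "memory");
 *
 * i.e. a direct call into the out-of-line cx8 helper, which takes its
 * arguments in registers rather than via the normal C calling convention.
 * Without CONFIG_X86_CMPXCHG64, alternative_call() patches in either the
 * _386 or the _cx8 variant at boot, based on X86_FEATURE_CX8.
 */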

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

/**
 * atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */
static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
{
	return cmpxchg64(&v->counter, o, n);
}

/**
 * atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically exchanges the value of @v with @n and returns
 * the old value.
 */
static inline long long atomic64_xchg(atomic64_t *v, long long n)
{
	long long o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline long long atomic64_read(const atomic64_t *v)
{
	long long r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}
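
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * the classic cmpxchg retry loop, here keeping a running maximum. The
 * helper name is hypothetical.
 */
static inline void example_atomic64_max(atomic64_t *v, long long i)
{
	long long old = atomic64_read(v);

	while (old < i) {
		long long seen = atomic64_cmpxchg(v, old, i);

		if (seen == old)
			break;		/* our value was installed */
		old = seen;		/* lost the race; re-check */
	}
}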

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns the new value of @v.
 */
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
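
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * the _return variants yield the post-operation value, which makes them
 * natural for sequence numbers. The function name is hypothetical.
 */
static inline long long example_next_seq(atomic64_t *seq)
{
	return atomic64_add_return(1, seq);	/* unique, monotonic per call */
}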

/*
 * Other variants with different arithmetic operators:
 */
static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

static inline long long atomic64_inc_return(atomic64_t *v)
{
	long long a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}

static inline long long atomic64_dec_return(atomic64_t *v)
{
	long long a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline long long atomic64_add(long long i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline long long atomic64_sub(long long i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
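
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * the usual reference-count "put" pattern built on dec_and_test. The
 * struct, the free routine, and the function names are hypothetical.
 */
struct example_obj {
	atomic64_t refcount;
};

extern void example_free(struct example_obj *obj);

static inline void example_put(struct example_obj *obj)
{
	/* Only the caller that drops the count to zero frees the object. */
	if (atomic64_dec_and_test(&obj->refcount))
		example_free(obj);
}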

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}

static inline int atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
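
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * the "tryget" counterpart to example_put() above; a new reference may
 * only be taken if the object is not already on its way to being freed.
 */
static inline int example_tryget(struct example_obj *obj)
{
	return atomic64_inc_not_zero(&obj->refcount);	/* 0 if already dead */
}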

static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}

#undef alternative_atomic64
#undef __alternative_atomic64

#define ATOMIC64_OP(op, c_op)						\
static inline void atomic64_##op(long long i, atomic64_t *v)		\
{									\
	long long old, c = 0;						\
	while ((old = atomic64_cmpxchg(v, c, c c_op i)) != c)		\
		c = old;						\
}

ATOMIC64_OP(and, &)
ATOMIC64_OP(or, |)
ATOMIC64_OP(xor, ^)
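
/*
 * Illustrative expansion (editor's sketch): ATOMIC64_OP(and, &) generates
 * a cmpxchg retry loop equivalent to
 *
 *	static inline void atomic64_and(long long i, atomic64_t *v)
 *	{
 *		long long old, c = 0;
 *		while ((old = atomic64_cmpxchg(v, c, c & i)) != c)
 *			c = old;
 *	}
 *
 * The v6.8 version below writes these loops out by hand instead of
 * generating them from a macro.
 */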

#undef ATOMIC64_OP

#endif /* _ASM_X86_ATOMIC64_32_H */
v6.8: arch/x86/include/asm/atomic64_32.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>

/* A 64-bit atomic type */

typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
{
	return arch_cmpxchg64(&v->counter, o, n);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
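
/*
 * Editor's note: the seemingly redundant self-#define is deliberate; the
 * generic atomic headers test "#ifdef arch_atomic64_cmpxchg" to see which
 * operations the architecture provides before generating fallbacks.
 */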

static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{
	s64 o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
#define arch_atomic64_xchg arch_atomic64_xchg

static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}

static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}

static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_add_return arch_atomic64_add_return

static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return

static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return

static __always_inline s64 arch_atomic64_add(s64 i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

static __always_inline s64 arch_atomic64_sub(s64 i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc

static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec

static __always_inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless

static __always_inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero

static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

#undef alternative_atomic64
#undef __alternative_atomic64

static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;
}

static __always_inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;
}

static __always_inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static __always_inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;
}

static __always_inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c + i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))
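
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * unlike the _return variants, the fetch_ variants return the value seen
 * *before* the operation. The function name is hypothetical.
 */
static __always_inline s64 example_claim_slot(atomic64_t *next_slot)
{
	return arch_atomic64_fetch_add(1, next_slot);	/* index we reserved */
}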

#endif /* _ASM_X86_ATOMIC64_32_H */