include/asm-generic/atomic-long.h — reference listings from the Linux kernel.
First listing: v4.10.11 (macro-generated wrappers).
Second listing: v5.14.15 (machine-generated explicit wrappers).

v4.10.11
 
 
 
 
 
  1#ifndef _ASM_GENERIC_ATOMIC_LONG_H
  2#define _ASM_GENERIC_ATOMIC_LONG_H
  3/*
  4 * Copyright (C) 2005 Silicon Graphics, Inc.
  5 *	Christoph Lameter
  6 *
  7 * Allows to provide arch independent atomic definitions without the need to
  8 * edit all arch specific atomic.h files.
  9 */
 10
 
 11#include <asm/types.h>
 12
 13/*
 14 * Suppport for atomic_long_t
 15 *
 16 * Casts for parameters are avoided for existing atomic functions in order to
 17 * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
 18 * macros of a platform may have.
 19 */
 
 
 
 
 20
 21#if BITS_PER_LONG == 64
 22
 23typedef atomic64_t atomic_long_t;
 
 
 
 
 24
 25#define ATOMIC_LONG_INIT(i)	ATOMIC64_INIT(i)
 26#define ATOMIC_LONG_PFX(x)	atomic64 ## x
 
 
 
 27
 28#else
 
 
 
 
 29
 30typedef atomic_t atomic_long_t;
 
 
 
 
 31
 32#define ATOMIC_LONG_INIT(i)	ATOMIC_INIT(i)
 33#define ATOMIC_LONG_PFX(x)	atomic ## x
 
 
 
 34
 35#endif
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 36
 37#define ATOMIC_LONG_READ_OP(mo)						\
 38static inline long atomic_long_read##mo(const atomic_long_t *l)		\
 39{									\
 40	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
 41									\
 42	return (long)ATOMIC_LONG_PFX(_read##mo)(v);			\
 43}
 44ATOMIC_LONG_READ_OP()
 45ATOMIC_LONG_READ_OP(_acquire)
 46
 47#undef ATOMIC_LONG_READ_OP
 
 
 
 
 48
 49#define ATOMIC_LONG_SET_OP(mo)						\
 50static inline void atomic_long_set##mo(atomic_long_t *l, long i)	\
 51{									\
 52	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
 53									\
 54	ATOMIC_LONG_PFX(_set##mo)(v, i);				\
 55}
 56ATOMIC_LONG_SET_OP()
 57ATOMIC_LONG_SET_OP(_release)
 58
 59#undef ATOMIC_LONG_SET_OP
 
 
 
 
 60
 61#define ATOMIC_LONG_ADD_SUB_OP(op, mo)					\
 62static inline long							\
 63atomic_long_##op##_return##mo(long i, atomic_long_t *l)			\
 64{									\
 65	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
 66									\
 67	return (long)ATOMIC_LONG_PFX(_##op##_return##mo)(i, v);		\
 68}
 69ATOMIC_LONG_ADD_SUB_OP(add,)
 70ATOMIC_LONG_ADD_SUB_OP(add, _relaxed)
 71ATOMIC_LONG_ADD_SUB_OP(add, _acquire)
 72ATOMIC_LONG_ADD_SUB_OP(add, _release)
 73ATOMIC_LONG_ADD_SUB_OP(sub,)
 74ATOMIC_LONG_ADD_SUB_OP(sub, _relaxed)
 75ATOMIC_LONG_ADD_SUB_OP(sub, _acquire)
 76ATOMIC_LONG_ADD_SUB_OP(sub, _release)
 77
 78#undef ATOMIC_LONG_ADD_SUB_OP
 79
 80#define atomic_long_cmpxchg_relaxed(l, old, new) \
 81	(ATOMIC_LONG_PFX(_cmpxchg_relaxed)((ATOMIC_LONG_PFX(_t) *)(l), \
 82					   (old), (new)))
 83#define atomic_long_cmpxchg_acquire(l, old, new) \
 84	(ATOMIC_LONG_PFX(_cmpxchg_acquire)((ATOMIC_LONG_PFX(_t) *)(l), \
 85					   (old), (new)))
 86#define atomic_long_cmpxchg_release(l, old, new) \
 87	(ATOMIC_LONG_PFX(_cmpxchg_release)((ATOMIC_LONG_PFX(_t) *)(l), \
 88					   (old), (new)))
 89#define atomic_long_cmpxchg(l, old, new) \
 90	(ATOMIC_LONG_PFX(_cmpxchg)((ATOMIC_LONG_PFX(_t) *)(l), (old), (new)))
 91
 92#define atomic_long_xchg_relaxed(v, new) \
 93	(ATOMIC_LONG_PFX(_xchg_relaxed)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
 94#define atomic_long_xchg_acquire(v, new) \
 95	(ATOMIC_LONG_PFX(_xchg_acquire)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
 96#define atomic_long_xchg_release(v, new) \
 97	(ATOMIC_LONG_PFX(_xchg_release)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
 98#define atomic_long_xchg(v, new) \
 99	(ATOMIC_LONG_PFX(_xchg)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
100
101static __always_inline void atomic_long_inc(atomic_long_t *l)
 
102{
103	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
 
104
105	ATOMIC_LONG_PFX(_inc)(v);
 
 
 
106}
107
108static __always_inline void atomic_long_dec(atomic_long_t *l)
 
109{
110	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
 
111
112	ATOMIC_LONG_PFX(_dec)(v);
 
 
 
113}
114
115#define ATOMIC_LONG_FETCH_OP(op, mo)					\
116static inline long							\
117atomic_long_fetch_##op##mo(long i, atomic_long_t *l)			\
118{									\
119	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
120									\
121	return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(i, v);		\
122}
123
124ATOMIC_LONG_FETCH_OP(add, )
125ATOMIC_LONG_FETCH_OP(add, _relaxed)
126ATOMIC_LONG_FETCH_OP(add, _acquire)
127ATOMIC_LONG_FETCH_OP(add, _release)
128ATOMIC_LONG_FETCH_OP(sub, )
129ATOMIC_LONG_FETCH_OP(sub, _relaxed)
130ATOMIC_LONG_FETCH_OP(sub, _acquire)
131ATOMIC_LONG_FETCH_OP(sub, _release)
132ATOMIC_LONG_FETCH_OP(and, )
133ATOMIC_LONG_FETCH_OP(and, _relaxed)
134ATOMIC_LONG_FETCH_OP(and, _acquire)
135ATOMIC_LONG_FETCH_OP(and, _release)
136ATOMIC_LONG_FETCH_OP(andnot, )
137ATOMIC_LONG_FETCH_OP(andnot, _relaxed)
138ATOMIC_LONG_FETCH_OP(andnot, _acquire)
139ATOMIC_LONG_FETCH_OP(andnot, _release)
140ATOMIC_LONG_FETCH_OP(or, )
141ATOMIC_LONG_FETCH_OP(or, _relaxed)
142ATOMIC_LONG_FETCH_OP(or, _acquire)
143ATOMIC_LONG_FETCH_OP(or, _release)
144ATOMIC_LONG_FETCH_OP(xor, )
145ATOMIC_LONG_FETCH_OP(xor, _relaxed)
146ATOMIC_LONG_FETCH_OP(xor, _acquire)
147ATOMIC_LONG_FETCH_OP(xor, _release)
148
149#undef ATOMIC_LONG_FETCH_OP
 
 
 
 
150
151#define ATOMIC_LONG_FETCH_INC_DEC_OP(op, mo)					\
152static inline long							\
153atomic_long_fetch_##op##mo(atomic_long_t *l)				\
154{									\
155	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
156									\
157	return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(v);		\
158}
159
160ATOMIC_LONG_FETCH_INC_DEC_OP(inc,)
161ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _relaxed)
162ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _acquire)
163ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _release)
164ATOMIC_LONG_FETCH_INC_DEC_OP(dec,)
165ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _relaxed)
166ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _acquire)
167ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _release)
168
169#undef ATOMIC_LONG_FETCH_INC_DEC_OP
 
 
 
 
170
171#define ATOMIC_LONG_OP(op)						\
172static __always_inline void						\
173atomic_long_##op(long i, atomic_long_t *l)				\
174{									\
175	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
176									\
177	ATOMIC_LONG_PFX(_##op)(i, v);					\
178}
179
180ATOMIC_LONG_OP(add)
181ATOMIC_LONG_OP(sub)
182ATOMIC_LONG_OP(and)
183ATOMIC_LONG_OP(andnot)
184ATOMIC_LONG_OP(or)
185ATOMIC_LONG_OP(xor)
186
187#undef ATOMIC_LONG_OP
 
 
 
 
188
189static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
 
190{
191	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
 
192
193	return ATOMIC_LONG_PFX(_sub_and_test)(i, v);
 
 
 
194}
195
196static inline int atomic_long_dec_and_test(atomic_long_t *l)
 
197{
198	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
 
199
200	return ATOMIC_LONG_PFX(_dec_and_test)(v);
 
 
 
201}
202
203static inline int atomic_long_inc_and_test(atomic_long_t *l)
 
204{
205	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
 
206
207	return ATOMIC_LONG_PFX(_inc_and_test)(v);
 
 
 
208}
209
210static inline int atomic_long_add_negative(long i, atomic_long_t *l)
 
211{
212	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
 
213
214	return ATOMIC_LONG_PFX(_add_negative)(i, v);
 
 
 
215}
216
217#define ATOMIC_LONG_INC_DEC_OP(op, mo)					\
218static inline long							\
219atomic_long_##op##_return##mo(atomic_long_t *l)				\
220{									\
221	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
222									\
223	return (long)ATOMIC_LONG_PFX(_##op##_return##mo)(v);		\
224}
225ATOMIC_LONG_INC_DEC_OP(inc,)
226ATOMIC_LONG_INC_DEC_OP(inc, _relaxed)
227ATOMIC_LONG_INC_DEC_OP(inc, _acquire)
228ATOMIC_LONG_INC_DEC_OP(inc, _release)
229ATOMIC_LONG_INC_DEC_OP(dec,)
230ATOMIC_LONG_INC_DEC_OP(dec, _relaxed)
231ATOMIC_LONG_INC_DEC_OP(dec, _acquire)
232ATOMIC_LONG_INC_DEC_OP(dec, _release)
233
234#undef ATOMIC_LONG_INC_DEC_OP
 
 
 
 
235
236static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
 
237{
238	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
 
239
240	return (long)ATOMIC_LONG_PFX(_add_unless)(v, a, u);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
241}
242
243#define atomic_long_inc_not_zero(l) \
244	ATOMIC_LONG_PFX(_inc_not_zero)((ATOMIC_LONG_PFX(_t) *)(l))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
245
246#endif  /*  _ASM_GENERIC_ATOMIC_LONG_H  */
 
 
v5.14.15 — same header, now machine-generated (see the generation notice below)
   1// SPDX-License-Identifier: GPL-2.0
   2
   3// Generated by scripts/atomic/gen-atomic-long.sh
   4// DO NOT MODIFY THIS FILE DIRECTLY
   5
   6#ifndef _ASM_GENERIC_ATOMIC_LONG_H
   7#define _ASM_GENERIC_ATOMIC_LONG_H
 
 
 
 
 
 
 
   8
   9#include <linux/compiler.h>
  10#include <asm/types.h>
  11
  12#ifdef CONFIG_64BIT
  13typedef atomic64_t atomic_long_t;
  14#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
  15#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
  16#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
  17#else
  18typedef atomic_t atomic_long_t;
  19#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
  20#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
  21#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
  22#endif
  23
  24#ifdef CONFIG_64BIT
  25
  26static __always_inline long
  27atomic_long_read(const atomic_long_t *v)
  28{
  29	return atomic64_read(v);
  30}
  31
  32static __always_inline long
  33atomic_long_read_acquire(const atomic_long_t *v)
  34{
  35	return atomic64_read_acquire(v);
  36}
  37
  38static __always_inline void
  39atomic_long_set(atomic_long_t *v, long i)
  40{
  41	atomic64_set(v, i);
  42}
  43
  44static __always_inline void
  45atomic_long_set_release(atomic_long_t *v, long i)
  46{
  47	atomic64_set_release(v, i);
  48}
  49
  50static __always_inline void
  51atomic_long_add(long i, atomic_long_t *v)
  52{
  53	atomic64_add(i, v);
  54}
  55
  56static __always_inline long
  57atomic_long_add_return(long i, atomic_long_t *v)
  58{
  59	return atomic64_add_return(i, v);
  60}
  61
  62static __always_inline long
  63atomic_long_add_return_acquire(long i, atomic_long_t *v)
  64{
  65	return atomic64_add_return_acquire(i, v);
  66}
  67
  68static __always_inline long
  69atomic_long_add_return_release(long i, atomic_long_t *v)
  70{
  71	return atomic64_add_return_release(i, v);
  72}
  73
  74static __always_inline long
  75atomic_long_add_return_relaxed(long i, atomic_long_t *v)
  76{
  77	return atomic64_add_return_relaxed(i, v);
  78}
  79
  80static __always_inline long
  81atomic_long_fetch_add(long i, atomic_long_t *v)
  82{
  83	return atomic64_fetch_add(i, v);
  84}
  85
  86static __always_inline long
  87atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
  88{
  89	return atomic64_fetch_add_acquire(i, v);
  90}
  91
  92static __always_inline long
  93atomic_long_fetch_add_release(long i, atomic_long_t *v)
  94{
  95	return atomic64_fetch_add_release(i, v);
  96}
  97
  98static __always_inline long
  99atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
 100{
 101	return atomic64_fetch_add_relaxed(i, v);
 102}
 103
 104static __always_inline void
 105atomic_long_sub(long i, atomic_long_t *v)
 106{
 107	atomic64_sub(i, v);
 108}
 109
 110static __always_inline long
 111atomic_long_sub_return(long i, atomic_long_t *v)
 112{
 113	return atomic64_sub_return(i, v);
 114}
 115
 116static __always_inline long
 117atomic_long_sub_return_acquire(long i, atomic_long_t *v)
 118{
 119	return atomic64_sub_return_acquire(i, v);
 120}
 121
 122static __always_inline long
 123atomic_long_sub_return_release(long i, atomic_long_t *v)
 124{
 125	return atomic64_sub_return_release(i, v);
 126}
 127
 128static __always_inline long
 129atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
 130{
 131	return atomic64_sub_return_relaxed(i, v);
 132}
 133
 134static __always_inline long
 135atomic_long_fetch_sub(long i, atomic_long_t *v)
 136{
 137	return atomic64_fetch_sub(i, v);
 138}
 139
 140static __always_inline long
 141atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
 142{
 143	return atomic64_fetch_sub_acquire(i, v);
 144}
 145
 146static __always_inline long
 147atomic_long_fetch_sub_release(long i, atomic_long_t *v)
 148{
 149	return atomic64_fetch_sub_release(i, v);
 150}
 151
 152static __always_inline long
 153atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
 154{
 155	return atomic64_fetch_sub_relaxed(i, v);
 156}
 157
 158static __always_inline void
 159atomic_long_inc(atomic_long_t *v)
 160{
 161	atomic64_inc(v);
 162}
 163
 164static __always_inline long
 165atomic_long_inc_return(atomic_long_t *v)
 166{
 167	return atomic64_inc_return(v);
 168}
 169
 170static __always_inline long
 171atomic_long_inc_return_acquire(atomic_long_t *v)
 172{
 173	return atomic64_inc_return_acquire(v);
 174}
 175
 176static __always_inline long
 177atomic_long_inc_return_release(atomic_long_t *v)
 178{
 179	return atomic64_inc_return_release(v);
 180}
 181
 182static __always_inline long
 183atomic_long_inc_return_relaxed(atomic_long_t *v)
 184{
 185	return atomic64_inc_return_relaxed(v);
 186}
 187
 188static __always_inline long
 189atomic_long_fetch_inc(atomic_long_t *v)
 190{
 191	return atomic64_fetch_inc(v);
 192}
 193
 194static __always_inline long
 195atomic_long_fetch_inc_acquire(atomic_long_t *v)
 196{
 197	return atomic64_fetch_inc_acquire(v);
 198}
 199
 200static __always_inline long
 201atomic_long_fetch_inc_release(atomic_long_t *v)
 202{
 203	return atomic64_fetch_inc_release(v);
 204}
 205
 206static __always_inline long
 207atomic_long_fetch_inc_relaxed(atomic_long_t *v)
 208{
 209	return atomic64_fetch_inc_relaxed(v);
 210}
 211
 212static __always_inline void
 213atomic_long_dec(atomic_long_t *v)
 214{
 215	atomic64_dec(v);
 216}
 217
 218static __always_inline long
 219atomic_long_dec_return(atomic_long_t *v)
 220{
 221	return atomic64_dec_return(v);
 222}
 223
 224static __always_inline long
 225atomic_long_dec_return_acquire(atomic_long_t *v)
 226{
 227	return atomic64_dec_return_acquire(v);
 228}
 229
 230static __always_inline long
 231atomic_long_dec_return_release(atomic_long_t *v)
 232{
 233	return atomic64_dec_return_release(v);
 234}
 235
 236static __always_inline long
 237atomic_long_dec_return_relaxed(atomic_long_t *v)
 238{
 239	return atomic64_dec_return_relaxed(v);
 240}
 241
 242static __always_inline long
 243atomic_long_fetch_dec(atomic_long_t *v)
 244{
 245	return atomic64_fetch_dec(v);
 246}
 247
 248static __always_inline long
 249atomic_long_fetch_dec_acquire(atomic_long_t *v)
 250{
 251	return atomic64_fetch_dec_acquire(v);
 252}
 253
 254static __always_inline long
 255atomic_long_fetch_dec_release(atomic_long_t *v)
 256{
 257	return atomic64_fetch_dec_release(v);
 258}
 259
 260static __always_inline long
 261atomic_long_fetch_dec_relaxed(atomic_long_t *v)
 262{
 263	return atomic64_fetch_dec_relaxed(v);
 264}
 265
 266static __always_inline void
 267atomic_long_and(long i, atomic_long_t *v)
 268{
 269	atomic64_and(i, v);
 270}
 271
 272static __always_inline long
 273atomic_long_fetch_and(long i, atomic_long_t *v)
 274{
 275	return atomic64_fetch_and(i, v);
 276}
 277
 278static __always_inline long
 279atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
 280{
 281	return atomic64_fetch_and_acquire(i, v);
 282}
 283
 284static __always_inline long
 285atomic_long_fetch_and_release(long i, atomic_long_t *v)
 286{
 287	return atomic64_fetch_and_release(i, v);
 288}
 289
 290static __always_inline long
 291atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
 292{
 293	return atomic64_fetch_and_relaxed(i, v);
 294}
 295
 296static __always_inline void
 297atomic_long_andnot(long i, atomic_long_t *v)
 298{
 299	atomic64_andnot(i, v);
 300}
 301
 302static __always_inline long
 303atomic_long_fetch_andnot(long i, atomic_long_t *v)
 304{
 305	return atomic64_fetch_andnot(i, v);
 306}
 307
 308static __always_inline long
 309atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
 310{
 311	return atomic64_fetch_andnot_acquire(i, v);
 312}
 313
 314static __always_inline long
 315atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
 316{
 317	return atomic64_fetch_andnot_release(i, v);
 318}
 319
 320static __always_inline long
 321atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
 322{
 323	return atomic64_fetch_andnot_relaxed(i, v);
 324}
 325
 326static __always_inline void
 327atomic_long_or(long i, atomic_long_t *v)
 328{
 329	atomic64_or(i, v);
 330}
 331
 332static __always_inline long
 333atomic_long_fetch_or(long i, atomic_long_t *v)
 334{
 335	return atomic64_fetch_or(i, v);
 336}
 337
 338static __always_inline long
 339atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
 340{
 341	return atomic64_fetch_or_acquire(i, v);
 342}
 343
 344static __always_inline long
 345atomic_long_fetch_or_release(long i, atomic_long_t *v)
 346{
 347	return atomic64_fetch_or_release(i, v);
 348}
 349
 350static __always_inline long
 351atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
 352{
 353	return atomic64_fetch_or_relaxed(i, v);
 354}
 355
 356static __always_inline void
 357atomic_long_xor(long i, atomic_long_t *v)
 358{
 359	atomic64_xor(i, v);
 360}
 361
 362static __always_inline long
 363atomic_long_fetch_xor(long i, atomic_long_t *v)
 364{
 365	return atomic64_fetch_xor(i, v);
 366}
 367
 368static __always_inline long
 369atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
 370{
 371	return atomic64_fetch_xor_acquire(i, v);
 372}
 373
 374static __always_inline long
 375atomic_long_fetch_xor_release(long i, atomic_long_t *v)
 376{
 377	return atomic64_fetch_xor_release(i, v);
 378}
 379
 380static __always_inline long
 381atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
 382{
 383	return atomic64_fetch_xor_relaxed(i, v);
 384}
 385
 386static __always_inline long
 387atomic_long_xchg(atomic_long_t *v, long i)
 388{
 389	return atomic64_xchg(v, i);
 390}
 391
 392static __always_inline long
 393atomic_long_xchg_acquire(atomic_long_t *v, long i)
 394{
 395	return atomic64_xchg_acquire(v, i);
 396}
 397
 398static __always_inline long
 399atomic_long_xchg_release(atomic_long_t *v, long i)
 400{
 401	return atomic64_xchg_release(v, i);
 402}
 403
 404static __always_inline long
 405atomic_long_xchg_relaxed(atomic_long_t *v, long i)
 406{
 407	return atomic64_xchg_relaxed(v, i);
 408}
 409
 410static __always_inline long
 411atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
 412{
 413	return atomic64_cmpxchg(v, old, new);
 414}
 415
 416static __always_inline long
 417atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
 418{
 419	return atomic64_cmpxchg_acquire(v, old, new);
 420}
 421
 422static __always_inline long
 423atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
 424{
 425	return atomic64_cmpxchg_release(v, old, new);
 426}
 427
 428static __always_inline long
 429atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
 430{
 431	return atomic64_cmpxchg_relaxed(v, old, new);
 432}
 433
 434static __always_inline bool
 435atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
 436{
 437	return atomic64_try_cmpxchg(v, (s64 *)old, new);
 438}
 439
 440static __always_inline bool
 441atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
 442{
 443	return atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
 444}
 445
 446static __always_inline bool
 447atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
 448{
 449	return atomic64_try_cmpxchg_release(v, (s64 *)old, new);
 450}
 451
 452static __always_inline bool
 453atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
 454{
 455	return atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
 456}
 457
 458static __always_inline bool
 459atomic_long_sub_and_test(long i, atomic_long_t *v)
 460{
 461	return atomic64_sub_and_test(i, v);
 462}
 463
 464static __always_inline bool
 465atomic_long_dec_and_test(atomic_long_t *v)
 466{
 467	return atomic64_dec_and_test(v);
 468}
 469
 470static __always_inline bool
 471atomic_long_inc_and_test(atomic_long_t *v)
 472{
 473	return atomic64_inc_and_test(v);
 474}
 475
 476static __always_inline bool
 477atomic_long_add_negative(long i, atomic_long_t *v)
 478{
 479	return atomic64_add_negative(i, v);
 480}
 481
 482static __always_inline long
 483atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
 484{
 485	return atomic64_fetch_add_unless(v, a, u);
 486}
 487
 488static __always_inline bool
 489atomic_long_add_unless(atomic_long_t *v, long a, long u)
 490{
 491	return atomic64_add_unless(v, a, u);
 
 
 492}
 
 
 493
 494static __always_inline bool
 495atomic_long_inc_not_zero(atomic_long_t *v)
 496{
 497	return atomic64_inc_not_zero(v);
 498}
 499
 500static __always_inline bool
 501atomic_long_inc_unless_negative(atomic_long_t *v)
 502{
 503	return atomic64_inc_unless_negative(v);
 
 
 504}
 
 
 505
 506static __always_inline bool
 507atomic_long_dec_unless_positive(atomic_long_t *v)
 508{
 509	return atomic64_dec_unless_positive(v);
 510}
 511
 512static __always_inline long
 513atomic_long_dec_if_positive(atomic_long_t *v)
 514{
 515	return atomic64_dec_if_positive(v);
 
 
 
 516}
 
 
 
 
 
 
 
 
 517
 518#else /* CONFIG_64BIT */
 519
 520static __always_inline long
 521atomic_long_read(const atomic_long_t *v)
 522{
 523	return atomic_read(v);
 524}
 
 
 
 
 
 
 525
 526static __always_inline long
 527atomic_long_read_acquire(const atomic_long_t *v)
 528{
 529	return atomic_read_acquire(v);
 530}
 
 
 
 531
 532static __always_inline void
 533atomic_long_set(atomic_long_t *v, long i)
 534{
 535	atomic_set(v, i);
 536}
 537
 538static __always_inline void
 539atomic_long_set_release(atomic_long_t *v, long i)
 540{
 541	atomic_set_release(v, i);
 542}
 543
 544static __always_inline void
 545atomic_long_add(long i, atomic_long_t *v)
 546{
 547	atomic_add(i, v);
 548}
 549
 550static __always_inline long
 551atomic_long_add_return(long i, atomic_long_t *v)
 552{
 553	return atomic_add_return(i, v);
 554}
 555
 556static __always_inline long
 557atomic_long_add_return_acquire(long i, atomic_long_t *v)
 558{
 559	return atomic_add_return_acquire(i, v);
 
 
 
 560}
 561
 562static __always_inline long
 563atomic_long_add_return_release(long i, atomic_long_t *v)
 564{
 565	return atomic_add_return_release(i, v);
 566}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 567
 568static __always_inline long
 569atomic_long_add_return_relaxed(long i, atomic_long_t *v)
 570{
 571	return atomic_add_return_relaxed(i, v);
 572}
 573
 574static __always_inline long
 575atomic_long_fetch_add(long i, atomic_long_t *v)
 576{
 577	return atomic_fetch_add(i, v);
 
 
 
 578}
 579
 580static __always_inline long
 581atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
 582{
 583	return atomic_fetch_add_acquire(i, v);
 584}
 
 
 
 585
 586static __always_inline long
 587atomic_long_fetch_add_release(long i, atomic_long_t *v)
 588{
 589	return atomic_fetch_add_release(i, v);
 590}
 591
 592static __always_inline long
 593atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
 594{
 595	return atomic_fetch_add_relaxed(i, v);
 
 
 
 596}
 597
 598static __always_inline void
 599atomic_long_sub(long i, atomic_long_t *v)
 600{
 601	atomic_sub(i, v);
 602}
 
 603
 604static __always_inline long
 605atomic_long_sub_return(long i, atomic_long_t *v)
 606{
 607	return atomic_sub_return(i, v);
 608}
 609
 610static __always_inline long
 611atomic_long_sub_return_acquire(long i, atomic_long_t *v)
 612{
 613	return atomic_sub_return_acquire(i, v);
 614}
 615
 616static __always_inline long
 617atomic_long_sub_return_release(long i, atomic_long_t *v)
 618{
 619	return atomic_sub_return_release(i, v);
 620}
 621
 622static __always_inline long
 623atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
 624{
 625	return atomic_sub_return_relaxed(i, v);
 626}
 627
 628static __always_inline long
 629atomic_long_fetch_sub(long i, atomic_long_t *v)
 630{
 631	return atomic_fetch_sub(i, v);
 632}
 633
 634static __always_inline long
 635atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
 636{
 637	return atomic_fetch_sub_acquire(i, v);
 638}
 639
 640static __always_inline long
 641atomic_long_fetch_sub_release(long i, atomic_long_t *v)
 642{
 643	return atomic_fetch_sub_release(i, v);
 644}
 645
 646static __always_inline long
 647atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
 648{
 649	return atomic_fetch_sub_relaxed(i, v);
 650}
 651
 652static __always_inline void
 653atomic_long_inc(atomic_long_t *v)
 654{
 655	atomic_inc(v);
 656}
 657
 658static __always_inline long
 659atomic_long_inc_return(atomic_long_t *v)
 660{
 661	return atomic_inc_return(v);
 
 
 
 662}
 
 
 
 
 
 
 
 
 663
 664static __always_inline long
 665atomic_long_inc_return_acquire(atomic_long_t *v)
 666{
 667	return atomic_inc_return_acquire(v);
 668}
 669
 670static __always_inline long
 671atomic_long_inc_return_release(atomic_long_t *v)
 672{
 673	return atomic_inc_return_release(v);
 674}
 675
 676static __always_inline long
 677atomic_long_inc_return_relaxed(atomic_long_t *v)
 678{
 679	return atomic_inc_return_relaxed(v);
 680}
 681
 682static __always_inline long
 683atomic_long_fetch_inc(atomic_long_t *v)
 684{
 685	return atomic_fetch_inc(v);
 686}
 687
 688static __always_inline long
 689atomic_long_fetch_inc_acquire(atomic_long_t *v)
 690{
 691	return atomic_fetch_inc_acquire(v);
 692}
 693
 694static __always_inline long
 695atomic_long_fetch_inc_release(atomic_long_t *v)
 696{
 697	return atomic_fetch_inc_release(v);
 698}
 699
 700static __always_inline long
 701atomic_long_fetch_inc_relaxed(atomic_long_t *v)
 702{
 703	return atomic_fetch_inc_relaxed(v);
 704}
 705
 706static __always_inline void
 707atomic_long_dec(atomic_long_t *v)
 708{
 709	atomic_dec(v);
 710}
 711
 712static __always_inline long
 713atomic_long_dec_return(atomic_long_t *v)
 714{
 715	return atomic_dec_return(v);
 716}
 717
 718static __always_inline long
 719atomic_long_dec_return_acquire(atomic_long_t *v)
 720{
 721	return atomic_dec_return_acquire(v);
 722}
 723
 724static __always_inline long
 725atomic_long_dec_return_release(atomic_long_t *v)
 726{
 727	return atomic_dec_return_release(v);
 728}
 729
 730static __always_inline long
 731atomic_long_dec_return_relaxed(atomic_long_t *v)
 732{
 733	return atomic_dec_return_relaxed(v);
 734}
 735
 736static __always_inline long
 737atomic_long_fetch_dec(atomic_long_t *v)
 738{
 739	return atomic_fetch_dec(v);
 740}
 741
 742static __always_inline long
 743atomic_long_fetch_dec_acquire(atomic_long_t *v)
 744{
 745	return atomic_fetch_dec_acquire(v);
 746}
 747
 748static __always_inline long
 749atomic_long_fetch_dec_release(atomic_long_t *v)
 750{
 751	return atomic_fetch_dec_release(v);
 752}
 753
 754static __always_inline long
 755atomic_long_fetch_dec_relaxed(atomic_long_t *v)
 756{
 757	return atomic_fetch_dec_relaxed(v);
 758}
 759
 760static __always_inline void
 761atomic_long_and(long i, atomic_long_t *v)
 762{
 763	atomic_and(i, v);
 764}
 765
 766static __always_inline long
 767atomic_long_fetch_and(long i, atomic_long_t *v)
 768{
 769	return atomic_fetch_and(i, v);
 770}
 771
 772static __always_inline long
 773atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
 774{
 775	return atomic_fetch_and_acquire(i, v);
 776}
 777
 778static __always_inline long
 779atomic_long_fetch_and_release(long i, atomic_long_t *v)
 780{
 781	return atomic_fetch_and_release(i, v);
 782}
 783
 784static __always_inline long
 785atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
 786{
 787	return atomic_fetch_and_relaxed(i, v);
 788}
 789
 790static __always_inline void
 791atomic_long_andnot(long i, atomic_long_t *v)
 792{
 793	atomic_andnot(i, v);
 794}
 795
 796static __always_inline long
 797atomic_long_fetch_andnot(long i, atomic_long_t *v)
 798{
 799	return atomic_fetch_andnot(i, v);
 800}
 801
 802static __always_inline long
 803atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
 804{
 805	return atomic_fetch_andnot_acquire(i, v);
 806}
 807
 808static __always_inline long
 809atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
 810{
 811	return atomic_fetch_andnot_release(i, v);
 812}
 813
 814static __always_inline long
 815atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
 816{
 817	return atomic_fetch_andnot_relaxed(i, v);
 818}
 819
 820static __always_inline void
 821atomic_long_or(long i, atomic_long_t *v)
 822{
 823	atomic_or(i, v);
 824}
 825
 826static __always_inline long
 827atomic_long_fetch_or(long i, atomic_long_t *v)
 828{
 829	return atomic_fetch_or(i, v);
 830}
 831
 832static __always_inline long
 833atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
 834{
 835	return atomic_fetch_or_acquire(i, v);
 836}
 837
 838static __always_inline long
 839atomic_long_fetch_or_release(long i, atomic_long_t *v)
 840{
 841	return atomic_fetch_or_release(i, v);
 842}
 843
 844static __always_inline long
 845atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
 846{
 847	return atomic_fetch_or_relaxed(i, v);
 848}
 849
 850static __always_inline void
 851atomic_long_xor(long i, atomic_long_t *v)
 852{
 853	atomic_xor(i, v);
 854}
 855
 856static __always_inline long
 857atomic_long_fetch_xor(long i, atomic_long_t *v)
 858{
 859	return atomic_fetch_xor(i, v);
 860}
 861
 862static __always_inline long
 863atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
 864{
 865	return atomic_fetch_xor_acquire(i, v);
 866}
 867
 868static __always_inline long
 869atomic_long_fetch_xor_release(long i, atomic_long_t *v)
 870{
 871	return atomic_fetch_xor_release(i, v);
 872}
 873
 874static __always_inline long
 875atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
 876{
 877	return atomic_fetch_xor_relaxed(i, v);
 878}
 879
 880static __always_inline long
 881atomic_long_xchg(atomic_long_t *v, long i)
 882{
 883	return atomic_xchg(v, i);
 884}
 885
 886static __always_inline long
 887atomic_long_xchg_acquire(atomic_long_t *v, long i)
 888{
 889	return atomic_xchg_acquire(v, i);
 890}
 891
 892static __always_inline long
 893atomic_long_xchg_release(atomic_long_t *v, long i)
 894{
 895	return atomic_xchg_release(v, i);
 896}
 897
 898static __always_inline long
 899atomic_long_xchg_relaxed(atomic_long_t *v, long i)
 900{
 901	return atomic_xchg_relaxed(v, i);
 902}
 903
 904static __always_inline long
 905atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
 906{
 907	return atomic_cmpxchg(v, old, new);
 908}
 909
 910static __always_inline long
 911atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
 912{
 913	return atomic_cmpxchg_acquire(v, old, new);
 914}
 915
 916static __always_inline long
 917atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
 918{
 919	return atomic_cmpxchg_release(v, old, new);
 920}
 921
 922static __always_inline long
 923atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
 924{
 925	return atomic_cmpxchg_relaxed(v, old, new);
 926}
 927
 928static __always_inline bool
 929atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
 930{
 931	return atomic_try_cmpxchg(v, (int *)old, new);
 932}
 933
 934static __always_inline bool
 935atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
 936{
 937	return atomic_try_cmpxchg_acquire(v, (int *)old, new);
 938}
 939
 940static __always_inline bool
 941atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
 942{
 943	return atomic_try_cmpxchg_release(v, (int *)old, new);
 944}
 945
 946static __always_inline bool
 947atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
 948{
 949	return atomic_try_cmpxchg_relaxed(v, (int *)old, new);
 950}
 951
 952static __always_inline bool
 953atomic_long_sub_and_test(long i, atomic_long_t *v)
 954{
 955	return atomic_sub_and_test(i, v);
 956}
 957
 958static __always_inline bool
 959atomic_long_dec_and_test(atomic_long_t *v)
 960{
 961	return atomic_dec_and_test(v);
 962}
 963
 964static __always_inline bool
 965atomic_long_inc_and_test(atomic_long_t *v)
 966{
 967	return atomic_inc_and_test(v);
 968}
 969
 970static __always_inline bool
 971atomic_long_add_negative(long i, atomic_long_t *v)
 972{
 973	return atomic_add_negative(i, v);
 974}
 975
 976static __always_inline long
 977atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
 978{
 979	return atomic_fetch_add_unless(v, a, u);
 980}
 981
 982static __always_inline bool
 983atomic_long_add_unless(atomic_long_t *v, long a, long u)
 984{
 985	return atomic_add_unless(v, a, u);
 986}
 987
 988static __always_inline bool
 989atomic_long_inc_not_zero(atomic_long_t *v)
 990{
 991	return atomic_inc_not_zero(v);
 992}
 993
 994static __always_inline bool
 995atomic_long_inc_unless_negative(atomic_long_t *v)
 996{
 997	return atomic_inc_unless_negative(v);
 998}
 999
1000static __always_inline bool
1001atomic_long_dec_unless_positive(atomic_long_t *v)
1002{
1003	return atomic_dec_unless_positive(v);
1004}
1005
1006static __always_inline long
1007atomic_long_dec_if_positive(atomic_long_t *v)
1008{
1009	return atomic_dec_if_positive(v);
1010}
1011
1012#endif /* CONFIG_64BIT */
1013#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
1014// a624200981f552b2c6be4f32fe44da8289f30d87