1#ifndef _ASM_GENERIC_ATOMIC_LONG_H
2#define _ASM_GENERIC_ATOMIC_LONG_H
3/*
4 * Copyright (C) 2005 Silicon Graphics, Inc.
5 * Christoph Lameter
6 *
7 * Allows to provide arch independent atomic definitions without the need to
8 * edit all arch specific atomic.h files.
9 */
10
11#include <asm/types.h>
12
13/*
 * Support for atomic_long_t
15 *
16 * Casts for parameters are avoided for existing atomic functions in order to
17 * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
18 * macros of a platform may have.
19 */
20
21#if BITS_PER_LONG == 64
22
23typedef atomic64_t atomic_long_t;
24
25#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
26
27static inline long atomic_long_read(atomic_long_t *l)
28{
29 atomic64_t *v = (atomic64_t *)l;
30
31 return (long)atomic64_read(v);
32}
33
34static inline void atomic_long_set(atomic_long_t *l, long i)
35{
36 atomic64_t *v = (atomic64_t *)l;
37
38 atomic64_set(v, i);
39}
40
41static inline void atomic_long_inc(atomic_long_t *l)
42{
43 atomic64_t *v = (atomic64_t *)l;
44
45 atomic64_inc(v);
46}
47
48static inline void atomic_long_dec(atomic_long_t *l)
49{
50 atomic64_t *v = (atomic64_t *)l;
51
52 atomic64_dec(v);
53}
54
55static inline void atomic_long_add(long i, atomic_long_t *l)
56{
57 atomic64_t *v = (atomic64_t *)l;
58
59 atomic64_add(i, v);
60}
61
62static inline void atomic_long_sub(long i, atomic_long_t *l)
63{
64 atomic64_t *v = (atomic64_t *)l;
65
66 atomic64_sub(i, v);
67}
68
69static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
70{
71 atomic64_t *v = (atomic64_t *)l;
72
73 return atomic64_sub_and_test(i, v);
74}
75
76static inline int atomic_long_dec_and_test(atomic_long_t *l)
77{
78 atomic64_t *v = (atomic64_t *)l;
79
80 return atomic64_dec_and_test(v);
81}
82
83static inline int atomic_long_inc_and_test(atomic_long_t *l)
84{
85 atomic64_t *v = (atomic64_t *)l;
86
87 return atomic64_inc_and_test(v);
88}
89
90static inline int atomic_long_add_negative(long i, atomic_long_t *l)
91{
92 atomic64_t *v = (atomic64_t *)l;
93
94 return atomic64_add_negative(i, v);
95}
96
97static inline long atomic_long_add_return(long i, atomic_long_t *l)
98{
99 atomic64_t *v = (atomic64_t *)l;
100
101 return (long)atomic64_add_return(i, v);
102}
103
104static inline long atomic_long_sub_return(long i, atomic_long_t *l)
105{
106 atomic64_t *v = (atomic64_t *)l;
107
108 return (long)atomic64_sub_return(i, v);
109}
110
111static inline long atomic_long_inc_return(atomic_long_t *l)
112{
113 atomic64_t *v = (atomic64_t *)l;
114
115 return (long)atomic64_inc_return(v);
116}
117
118static inline long atomic_long_dec_return(atomic_long_t *l)
119{
120 atomic64_t *v = (atomic64_t *)l;
121
122 return (long)atomic64_dec_return(v);
123}
124
125static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
126{
127 atomic64_t *v = (atomic64_t *)l;
128
129 return (long)atomic64_add_unless(v, a, u);
130}
131
/* Macro forwarders (64-bit flavour); the casts here are rvalue-only. */
#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))

#define atomic_long_cmpxchg(l, old, new) \
	(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic64_xchg((atomic64_t *)(v), (new)))
138
139#else /* BITS_PER_LONG == 64 */
140
141typedef atomic_t atomic_long_t;
142
143#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
144static inline long atomic_long_read(atomic_long_t *l)
145{
146 atomic_t *v = (atomic_t *)l;
147
148 return (long)atomic_read(v);
149}
150
151static inline void atomic_long_set(atomic_long_t *l, long i)
152{
153 atomic_t *v = (atomic_t *)l;
154
155 atomic_set(v, i);
156}
157
158static inline void atomic_long_inc(atomic_long_t *l)
159{
160 atomic_t *v = (atomic_t *)l;
161
162 atomic_inc(v);
163}
164
165static inline void atomic_long_dec(atomic_long_t *l)
166{
167 atomic_t *v = (atomic_t *)l;
168
169 atomic_dec(v);
170}
171
172static inline void atomic_long_add(long i, atomic_long_t *l)
173{
174 atomic_t *v = (atomic_t *)l;
175
176 atomic_add(i, v);
177}
178
179static inline void atomic_long_sub(long i, atomic_long_t *l)
180{
181 atomic_t *v = (atomic_t *)l;
182
183 atomic_sub(i, v);
184}
185
186static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
187{
188 atomic_t *v = (atomic_t *)l;
189
190 return atomic_sub_and_test(i, v);
191}
192
193static inline int atomic_long_dec_and_test(atomic_long_t *l)
194{
195 atomic_t *v = (atomic_t *)l;
196
197 return atomic_dec_and_test(v);
198}
199
200static inline int atomic_long_inc_and_test(atomic_long_t *l)
201{
202 atomic_t *v = (atomic_t *)l;
203
204 return atomic_inc_and_test(v);
205}
206
207static inline int atomic_long_add_negative(long i, atomic_long_t *l)
208{
209 atomic_t *v = (atomic_t *)l;
210
211 return atomic_add_negative(i, v);
212}
213
214static inline long atomic_long_add_return(long i, atomic_long_t *l)
215{
216 atomic_t *v = (atomic_t *)l;
217
218 return (long)atomic_add_return(i, v);
219}
220
221static inline long atomic_long_sub_return(long i, atomic_long_t *l)
222{
223 atomic_t *v = (atomic_t *)l;
224
225 return (long)atomic_sub_return(i, v);
226}
227
228static inline long atomic_long_inc_return(atomic_long_t *l)
229{
230 atomic_t *v = (atomic_t *)l;
231
232 return (long)atomic_inc_return(v);
233}
234
235static inline long atomic_long_dec_return(atomic_long_t *l)
236{
237 atomic_t *v = (atomic_t *)l;
238
239 return (long)atomic_dec_return(v);
240}
241
242static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
243{
244 atomic_t *v = (atomic_t *)l;
245
246 return (long)atomic_add_unless(v, a, u);
247}
248
/* Macro forwarders (32-bit flavour); the casts here are rvalue-only. */
#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))

#define atomic_long_cmpxchg(l, old, new) \
	(atomic_cmpxchg((atomic_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic_xchg((atomic_t *)(v), (new)))
255
256#endif /* BITS_PER_LONG == 64 */
257
258#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

// NOTE(review): this is a second, newer copy of atomic-long.h concatenated
// after the legacy hand-written version above.  It uses the SAME include
// guard (_ASM_GENERIC_ATOMIC_LONG_H) as the first copy, so when built as
// one file this entire second copy is skipped by the preprocessor.
// Confirm which version is wanted and keep only one.
#ifndef _ASM_GENERIC_ATOMIC_LONG_H
#define _ASM_GENERIC_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

// atomic_long_t tracks the machine word: atomic64_t on 64-bit kernels,
// atomic_t on 32-bit kernels.  The cond_read helpers are pure aliases.
#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
#endif
23
24#ifdef CONFIG_64BIT
25
/* CONFIG_64BIT: atomic_long_t is atomic64_t, so forward directly. */
static __always_inline long
atomic_long_read(const atomic_long_t *v)
{
	return atomic64_read(v);
}

static __always_inline long
atomic_long_read_acquire(const atomic_long_t *v)
{
	return atomic64_read_acquire(v);
}

static __always_inline void
atomic_long_set(atomic_long_t *v, long i)
{
	atomic64_set(v, i);
}

static __always_inline void
atomic_long_set_release(atomic_long_t *v, long i)
{
	atomic64_set_release(v, i);
}
49
50static __always_inline void
51atomic_long_add(long i, atomic_long_t *v)
52{
53 atomic64_add(i, v);
54}
55
56static __always_inline long
57atomic_long_add_return(long i, atomic_long_t *v)
58{
59 return atomic64_add_return(i, v);
60}
61
62static __always_inline long
63atomic_long_add_return_acquire(long i, atomic_long_t *v)
64{
65 return atomic64_add_return_acquire(i, v);
66}
67
68static __always_inline long
69atomic_long_add_return_release(long i, atomic_long_t *v)
70{
71 return atomic64_add_return_release(i, v);
72}
73
74static __always_inline long
75atomic_long_add_return_relaxed(long i, atomic_long_t *v)
76{
77 return atomic64_add_return_relaxed(i, v);
78}
79
80static __always_inline long
81atomic_long_fetch_add(long i, atomic_long_t *v)
82{
83 return atomic64_fetch_add(i, v);
84}
85
86static __always_inline long
87atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
88{
89 return atomic64_fetch_add_acquire(i, v);
90}
91
92static __always_inline long
93atomic_long_fetch_add_release(long i, atomic_long_t *v)
94{
95 return atomic64_fetch_add_release(i, v);
96}
97
98static __always_inline long
99atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
100{
101 return atomic64_fetch_add_relaxed(i, v);
102}
103
104static __always_inline void
105atomic_long_sub(long i, atomic_long_t *v)
106{
107 atomic64_sub(i, v);
108}
109
110static __always_inline long
111atomic_long_sub_return(long i, atomic_long_t *v)
112{
113 return atomic64_sub_return(i, v);
114}
115
116static __always_inline long
117atomic_long_sub_return_acquire(long i, atomic_long_t *v)
118{
119 return atomic64_sub_return_acquire(i, v);
120}
121
122static __always_inline long
123atomic_long_sub_return_release(long i, atomic_long_t *v)
124{
125 return atomic64_sub_return_release(i, v);
126}
127
128static __always_inline long
129atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
130{
131 return atomic64_sub_return_relaxed(i, v);
132}
133
134static __always_inline long
135atomic_long_fetch_sub(long i, atomic_long_t *v)
136{
137 return atomic64_fetch_sub(i, v);
138}
139
140static __always_inline long
141atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
142{
143 return atomic64_fetch_sub_acquire(i, v);
144}
145
146static __always_inline long
147atomic_long_fetch_sub_release(long i, atomic_long_t *v)
148{
149 return atomic64_fetch_sub_release(i, v);
150}
151
152static __always_inline long
153atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
154{
155 return atomic64_fetch_sub_relaxed(i, v);
156}
157
158static __always_inline void
159atomic_long_inc(atomic_long_t *v)
160{
161 atomic64_inc(v);
162}
163
164static __always_inline long
165atomic_long_inc_return(atomic_long_t *v)
166{
167 return atomic64_inc_return(v);
168}
169
170static __always_inline long
171atomic_long_inc_return_acquire(atomic_long_t *v)
172{
173 return atomic64_inc_return_acquire(v);
174}
175
176static __always_inline long
177atomic_long_inc_return_release(atomic_long_t *v)
178{
179 return atomic64_inc_return_release(v);
180}
181
182static __always_inline long
183atomic_long_inc_return_relaxed(atomic_long_t *v)
184{
185 return atomic64_inc_return_relaxed(v);
186}
187
188static __always_inline long
189atomic_long_fetch_inc(atomic_long_t *v)
190{
191 return atomic64_fetch_inc(v);
192}
193
194static __always_inline long
195atomic_long_fetch_inc_acquire(atomic_long_t *v)
196{
197 return atomic64_fetch_inc_acquire(v);
198}
199
200static __always_inline long
201atomic_long_fetch_inc_release(atomic_long_t *v)
202{
203 return atomic64_fetch_inc_release(v);
204}
205
206static __always_inline long
207atomic_long_fetch_inc_relaxed(atomic_long_t *v)
208{
209 return atomic64_fetch_inc_relaxed(v);
210}
211
212static __always_inline void
213atomic_long_dec(atomic_long_t *v)
214{
215 atomic64_dec(v);
216}
217
218static __always_inline long
219atomic_long_dec_return(atomic_long_t *v)
220{
221 return atomic64_dec_return(v);
222}
223
224static __always_inline long
225atomic_long_dec_return_acquire(atomic_long_t *v)
226{
227 return atomic64_dec_return_acquire(v);
228}
229
230static __always_inline long
231atomic_long_dec_return_release(atomic_long_t *v)
232{
233 return atomic64_dec_return_release(v);
234}
235
236static __always_inline long
237atomic_long_dec_return_relaxed(atomic_long_t *v)
238{
239 return atomic64_dec_return_relaxed(v);
240}
241
242static __always_inline long
243atomic_long_fetch_dec(atomic_long_t *v)
244{
245 return atomic64_fetch_dec(v);
246}
247
248static __always_inline long
249atomic_long_fetch_dec_acquire(atomic_long_t *v)
250{
251 return atomic64_fetch_dec_acquire(v);
252}
253
254static __always_inline long
255atomic_long_fetch_dec_release(atomic_long_t *v)
256{
257 return atomic64_fetch_dec_release(v);
258}
259
260static __always_inline long
261atomic_long_fetch_dec_relaxed(atomic_long_t *v)
262{
263 return atomic64_fetch_dec_relaxed(v);
264}
265
266static __always_inline void
267atomic_long_and(long i, atomic_long_t *v)
268{
269 atomic64_and(i, v);
270}
271
272static __always_inline long
273atomic_long_fetch_and(long i, atomic_long_t *v)
274{
275 return atomic64_fetch_and(i, v);
276}
277
278static __always_inline long
279atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
280{
281 return atomic64_fetch_and_acquire(i, v);
282}
283
284static __always_inline long
285atomic_long_fetch_and_release(long i, atomic_long_t *v)
286{
287 return atomic64_fetch_and_release(i, v);
288}
289
290static __always_inline long
291atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
292{
293 return atomic64_fetch_and_relaxed(i, v);
294}
295
296static __always_inline void
297atomic_long_andnot(long i, atomic_long_t *v)
298{
299 atomic64_andnot(i, v);
300}
301
302static __always_inline long
303atomic_long_fetch_andnot(long i, atomic_long_t *v)
304{
305 return atomic64_fetch_andnot(i, v);
306}
307
308static __always_inline long
309atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
310{
311 return atomic64_fetch_andnot_acquire(i, v);
312}
313
314static __always_inline long
315atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
316{
317 return atomic64_fetch_andnot_release(i, v);
318}
319
320static __always_inline long
321atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
322{
323 return atomic64_fetch_andnot_relaxed(i, v);
324}
325
326static __always_inline void
327atomic_long_or(long i, atomic_long_t *v)
328{
329 atomic64_or(i, v);
330}
331
332static __always_inline long
333atomic_long_fetch_or(long i, atomic_long_t *v)
334{
335 return atomic64_fetch_or(i, v);
336}
337
338static __always_inline long
339atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
340{
341 return atomic64_fetch_or_acquire(i, v);
342}
343
344static __always_inline long
345atomic_long_fetch_or_release(long i, atomic_long_t *v)
346{
347 return atomic64_fetch_or_release(i, v);
348}
349
350static __always_inline long
351atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
352{
353 return atomic64_fetch_or_relaxed(i, v);
354}
355
356static __always_inline void
357atomic_long_xor(long i, atomic_long_t *v)
358{
359 atomic64_xor(i, v);
360}
361
362static __always_inline long
363atomic_long_fetch_xor(long i, atomic_long_t *v)
364{
365 return atomic64_fetch_xor(i, v);
366}
367
368static __always_inline long
369atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
370{
371 return atomic64_fetch_xor_acquire(i, v);
372}
373
374static __always_inline long
375atomic_long_fetch_xor_release(long i, atomic_long_t *v)
376{
377 return atomic64_fetch_xor_release(i, v);
378}
379
380static __always_inline long
381atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
382{
383 return atomic64_fetch_xor_relaxed(i, v);
384}
385
386static __always_inline long
387atomic_long_xchg(atomic_long_t *v, long i)
388{
389 return atomic64_xchg(v, i);
390}
391
392static __always_inline long
393atomic_long_xchg_acquire(atomic_long_t *v, long i)
394{
395 return atomic64_xchg_acquire(v, i);
396}
397
398static __always_inline long
399atomic_long_xchg_release(atomic_long_t *v, long i)
400{
401 return atomic64_xchg_release(v, i);
402}
403
404static __always_inline long
405atomic_long_xchg_relaxed(atomic_long_t *v, long i)
406{
407 return atomic64_xchg_relaxed(v, i);
408}
409
410static __always_inline long
411atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
412{
413 return atomic64_cmpxchg(v, old, new);
414}
415
416static __always_inline long
417atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
418{
419 return atomic64_cmpxchg_acquire(v, old, new);
420}
421
422static __always_inline long
423atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
424{
425 return atomic64_cmpxchg_release(v, old, new);
426}
427
428static __always_inline long
429atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
430{
431 return atomic64_cmpxchg_relaxed(v, old, new);
432}
433
/*
 * try_cmpxchg takes a pointer to the expected value (updated on failure).
 * The (s64 *) cast relies on long and s64 having the same size and
 * representation, which holds in this CONFIG_64BIT branch.
 */
static __always_inline bool
atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return atomic64_try_cmpxchg(v, (s64 *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return atomic64_try_cmpxchg_release(v, (s64 *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
}
457
458static __always_inline bool
459atomic_long_sub_and_test(long i, atomic_long_t *v)
460{
461 return atomic64_sub_and_test(i, v);
462}
463
464static __always_inline bool
465atomic_long_dec_and_test(atomic_long_t *v)
466{
467 return atomic64_dec_and_test(v);
468}
469
470static __always_inline bool
471atomic_long_inc_and_test(atomic_long_t *v)
472{
473 return atomic64_inc_and_test(v);
474}
475
476static __always_inline bool
477atomic_long_add_negative(long i, atomic_long_t *v)
478{
479 return atomic64_add_negative(i, v);
480}
481
482static __always_inline long
483atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
484{
485 return atomic64_fetch_add_unless(v, a, u);
486}
487
488static __always_inline bool
489atomic_long_add_unless(atomic_long_t *v, long a, long u)
490{
491 return atomic64_add_unless(v, a, u);
492}
493
494static __always_inline bool
495atomic_long_inc_not_zero(atomic_long_t *v)
496{
497 return atomic64_inc_not_zero(v);
498}
499
500static __always_inline bool
501atomic_long_inc_unless_negative(atomic_long_t *v)
502{
503 return atomic64_inc_unless_negative(v);
504}
505
506static __always_inline bool
507atomic_long_dec_unless_positive(atomic_long_t *v)
508{
509 return atomic64_dec_unless_positive(v);
510}
511
512static __always_inline long
513atomic_long_dec_if_positive(atomic_long_t *v)
514{
515 return atomic64_dec_if_positive(v);
516}
517
518#else /* CONFIG_64BIT */
519
/* !CONFIG_64BIT: atomic_long_t is atomic_t, so forward directly. */
static __always_inline long
atomic_long_read(const atomic_long_t *v)
{
	return atomic_read(v);
}

static __always_inline long
atomic_long_read_acquire(const atomic_long_t *v)
{
	return atomic_read_acquire(v);
}

static __always_inline void
atomic_long_set(atomic_long_t *v, long i)
{
	atomic_set(v, i);
}

static __always_inline void
atomic_long_set_release(atomic_long_t *v, long i)
{
	atomic_set_release(v, i);
}
543
544static __always_inline void
545atomic_long_add(long i, atomic_long_t *v)
546{
547 atomic_add(i, v);
548}
549
550static __always_inline long
551atomic_long_add_return(long i, atomic_long_t *v)
552{
553 return atomic_add_return(i, v);
554}
555
556static __always_inline long
557atomic_long_add_return_acquire(long i, atomic_long_t *v)
558{
559 return atomic_add_return_acquire(i, v);
560}
561
562static __always_inline long
563atomic_long_add_return_release(long i, atomic_long_t *v)
564{
565 return atomic_add_return_release(i, v);
566}
567
568static __always_inline long
569atomic_long_add_return_relaxed(long i, atomic_long_t *v)
570{
571 return atomic_add_return_relaxed(i, v);
572}
573
574static __always_inline long
575atomic_long_fetch_add(long i, atomic_long_t *v)
576{
577 return atomic_fetch_add(i, v);
578}
579
580static __always_inline long
581atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
582{
583 return atomic_fetch_add_acquire(i, v);
584}
585
586static __always_inline long
587atomic_long_fetch_add_release(long i, atomic_long_t *v)
588{
589 return atomic_fetch_add_release(i, v);
590}
591
592static __always_inline long
593atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
594{
595 return atomic_fetch_add_relaxed(i, v);
596}
597
598static __always_inline void
599atomic_long_sub(long i, atomic_long_t *v)
600{
601 atomic_sub(i, v);
602}
603
604static __always_inline long
605atomic_long_sub_return(long i, atomic_long_t *v)
606{
607 return atomic_sub_return(i, v);
608}
609
610static __always_inline long
611atomic_long_sub_return_acquire(long i, atomic_long_t *v)
612{
613 return atomic_sub_return_acquire(i, v);
614}
615
616static __always_inline long
617atomic_long_sub_return_release(long i, atomic_long_t *v)
618{
619 return atomic_sub_return_release(i, v);
620}
621
622static __always_inline long
623atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
624{
625 return atomic_sub_return_relaxed(i, v);
626}
627
628static __always_inline long
629atomic_long_fetch_sub(long i, atomic_long_t *v)
630{
631 return atomic_fetch_sub(i, v);
632}
633
634static __always_inline long
635atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
636{
637 return atomic_fetch_sub_acquire(i, v);
638}
639
640static __always_inline long
641atomic_long_fetch_sub_release(long i, atomic_long_t *v)
642{
643 return atomic_fetch_sub_release(i, v);
644}
645
646static __always_inline long
647atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
648{
649 return atomic_fetch_sub_relaxed(i, v);
650}
651
652static __always_inline void
653atomic_long_inc(atomic_long_t *v)
654{
655 atomic_inc(v);
656}
657
658static __always_inline long
659atomic_long_inc_return(atomic_long_t *v)
660{
661 return atomic_inc_return(v);
662}
663
664static __always_inline long
665atomic_long_inc_return_acquire(atomic_long_t *v)
666{
667 return atomic_inc_return_acquire(v);
668}
669
670static __always_inline long
671atomic_long_inc_return_release(atomic_long_t *v)
672{
673 return atomic_inc_return_release(v);
674}
675
676static __always_inline long
677atomic_long_inc_return_relaxed(atomic_long_t *v)
678{
679 return atomic_inc_return_relaxed(v);
680}
681
682static __always_inline long
683atomic_long_fetch_inc(atomic_long_t *v)
684{
685 return atomic_fetch_inc(v);
686}
687
688static __always_inline long
689atomic_long_fetch_inc_acquire(atomic_long_t *v)
690{
691 return atomic_fetch_inc_acquire(v);
692}
693
694static __always_inline long
695atomic_long_fetch_inc_release(atomic_long_t *v)
696{
697 return atomic_fetch_inc_release(v);
698}
699
700static __always_inline long
701atomic_long_fetch_inc_relaxed(atomic_long_t *v)
702{
703 return atomic_fetch_inc_relaxed(v);
704}
705
706static __always_inline void
707atomic_long_dec(atomic_long_t *v)
708{
709 atomic_dec(v);
710}
711
712static __always_inline long
713atomic_long_dec_return(atomic_long_t *v)
714{
715 return atomic_dec_return(v);
716}
717
718static __always_inline long
719atomic_long_dec_return_acquire(atomic_long_t *v)
720{
721 return atomic_dec_return_acquire(v);
722}
723
724static __always_inline long
725atomic_long_dec_return_release(atomic_long_t *v)
726{
727 return atomic_dec_return_release(v);
728}
729
730static __always_inline long
731atomic_long_dec_return_relaxed(atomic_long_t *v)
732{
733 return atomic_dec_return_relaxed(v);
734}
735
736static __always_inline long
737atomic_long_fetch_dec(atomic_long_t *v)
738{
739 return atomic_fetch_dec(v);
740}
741
742static __always_inline long
743atomic_long_fetch_dec_acquire(atomic_long_t *v)
744{
745 return atomic_fetch_dec_acquire(v);
746}
747
748static __always_inline long
749atomic_long_fetch_dec_release(atomic_long_t *v)
750{
751 return atomic_fetch_dec_release(v);
752}
753
754static __always_inline long
755atomic_long_fetch_dec_relaxed(atomic_long_t *v)
756{
757 return atomic_fetch_dec_relaxed(v);
758}
759
760static __always_inline void
761atomic_long_and(long i, atomic_long_t *v)
762{
763 atomic_and(i, v);
764}
765
766static __always_inline long
767atomic_long_fetch_and(long i, atomic_long_t *v)
768{
769 return atomic_fetch_and(i, v);
770}
771
772static __always_inline long
773atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
774{
775 return atomic_fetch_and_acquire(i, v);
776}
777
778static __always_inline long
779atomic_long_fetch_and_release(long i, atomic_long_t *v)
780{
781 return atomic_fetch_and_release(i, v);
782}
783
784static __always_inline long
785atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
786{
787 return atomic_fetch_and_relaxed(i, v);
788}
789
790static __always_inline void
791atomic_long_andnot(long i, atomic_long_t *v)
792{
793 atomic_andnot(i, v);
794}
795
796static __always_inline long
797atomic_long_fetch_andnot(long i, atomic_long_t *v)
798{
799 return atomic_fetch_andnot(i, v);
800}
801
802static __always_inline long
803atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
804{
805 return atomic_fetch_andnot_acquire(i, v);
806}
807
808static __always_inline long
809atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
810{
811 return atomic_fetch_andnot_release(i, v);
812}
813
814static __always_inline long
815atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
816{
817 return atomic_fetch_andnot_relaxed(i, v);
818}
819
820static __always_inline void
821atomic_long_or(long i, atomic_long_t *v)
822{
823 atomic_or(i, v);
824}
825
826static __always_inline long
827atomic_long_fetch_or(long i, atomic_long_t *v)
828{
829 return atomic_fetch_or(i, v);
830}
831
832static __always_inline long
833atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
834{
835 return atomic_fetch_or_acquire(i, v);
836}
837
838static __always_inline long
839atomic_long_fetch_or_release(long i, atomic_long_t *v)
840{
841 return atomic_fetch_or_release(i, v);
842}
843
844static __always_inline long
845atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
846{
847 return atomic_fetch_or_relaxed(i, v);
848}
849
850static __always_inline void
851atomic_long_xor(long i, atomic_long_t *v)
852{
853 atomic_xor(i, v);
854}
855
856static __always_inline long
857atomic_long_fetch_xor(long i, atomic_long_t *v)
858{
859 return atomic_fetch_xor(i, v);
860}
861
862static __always_inline long
863atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
864{
865 return atomic_fetch_xor_acquire(i, v);
866}
867
868static __always_inline long
869atomic_long_fetch_xor_release(long i, atomic_long_t *v)
870{
871 return atomic_fetch_xor_release(i, v);
872}
873
874static __always_inline long
875atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
876{
877 return atomic_fetch_xor_relaxed(i, v);
878}
879
880static __always_inline long
881atomic_long_xchg(atomic_long_t *v, long i)
882{
883 return atomic_xchg(v, i);
884}
885
886static __always_inline long
887atomic_long_xchg_acquire(atomic_long_t *v, long i)
888{
889 return atomic_xchg_acquire(v, i);
890}
891
892static __always_inline long
893atomic_long_xchg_release(atomic_long_t *v, long i)
894{
895 return atomic_xchg_release(v, i);
896}
897
898static __always_inline long
899atomic_long_xchg_relaxed(atomic_long_t *v, long i)
900{
901 return atomic_xchg_relaxed(v, i);
902}
903
904static __always_inline long
905atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
906{
907 return atomic_cmpxchg(v, old, new);
908}
909
910static __always_inline long
911atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
912{
913 return atomic_cmpxchg_acquire(v, old, new);
914}
915
916static __always_inline long
917atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
918{
919 return atomic_cmpxchg_release(v, old, new);
920}
921
922static __always_inline long
923atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
924{
925 return atomic_cmpxchg_relaxed(v, old, new);
926}
927
/*
 * try_cmpxchg takes a pointer to the expected value (updated on failure).
 * The (int *) cast relies on long and int having the same size and
 * representation, which holds in this !CONFIG_64BIT branch.
 */
static __always_inline bool
atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return atomic_try_cmpxchg(v, (int *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return atomic_try_cmpxchg_acquire(v, (int *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return atomic_try_cmpxchg_release(v, (int *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return atomic_try_cmpxchg_relaxed(v, (int *)old, new);
}
951
952static __always_inline bool
953atomic_long_sub_and_test(long i, atomic_long_t *v)
954{
955 return atomic_sub_and_test(i, v);
956}
957
958static __always_inline bool
959atomic_long_dec_and_test(atomic_long_t *v)
960{
961 return atomic_dec_and_test(v);
962}
963
964static __always_inline bool
965atomic_long_inc_and_test(atomic_long_t *v)
966{
967 return atomic_inc_and_test(v);
968}
969
970static __always_inline bool
971atomic_long_add_negative(long i, atomic_long_t *v)
972{
973 return atomic_add_negative(i, v);
974}
975
976static __always_inline long
977atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
978{
979 return atomic_fetch_add_unless(v, a, u);
980}
981
982static __always_inline bool
983atomic_long_add_unless(atomic_long_t *v, long a, long u)
984{
985 return atomic_add_unless(v, a, u);
986}
987
988static __always_inline bool
989atomic_long_inc_not_zero(atomic_long_t *v)
990{
991 return atomic_inc_not_zero(v);
992}
993
994static __always_inline bool
995atomic_long_inc_unless_negative(atomic_long_t *v)
996{
997 return atomic_inc_unless_negative(v);
998}
999
1000static __always_inline bool
1001atomic_long_dec_unless_positive(atomic_long_t *v)
1002{
1003 return atomic_dec_unless_positive(v);
1004}
1005
1006static __always_inline long
1007atomic_long_dec_if_positive(atomic_long_t *v)
1008{
1009 return atomic_dec_if_positive(v);
1010}
1011
1012#endif /* CONFIG_64BIT */
1013#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
1014// a624200981f552b2c6be4f32fe44da8289f30d87