// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality an arch's atomic.h file needs to define all
 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file provides atomic_read() that forwards to
 * arch_atomic_read() for actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/instrumented.h>
24static __always_inline int
25atomic_read(const atomic_t *v)
26{
27 instrument_atomic_read(v, sizeof(*v));
28 return arch_atomic_read(v);
29}
30#define atomic_read atomic_read
31
32#if defined(arch_atomic_read_acquire)
33static __always_inline int
34atomic_read_acquire(const atomic_t *v)
35{
36 instrument_atomic_read(v, sizeof(*v));
37 return arch_atomic_read_acquire(v);
38}
39#define atomic_read_acquire atomic_read_acquire
40#endif
41
42static __always_inline void
43atomic_set(atomic_t *v, int i)
44{
45 instrument_atomic_write(v, sizeof(*v));
46 arch_atomic_set(v, i);
47}
48#define atomic_set atomic_set
49
50#if defined(arch_atomic_set_release)
51static __always_inline void
52atomic_set_release(atomic_t *v, int i)
53{
54 instrument_atomic_write(v, sizeof(*v));
55 arch_atomic_set_release(v, i);
56}
57#define atomic_set_release atomic_set_release
58#endif
59
60static __always_inline void
61atomic_add(int i, atomic_t *v)
62{
63 instrument_atomic_write(v, sizeof(*v));
64 arch_atomic_add(i, v);
65}
66#define atomic_add atomic_add
67
68#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
69static __always_inline int
70atomic_add_return(int i, atomic_t *v)
71{
72 instrument_atomic_write(v, sizeof(*v));
73 return arch_atomic_add_return(i, v);
74}
75#define atomic_add_return atomic_add_return
76#endif
77
78#if defined(arch_atomic_add_return_acquire)
79static __always_inline int
80atomic_add_return_acquire(int i, atomic_t *v)
81{
82 instrument_atomic_write(v, sizeof(*v));
83 return arch_atomic_add_return_acquire(i, v);
84}
85#define atomic_add_return_acquire atomic_add_return_acquire
86#endif
87
88#if defined(arch_atomic_add_return_release)
89static __always_inline int
90atomic_add_return_release(int i, atomic_t *v)
91{
92 instrument_atomic_write(v, sizeof(*v));
93 return arch_atomic_add_return_release(i, v);
94}
95#define atomic_add_return_release atomic_add_return_release
96#endif
97
98#if defined(arch_atomic_add_return_relaxed)
99static __always_inline int
100atomic_add_return_relaxed(int i, atomic_t *v)
101{
102 instrument_atomic_write(v, sizeof(*v));
103 return arch_atomic_add_return_relaxed(i, v);
104}
105#define atomic_add_return_relaxed atomic_add_return_relaxed
106#endif
107
108#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
109static __always_inline int
110atomic_fetch_add(int i, atomic_t *v)
111{
112 instrument_atomic_write(v, sizeof(*v));
113 return arch_atomic_fetch_add(i, v);
114}
115#define atomic_fetch_add atomic_fetch_add
116#endif
117
118#if defined(arch_atomic_fetch_add_acquire)
119static __always_inline int
120atomic_fetch_add_acquire(int i, atomic_t *v)
121{
122 instrument_atomic_write(v, sizeof(*v));
123 return arch_atomic_fetch_add_acquire(i, v);
124}
125#define atomic_fetch_add_acquire atomic_fetch_add_acquire
126#endif
127
128#if defined(arch_atomic_fetch_add_release)
129static __always_inline int
130atomic_fetch_add_release(int i, atomic_t *v)
131{
132 instrument_atomic_write(v, sizeof(*v));
133 return arch_atomic_fetch_add_release(i, v);
134}
135#define atomic_fetch_add_release atomic_fetch_add_release
136#endif
137
138#if defined(arch_atomic_fetch_add_relaxed)
139static __always_inline int
140atomic_fetch_add_relaxed(int i, atomic_t *v)
141{
142 instrument_atomic_write(v, sizeof(*v));
143 return arch_atomic_fetch_add_relaxed(i, v);
144}
145#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
146#endif
147
148static __always_inline void
149atomic_sub(int i, atomic_t *v)
150{
151 instrument_atomic_write(v, sizeof(*v));
152 arch_atomic_sub(i, v);
153}
154#define atomic_sub atomic_sub
155
156#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
157static __always_inline int
158atomic_sub_return(int i, atomic_t *v)
159{
160 instrument_atomic_write(v, sizeof(*v));
161 return arch_atomic_sub_return(i, v);
162}
163#define atomic_sub_return atomic_sub_return
164#endif
165
166#if defined(arch_atomic_sub_return_acquire)
167static __always_inline int
168atomic_sub_return_acquire(int i, atomic_t *v)
169{
170 instrument_atomic_write(v, sizeof(*v));
171 return arch_atomic_sub_return_acquire(i, v);
172}
173#define atomic_sub_return_acquire atomic_sub_return_acquire
174#endif
175
176#if defined(arch_atomic_sub_return_release)
177static __always_inline int
178atomic_sub_return_release(int i, atomic_t *v)
179{
180 instrument_atomic_write(v, sizeof(*v));
181 return arch_atomic_sub_return_release(i, v);
182}
183#define atomic_sub_return_release atomic_sub_return_release
184#endif
185
186#if defined(arch_atomic_sub_return_relaxed)
187static __always_inline int
188atomic_sub_return_relaxed(int i, atomic_t *v)
189{
190 instrument_atomic_write(v, sizeof(*v));
191 return arch_atomic_sub_return_relaxed(i, v);
192}
193#define atomic_sub_return_relaxed atomic_sub_return_relaxed
194#endif
195
196#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
197static __always_inline int
198atomic_fetch_sub(int i, atomic_t *v)
199{
200 instrument_atomic_write(v, sizeof(*v));
201 return arch_atomic_fetch_sub(i, v);
202}
203#define atomic_fetch_sub atomic_fetch_sub
204#endif
205
206#if defined(arch_atomic_fetch_sub_acquire)
207static __always_inline int
208atomic_fetch_sub_acquire(int i, atomic_t *v)
209{
210 instrument_atomic_write(v, sizeof(*v));
211 return arch_atomic_fetch_sub_acquire(i, v);
212}
213#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
214#endif
215
216#if defined(arch_atomic_fetch_sub_release)
217static __always_inline int
218atomic_fetch_sub_release(int i, atomic_t *v)
219{
220 instrument_atomic_write(v, sizeof(*v));
221 return arch_atomic_fetch_sub_release(i, v);
222}
223#define atomic_fetch_sub_release atomic_fetch_sub_release
224#endif
225
226#if defined(arch_atomic_fetch_sub_relaxed)
227static __always_inline int
228atomic_fetch_sub_relaxed(int i, atomic_t *v)
229{
230 instrument_atomic_write(v, sizeof(*v));
231 return arch_atomic_fetch_sub_relaxed(i, v);
232}
233#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
234#endif
235
/*
 * atomic_inc() plus the ordering variants of atomic_inc_return() and
 * atomic_fetch_inc().  All are optional: they are only emitted when the
 * arch provides the corresponding arch_ op.
 */
#if defined(arch_atomic_inc)
static __always_inline void
atomic_inc(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#if defined(arch_atomic_fetch_inc)
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

/*
 * atomic_dec() plus the ordering variants of atomic_dec_return() and
 * atomic_fetch_dec().  All are optional, gated on the arch_ op.
 */
#if defined(arch_atomic_dec)
static __always_inline void
atomic_dec(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#if defined(arch_atomic_fetch_dec)
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

416static __always_inline void
417atomic_and(int i, atomic_t *v)
418{
419 instrument_atomic_write(v, sizeof(*v));
420 arch_atomic_and(i, v);
421}
422#define atomic_and atomic_and
423
424#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
425static __always_inline int
426atomic_fetch_and(int i, atomic_t *v)
427{
428 instrument_atomic_write(v, sizeof(*v));
429 return arch_atomic_fetch_and(i, v);
430}
431#define atomic_fetch_and atomic_fetch_and
432#endif
433
434#if defined(arch_atomic_fetch_and_acquire)
435static __always_inline int
436atomic_fetch_and_acquire(int i, atomic_t *v)
437{
438 instrument_atomic_write(v, sizeof(*v));
439 return arch_atomic_fetch_and_acquire(i, v);
440}
441#define atomic_fetch_and_acquire atomic_fetch_and_acquire
442#endif
443
444#if defined(arch_atomic_fetch_and_release)
445static __always_inline int
446atomic_fetch_and_release(int i, atomic_t *v)
447{
448 instrument_atomic_write(v, sizeof(*v));
449 return arch_atomic_fetch_and_release(i, v);
450}
451#define atomic_fetch_and_release atomic_fetch_and_release
452#endif
453
454#if defined(arch_atomic_fetch_and_relaxed)
455static __always_inline int
456atomic_fetch_and_relaxed(int i, atomic_t *v)
457{
458 instrument_atomic_write(v, sizeof(*v));
459 return arch_atomic_fetch_and_relaxed(i, v);
460}
461#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
462#endif
463
/*
 * atomic_andnot() plus the ordering variants of atomic_fetch_andnot().
 * All optional, gated on the arch_ op.
 */
#if defined(arch_atomic_andnot)
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

514static __always_inline void
515atomic_or(int i, atomic_t *v)
516{
517 instrument_atomic_write(v, sizeof(*v));
518 arch_atomic_or(i, v);
519}
520#define atomic_or atomic_or
521
522#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
523static __always_inline int
524atomic_fetch_or(int i, atomic_t *v)
525{
526 instrument_atomic_write(v, sizeof(*v));
527 return arch_atomic_fetch_or(i, v);
528}
529#define atomic_fetch_or atomic_fetch_or
530#endif
531
532#if defined(arch_atomic_fetch_or_acquire)
533static __always_inline int
534atomic_fetch_or_acquire(int i, atomic_t *v)
535{
536 instrument_atomic_write(v, sizeof(*v));
537 return arch_atomic_fetch_or_acquire(i, v);
538}
539#define atomic_fetch_or_acquire atomic_fetch_or_acquire
540#endif
541
542#if defined(arch_atomic_fetch_or_release)
543static __always_inline int
544atomic_fetch_or_release(int i, atomic_t *v)
545{
546 instrument_atomic_write(v, sizeof(*v));
547 return arch_atomic_fetch_or_release(i, v);
548}
549#define atomic_fetch_or_release atomic_fetch_or_release
550#endif
551
552#if defined(arch_atomic_fetch_or_relaxed)
553static __always_inline int
554atomic_fetch_or_relaxed(int i, atomic_t *v)
555{
556 instrument_atomic_write(v, sizeof(*v));
557 return arch_atomic_fetch_or_relaxed(i, v);
558}
559#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
560#endif
561
562static __always_inline void
563atomic_xor(int i, atomic_t *v)
564{
565 instrument_atomic_write(v, sizeof(*v));
566 arch_atomic_xor(i, v);
567}
568#define atomic_xor atomic_xor
569
570#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
571static __always_inline int
572atomic_fetch_xor(int i, atomic_t *v)
573{
574 instrument_atomic_write(v, sizeof(*v));
575 return arch_atomic_fetch_xor(i, v);
576}
577#define atomic_fetch_xor atomic_fetch_xor
578#endif
579
580#if defined(arch_atomic_fetch_xor_acquire)
581static __always_inline int
582atomic_fetch_xor_acquire(int i, atomic_t *v)
583{
584 instrument_atomic_write(v, sizeof(*v));
585 return arch_atomic_fetch_xor_acquire(i, v);
586}
587#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
588#endif
589
590#if defined(arch_atomic_fetch_xor_release)
591static __always_inline int
592atomic_fetch_xor_release(int i, atomic_t *v)
593{
594 instrument_atomic_write(v, sizeof(*v));
595 return arch_atomic_fetch_xor_release(i, v);
596}
597#define atomic_fetch_xor_release atomic_fetch_xor_release
598#endif
599
600#if defined(arch_atomic_fetch_xor_relaxed)
601static __always_inline int
602atomic_fetch_xor_relaxed(int i, atomic_t *v)
603{
604 instrument_atomic_write(v, sizeof(*v));
605 return arch_atomic_fetch_xor_relaxed(i, v);
606}
607#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
608#endif
609
610#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
611static __always_inline int
612atomic_xchg(atomic_t *v, int i)
613{
614 instrument_atomic_write(v, sizeof(*v));
615 return arch_atomic_xchg(v, i);
616}
617#define atomic_xchg atomic_xchg
618#endif
619
620#if defined(arch_atomic_xchg_acquire)
621static __always_inline int
622atomic_xchg_acquire(atomic_t *v, int i)
623{
624 instrument_atomic_write(v, sizeof(*v));
625 return arch_atomic_xchg_acquire(v, i);
626}
627#define atomic_xchg_acquire atomic_xchg_acquire
628#endif
629
630#if defined(arch_atomic_xchg_release)
631static __always_inline int
632atomic_xchg_release(atomic_t *v, int i)
633{
634 instrument_atomic_write(v, sizeof(*v));
635 return arch_atomic_xchg_release(v, i);
636}
637#define atomic_xchg_release atomic_xchg_release
638#endif
639
640#if defined(arch_atomic_xchg_relaxed)
641static __always_inline int
642atomic_xchg_relaxed(atomic_t *v, int i)
643{
644 instrument_atomic_write(v, sizeof(*v));
645 return arch_atomic_xchg_relaxed(v, i);
646}
647#define atomic_xchg_relaxed atomic_xchg_relaxed
648#endif
649
650#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
651static __always_inline int
652atomic_cmpxchg(atomic_t *v, int old, int new)
653{
654 instrument_atomic_write(v, sizeof(*v));
655 return arch_atomic_cmpxchg(v, old, new);
656}
657#define atomic_cmpxchg atomic_cmpxchg
658#endif
659
660#if defined(arch_atomic_cmpxchg_acquire)
661static __always_inline int
662atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
663{
664 instrument_atomic_write(v, sizeof(*v));
665 return arch_atomic_cmpxchg_acquire(v, old, new);
666}
667#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
668#endif
669
670#if defined(arch_atomic_cmpxchg_release)
671static __always_inline int
672atomic_cmpxchg_release(atomic_t *v, int old, int new)
673{
674 instrument_atomic_write(v, sizeof(*v));
675 return arch_atomic_cmpxchg_release(v, old, new);
676}
677#define atomic_cmpxchg_release atomic_cmpxchg_release
678#endif
679
680#if defined(arch_atomic_cmpxchg_relaxed)
681static __always_inline int
682atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
683{
684 instrument_atomic_write(v, sizeof(*v));
685 return arch_atomic_cmpxchg_relaxed(v, old, new);
686}
687#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
688#endif
689
/*
 * atomic_try_cmpxchg() ordering variants.  Both *v and *old are
 * instrumented: on failure the arch op writes the observed value back
 * through 'old'.
 */
#if defined(arch_atomic_try_cmpxchg)
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

/*
 * RMW-and-test ops: perform the modification and return a boolean
 * derived from the result (zero / negative).
 */
#if defined(arch_atomic_sub_and_test)
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif

/*
 * Conditional RMW ops: only modify *v when the stated condition holds
 * (unless-equal / not-zero / sign constraints).  All optional, gated on
 * the arch_ op.
 */
#if defined(arch_atomic_fetch_add_unless)
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif

834static __always_inline s64
835atomic64_read(const atomic64_t *v)
836{
837 instrument_atomic_read(v, sizeof(*v));
838 return arch_atomic64_read(v);
839}
840#define atomic64_read atomic64_read
841
842#if defined(arch_atomic64_read_acquire)
843static __always_inline s64
844atomic64_read_acquire(const atomic64_t *v)
845{
846 instrument_atomic_read(v, sizeof(*v));
847 return arch_atomic64_read_acquire(v);
848}
849#define atomic64_read_acquire atomic64_read_acquire
850#endif
851
852static __always_inline void
853atomic64_set(atomic64_t *v, s64 i)
854{
855 instrument_atomic_write(v, sizeof(*v));
856 arch_atomic64_set(v, i);
857}
858#define atomic64_set atomic64_set
859
860#if defined(arch_atomic64_set_release)
861static __always_inline void
862atomic64_set_release(atomic64_t *v, s64 i)
863{
864 instrument_atomic_write(v, sizeof(*v));
865 arch_atomic64_set_release(v, i);
866}
867#define atomic64_set_release atomic64_set_release
868#endif
869
870static __always_inline void
871atomic64_add(s64 i, atomic64_t *v)
872{
873 instrument_atomic_write(v, sizeof(*v));
874 arch_atomic64_add(i, v);
875}
876#define atomic64_add atomic64_add
877
878#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
879static __always_inline s64
880atomic64_add_return(s64 i, atomic64_t *v)
881{
882 instrument_atomic_write(v, sizeof(*v));
883 return arch_atomic64_add_return(i, v);
884}
885#define atomic64_add_return atomic64_add_return
886#endif
887
888#if defined(arch_atomic64_add_return_acquire)
889static __always_inline s64
890atomic64_add_return_acquire(s64 i, atomic64_t *v)
891{
892 instrument_atomic_write(v, sizeof(*v));
893 return arch_atomic64_add_return_acquire(i, v);
894}
895#define atomic64_add_return_acquire atomic64_add_return_acquire
896#endif
897
898#if defined(arch_atomic64_add_return_release)
899static __always_inline s64
900atomic64_add_return_release(s64 i, atomic64_t *v)
901{
902 instrument_atomic_write(v, sizeof(*v));
903 return arch_atomic64_add_return_release(i, v);
904}
905#define atomic64_add_return_release atomic64_add_return_release
906#endif
907
908#if defined(arch_atomic64_add_return_relaxed)
909static __always_inline s64
910atomic64_add_return_relaxed(s64 i, atomic64_t *v)
911{
912 instrument_atomic_write(v, sizeof(*v));
913 return arch_atomic64_add_return_relaxed(i, v);
914}
915#define atomic64_add_return_relaxed atomic64_add_return_relaxed
916#endif
917
918#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
919static __always_inline s64
920atomic64_fetch_add(s64 i, atomic64_t *v)
921{
922 instrument_atomic_write(v, sizeof(*v));
923 return arch_atomic64_fetch_add(i, v);
924}
925#define atomic64_fetch_add atomic64_fetch_add
926#endif
927
928#if defined(arch_atomic64_fetch_add_acquire)
929static __always_inline s64
930atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
931{
932 instrument_atomic_write(v, sizeof(*v));
933 return arch_atomic64_fetch_add_acquire(i, v);
934}
935#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
936#endif
937
938#if defined(arch_atomic64_fetch_add_release)
939static __always_inline s64
940atomic64_fetch_add_release(s64 i, atomic64_t *v)
941{
942 instrument_atomic_write(v, sizeof(*v));
943 return arch_atomic64_fetch_add_release(i, v);
944}
945#define atomic64_fetch_add_release atomic64_fetch_add_release
946#endif
947
948#if defined(arch_atomic64_fetch_add_relaxed)
949static __always_inline s64
950atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
951{
952 instrument_atomic_write(v, sizeof(*v));
953 return arch_atomic64_fetch_add_relaxed(i, v);
954}
955#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
956#endif
957
958static __always_inline void
959atomic64_sub(s64 i, atomic64_t *v)
960{
961 instrument_atomic_write(v, sizeof(*v));
962 arch_atomic64_sub(i, v);
963}
964#define atomic64_sub atomic64_sub
965
966#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
967static __always_inline s64
968atomic64_sub_return(s64 i, atomic64_t *v)
969{
970 instrument_atomic_write(v, sizeof(*v));
971 return arch_atomic64_sub_return(i, v);
972}
973#define atomic64_sub_return atomic64_sub_return
974#endif
975
976#if defined(arch_atomic64_sub_return_acquire)
977static __always_inline s64
978atomic64_sub_return_acquire(s64 i, atomic64_t *v)
979{
980 instrument_atomic_write(v, sizeof(*v));
981 return arch_atomic64_sub_return_acquire(i, v);
982}
983#define atomic64_sub_return_acquire atomic64_sub_return_acquire
984#endif
985
986#if defined(arch_atomic64_sub_return_release)
987static __always_inline s64
988atomic64_sub_return_release(s64 i, atomic64_t *v)
989{
990 instrument_atomic_write(v, sizeof(*v));
991 return arch_atomic64_sub_return_release(i, v);
992}
993#define atomic64_sub_return_release atomic64_sub_return_release
994#endif
995
996#if defined(arch_atomic64_sub_return_relaxed)
997static __always_inline s64
998atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
999{
1000 instrument_atomic_write(v, sizeof(*v));
1001 return arch_atomic64_sub_return_relaxed(i, v);
1002}
1003#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
1004#endif
1005
1006#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
1007static __always_inline s64
1008atomic64_fetch_sub(s64 i, atomic64_t *v)
1009{
1010 instrument_atomic_write(v, sizeof(*v));
1011 return arch_atomic64_fetch_sub(i, v);
1012}
1013#define atomic64_fetch_sub atomic64_fetch_sub
1014#endif
1015
1016#if defined(arch_atomic64_fetch_sub_acquire)
1017static __always_inline s64
1018atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1019{
1020 instrument_atomic_write(v, sizeof(*v));
1021 return arch_atomic64_fetch_sub_acquire(i, v);
1022}
1023#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
1024#endif
1025
1026#if defined(arch_atomic64_fetch_sub_release)
1027static __always_inline s64
1028atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1029{
1030 instrument_atomic_write(v, sizeof(*v));
1031 return arch_atomic64_fetch_sub_release(i, v);
1032}
1033#define atomic64_fetch_sub_release atomic64_fetch_sub_release
1034#endif
1035
1036#if defined(arch_atomic64_fetch_sub_relaxed)
1037static __always_inline s64
1038atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
1039{
1040 instrument_atomic_write(v, sizeof(*v));
1041 return arch_atomic64_fetch_sub_relaxed(i, v);
1042}
1043#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
1044#endif
1045
/*
 * Instrumented atomic64 increment wrappers: instrument the access on @v,
 * then forward to the arch_ implementation. The *_return variants return
 * the new value; each is provided only if the arch defines its counterpart.
 * NOTE(review): RMW ops instrumented as write-only here (generated code;
 * newer generators use instrument_atomic_read_write()).
 */
#if defined(arch_atomic64_inc)
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif
1095
/*
 * Instrumented atomic64 fetch_inc wrappers: instrument the access on @v,
 * forward to the arch_ implementation, and return the value before the
 * increment. One variant per ordering the arch provides.
 */
#if defined(arch_atomic64_fetch_inc)
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif
1135
/*
 * Instrumented atomic64 decrement wrappers: instrument the access on @v,
 * then forward to the arch_ implementation. The *_return variants return
 * the new value; each is provided only if the arch defines its counterpart.
 */
#if defined(arch_atomic64_dec)
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif

#if defined(arch_atomic64_dec_return)
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif
1185
/*
 * Instrumented atomic64 fetch_dec wrappers: instrument the access on @v,
 * forward to the arch_ implementation, and return the value before the
 * decrement. One variant per ordering the arch provides.
 */
#if defined(arch_atomic64_fetch_dec)
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif
1225
/*
 * Instrumented atomic64 AND wrappers: instrument the access on @v, then
 * forward to the arch_ implementation. atomic64_and() is unconditional;
 * fetch_and() exists when the arch has either the plain or relaxed form
 * (the fallback machinery derives the missing orderings), and the ordered
 * variants only when the arch defines them directly.
 */
static __always_inline void
atomic64_and(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}
#define atomic64_and atomic64_and

#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#if defined(arch_atomic64_fetch_and_acquire)
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#if defined(arch_atomic64_fetch_and_release)
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#if defined(arch_atomic64_fetch_and_relaxed)
static __always_inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#endif
1273
/*
 * Instrumented atomic64 AND-NOT wrappers: instrument the access on @v,
 * then forward to the arch_ implementation. All variants are optional and
 * emitted only when the arch defines the corresponding arch_ operation.
 */
#if defined(arch_atomic64_andnot)
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#if defined(arch_atomic64_fetch_andnot)
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif
1323
/*
 * Instrumented atomic64 OR wrappers: instrument the access on @v, then
 * forward to the arch_ implementation. atomic64_or() is unconditional;
 * fetch_or() and the ordered variants follow the same availability rules
 * as the AND family above.
 */
static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}
#define atomic64_or atomic64_or

#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#if defined(arch_atomic64_fetch_or_acquire)
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#if defined(arch_atomic64_fetch_or_release)
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#if defined(arch_atomic64_fetch_or_relaxed)
static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#endif
1371
/*
 * Instrumented atomic64 XOR wrappers: instrument the access on @v, then
 * forward to the arch_ implementation. Same structure and availability
 * rules as the AND/OR families above.
 */
static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}
#define atomic64_xor atomic64_xor

#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#if defined(arch_atomic64_fetch_xor_acquire)
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#if defined(arch_atomic64_fetch_xor_release)
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#if defined(arch_atomic64_fetch_xor_relaxed)
static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
#endif
1419
/*
 * Instrumented atomic64 exchange wrappers: instrument the access on @v,
 * forward to the arch_ implementation, and return the previous value.
 * Note the (v, i) argument order, unlike the arithmetic ops above.
 */
#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}
#define atomic64_xchg atomic64_xchg
#endif

#if defined(arch_atomic64_xchg_acquire)
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#if defined(arch_atomic64_xchg_release)
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#if defined(arch_atomic64_xchg_relaxed)
static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_relaxed atomic64_xchg_relaxed
#endif
1459
/*
 * Instrumented atomic64 compare-and-exchange wrappers: instrument the
 * access on @v, forward to the arch_ implementation, and return the value
 * read from @v (equal to @old on success).
 */
#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#if defined(arch_atomic64_cmpxchg_acquire)
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#if defined(arch_atomic64_cmpxchg_release)
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#if defined(arch_atomic64_cmpxchg_relaxed)
static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
#endif
1499
/*
 * Instrumented atomic64 try_cmpxchg wrappers: instrument both @v and the
 * caller's expected-value variable *@old, then forward to the arch_
 * implementation. Returns true on success; on failure the arch op may
 * update *@old with the observed value.
 * NOTE(review): *@old is a plain variable accessed non-atomically, yet it
 * is instrumented with instrument_atomic_write() here; later generator
 * versions use instrument_read_write(old, ...). Fix belongs in
 * scripts/atomic/gen-atomic-instrumented.sh.
 */
#if defined(arch_atomic64_try_cmpxchg)
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif
1543
/*
 * Instrumented atomic64 test wrappers: instrument the access on @v, then
 * forward to the arch_ implementation, which performs the RMW and returns
 * a boolean result.
 */
#if defined(arch_atomic64_sub_and_test)
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif
1583
/*
 * Instrumented atomic64 conditional-update wrappers: instrument the access
 * on @v, then forward to the arch_ implementation. These ops only modify
 * @v when the arch-defined condition holds; return types (s64/bool) mirror
 * the arch_ counterparts.
 */
#if defined(arch_atomic64_fetch_add_unless)
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
1643
/*
 * Instrumented xchg() macro family: @ptr is evaluated exactly once into
 * __ai_ptr (avoids multiple evaluation of the macro argument), the access
 * is reported with the pointee's size, then the call forwards to the
 * arch_ implementation. Variadic so it works for any value argument type.
 */
#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
1679
/*
 * Instrumented cmpxchg() macro family: same single-evaluation pattern as
 * xchg() above — capture @ptr once, instrument the access, forward to the
 * arch_ implementation with the old/new values in __VA_ARGS__.
 */
#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
1715
/*
 * Instrumented cmpxchg64() macro family: identical structure to cmpxchg()
 * above, forwarding to the arch's 64-bit compare-and-exchange primitives.
 */
#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
1751
/*
 * Instrumented local/sync/double compare-and-exchange macros. These are
 * always defined (no #if guard): the arch must provide the arch_ forms.
 * The cmpxchg_double*() variants instrument 2 * sizeof(*__ai_ptr) because
 * the operation covers a pair of adjacent words at @ptr.
 */
#define cmpxchg_local(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg64_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);			\
})

#define sync_cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr));	\
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);			\
})


#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr));	\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})
1787
1788#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
1789// 89bf97f3a7509b740845e51ddf31055b48a81f40