v5.4: include/asm-generic/atomic-instrumented.h
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality an arch's atomic.h file needs to define all
 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file provides atomic_read() that forwards to
 * arch_atomic_read() for actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
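/*
 * Editor's note (illustrative sketch, not part of the generated file):
 * an arch is expected to wire itself up roughly as follows, where the
 * file path and the READ_ONCE()-based body are placeholders chosen for
 * the example:
 *
 *	// arch/example/include/asm/atomic.h
 *	static inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *	// ... define all other arch_atomic*() operations ...
 *	#include <asm-generic/atomic-instrumented.h>
 *
 * Including this header last is what generates the un-prefixed,
 * instrumented atomic_read() etc. on top of the arch_ implementations.
 */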
#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

static inline int
atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}
#define atomic_read atomic_read

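/*
 * Editor's note: each wrapper is followed by a self-referential define
 * such as "#define atomic_read atomic_read" above. The idiom makes the
 * operation visible to the preprocessor, so that later generic headers
 * can test with #ifdef whether a given operation has already been
 * provided and only generate fallbacks for the missing ones.
 */
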
#if defined(arch_atomic_read_acquire)
static inline int
atomic_read_acquire(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read_acquire(v);
}
#define atomic_read_acquire atomic_read_acquire
#endif

static inline void
atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}
#define atomic_set atomic_set

#if defined(arch_atomic_set_release)
static inline void
atomic_set_release(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set_release(v, i);
}
#define atomic_set_release atomic_set_release
#endif

static inline void
atomic_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}
#define atomic_add atomic_add

#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
static inline int
atomic_add_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}
#define atomic_add_return atomic_add_return
#endif

#if defined(arch_atomic_add_return_acquire)
static inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return_acquire(i, v);
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#if defined(arch_atomic_add_return_release)
static inline int
atomic_add_return_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return_release(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#if defined(arch_atomic_add_return_relaxed)
static inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_relaxed atomic_add_return_relaxed
#endif

#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
static inline int
atomic_fetch_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}
#define atomic_fetch_add atomic_fetch_add
#endif

#if defined(arch_atomic_fetch_add_acquire)
static inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_acquire(i, v);
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#if defined(arch_atomic_fetch_add_release)
static inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_release(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#if defined(arch_atomic_fetch_add_relaxed)
static inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
#endif

static inline void
atomic_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}
#define atomic_sub atomic_sub

#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
static inline int
atomic_sub_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}
#define atomic_sub_return atomic_sub_return
#endif

#if defined(arch_atomic_sub_return_acquire)
static inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_acquire(i, v);
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#if defined(arch_atomic_sub_return_release)
static inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_release(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#if defined(arch_atomic_sub_return_relaxed)
static inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#endif

#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
static inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#if defined(arch_atomic_fetch_sub_acquire)
static inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_acquire(i, v);
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#if defined(arch_atomic_fetch_sub_release)
static inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_release(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#if defined(arch_atomic_fetch_sub_relaxed)
static inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#endif

#if defined(arch_atomic_inc)
static inline void
atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static inline int
atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static inline int
atomic_inc_return_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static inline int
atomic_inc_return_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#if defined(arch_atomic_fetch_inc)
static inline int
atomic_fetch_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static inline int
atomic_fetch_inc_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

#if defined(arch_atomic_dec)
static inline void
atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static inline int
atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static inline int
atomic_dec_return_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static inline int
atomic_dec_return_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#if defined(arch_atomic_fetch_dec)
static inline int
atomic_fetch_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static inline int
atomic_fetch_dec_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

static inline void
atomic_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}
#define atomic_and atomic_and

#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
static inline int
atomic_fetch_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}
#define atomic_fetch_and atomic_fetch_and
#endif

#if defined(arch_atomic_fetch_and_acquire)
static inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_acquire(i, v);
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#if defined(arch_atomic_fetch_and_release)
static inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_release(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#if defined(arch_atomic_fetch_and_relaxed)
static inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
#endif

#if defined(arch_atomic_andnot)
static inline void
atomic_andnot(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

static inline void
atomic_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}
#define atomic_or atomic_or

#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
static inline int
atomic_fetch_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}
#define atomic_fetch_or atomic_fetch_or
#endif

#if defined(arch_atomic_fetch_or_acquire)
static inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_acquire(i, v);
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#if defined(arch_atomic_fetch_or_release)
static inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_release(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#if defined(arch_atomic_fetch_or_relaxed)
static inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
#endif

static inline void
atomic_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}
#define atomic_xor atomic_xor

#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
static inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#if defined(arch_atomic_fetch_xor_acquire)
static inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_acquire(i, v);
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#if defined(arch_atomic_fetch_xor_release)
static inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_release(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#if defined(arch_atomic_fetch_xor_relaxed)
static inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
#endif

#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
static inline int
atomic_xchg(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}
#define atomic_xchg atomic_xchg
#endif

#if defined(arch_atomic_xchg_acquire)
static inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg_acquire(v, i);
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#if defined(arch_atomic_xchg_release)
static inline int
atomic_xchg_release(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg_release(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#if defined(arch_atomic_xchg_relaxed)
static inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_relaxed atomic_xchg_relaxed
#endif

#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#if defined(arch_atomic_cmpxchg_acquire)
static inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_acquire(v, old, new);
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#if defined(arch_atomic_cmpxchg_release)
static inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_release(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#if defined(arch_atomic_cmpxchg_relaxed)
static inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
#endif

#if defined(arch_atomic_try_cmpxchg)
static inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

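/*
 * Editor's note: @old is checked for a write as well as @v because
 * try_cmpxchg() stores the current value of *@v back through @old on
 * failure. A typical caller (illustrative sketch, LIMIT is a
 * placeholder) loops like this:
 *
 *	int old = atomic_read(v);
 *	do {
 *		if (old == LIMIT)
 *			return false;
 *	} while (!atomic_try_cmpxchg(v, &old, old + 1));
 *	return true;
 */
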
#if defined(arch_atomic_try_cmpxchg_acquire)
static inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

#if defined(arch_atomic_sub_and_test)
static inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static inline bool
atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static inline bool
atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static inline bool
atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif

#if defined(arch_atomic_fetch_add_unless)
static inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static inline bool
atomic_inc_not_zero(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

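/*
 * Editor's note (usage sketch, names are placeholders): the conditional
 * increment helpers above back "take a reference only if the object is
 * still live" patterns, e.g.:
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	// refcount already hit zero, object dying
 */
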
#if defined(arch_atomic_inc_unless_negative)
static inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static inline int
atomic_dec_if_positive(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif

static inline s64
atomic64_read(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}
#define atomic64_read atomic64_read

#if defined(arch_atomic64_read_acquire)
static inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

static inline void
atomic64_set(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}
#define atomic64_set atomic64_set

#if defined(arch_atomic64_set_release)
static inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set_release(v, i);
}
#define atomic64_set_release atomic64_set_release
#endif

static inline void
atomic64_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}
#define atomic64_add atomic64_add

#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
static inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}
#define atomic64_add_return atomic64_add_return
#endif

#if defined(arch_atomic64_add_return_acquire)
static inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_acquire(i, v);
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#if defined(arch_atomic64_add_return_release)
static inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_release(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#if defined(arch_atomic64_add_return_relaxed)
static inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
static inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#if defined(arch_atomic64_fetch_add_acquire)
static inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_acquire(i, v);
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#if defined(arch_atomic64_fetch_add_release)
static inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_release(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#if defined(arch_atomic64_fetch_add_relaxed)
static inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
#endif

static inline void
atomic64_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}
#define atomic64_sub atomic64_sub

#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
static inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}
#define atomic64_sub_return atomic64_sub_return
#endif

#if defined(arch_atomic64_sub_return_acquire)
static inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_acquire(i, v);
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#if defined(arch_atomic64_sub_return_release)
static inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_release(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#if defined(arch_atomic64_sub_return_relaxed)
static inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
static inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#if defined(arch_atomic64_fetch_sub_acquire)
static inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_acquire(i, v);
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#if defined(arch_atomic64_fetch_sub_release)
static inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_release(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#if defined(arch_atomic64_fetch_sub_relaxed)
static inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
#endif

#if defined(arch_atomic64_inc)
static inline void
atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static inline s64
atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#if defined(arch_atomic64_fetch_inc)
static inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#if defined(arch_atomic64_dec)
static inline void
atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif

#if defined(arch_atomic64_dec_return)
static inline s64
atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#if defined(arch_atomic64_fetch_dec)
static inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

static inline void
atomic64_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}
#define atomic64_and atomic64_and

#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
static inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#if defined(arch_atomic64_fetch_and_acquire)
static inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#if defined(arch_atomic64_fetch_and_release)
static inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#if defined(arch_atomic64_fetch_and_relaxed)
static inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#endif

#if defined(arch_atomic64_andnot)
static inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#if defined(arch_atomic64_fetch_andnot)
static inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

static inline void
atomic64_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}
#define atomic64_or atomic64_or

#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
static inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#if defined(arch_atomic64_fetch_or_acquire)
static inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#if defined(arch_atomic64_fetch_or_release)
static inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#if defined(arch_atomic64_fetch_or_relaxed)
static inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#endif

static inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}
#define atomic64_xor atomic64_xor

#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
static inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#if defined(arch_atomic64_fetch_xor_acquire)
static inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#if defined(arch_atomic64_fetch_xor_release)
static inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#if defined(arch_atomic64_fetch_xor_relaxed)
static inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
#endif

#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
static inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}
#define atomic64_xchg atomic64_xchg
#endif

#if defined(arch_atomic64_xchg_acquire)
static inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#if defined(arch_atomic64_xchg_release)
static inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#if defined(arch_atomic64_xchg_relaxed)
static inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_relaxed atomic64_xchg_relaxed
#endif

#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
static inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#if defined(arch_atomic64_cmpxchg_acquire)
static inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#if defined(arch_atomic64_cmpxchg_release)
static inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#if defined(arch_atomic64_cmpxchg_relaxed)
static inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
#endif

#if defined(arch_atomic64_try_cmpxchg)
static inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#if defined(arch_atomic64_sub_and_test)
static inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif

#if defined(arch_atomic64_fetch_add_unless)
static inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif

#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

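/*
 * Editor's note: the macro wrappers here and below assign @ptr to the
 * temporary __ai_ptr so that the macro argument is evaluated exactly
 * once, even though it is then used twice: once by the KASAN size
 * check and once by the arch_*() implementation.
 */
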
#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#define cmpxchg_local(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg64_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);			\
})

#define sync_cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})

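/*
 * Editor's note: the cmpxchg_double*() wrappers above check
 * 2 * sizeof(*__ai_ptr) because the operation compares and exchanges a
 * pair of adjacent machine words starting at @ptr, so twice the
 * pointed-to size must be valid memory.
 */
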
#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
// b29b625d5de9280f680e42c7be859b55b15e5f6a
v5.9: include/asm-generic/atomic-instrumented.h
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality an arch's atomic.h file needs to define all
 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file provides atomic_read() that forwards to
 * arch_atomic_read() for actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/instrumented.h>

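/*
 * Editor's note: relative to the v5.4 version above, this version marks
 * every wrapper __always_inline and replaces the direct
 * kasan_check_read()/kasan_check_write() calls with
 * instrument_atomic_read()/instrument_atomic_write() from
 * <linux/instrumented.h>, which forward to whichever sanitizers are
 * enabled (KASAN and/or KCSAN).
 */
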
  24static __always_inline int
  25atomic_read(const atomic_t *v)
  26{
  27	instrument_atomic_read(v, sizeof(*v));
  28	return arch_atomic_read(v);
  29}
  30#define atomic_read atomic_read
  31
  32#if defined(arch_atomic_read_acquire)
  33static __always_inline int
  34atomic_read_acquire(const atomic_t *v)
  35{
  36	instrument_atomic_read(v, sizeof(*v));
  37	return arch_atomic_read_acquire(v);
  38}
  39#define atomic_read_acquire atomic_read_acquire
  40#endif
  41
  42static __always_inline void
  43atomic_set(atomic_t *v, int i)
  44{
  45	instrument_atomic_write(v, sizeof(*v));
  46	arch_atomic_set(v, i);
  47}
  48#define atomic_set atomic_set
  49
  50#if defined(arch_atomic_set_release)
  51static __always_inline void
  52atomic_set_release(atomic_t *v, int i)
  53{
  54	instrument_atomic_write(v, sizeof(*v));
  55	arch_atomic_set_release(v, i);
  56}
  57#define atomic_set_release atomic_set_release
  58#endif
  59
  60static __always_inline void
  61atomic_add(int i, atomic_t *v)
  62{
  63	instrument_atomic_write(v, sizeof(*v));
  64	arch_atomic_add(i, v);
  65}
  66#define atomic_add atomic_add
  67
  68#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
  69static __always_inline int
  70atomic_add_return(int i, atomic_t *v)
  71{
  72	instrument_atomic_write(v, sizeof(*v));
  73	return arch_atomic_add_return(i, v);
  74}
  75#define atomic_add_return atomic_add_return
  76#endif
  77
  78#if defined(arch_atomic_add_return_acquire)
  79static __always_inline int
  80atomic_add_return_acquire(int i, atomic_t *v)
  81{
  82	instrument_atomic_write(v, sizeof(*v));
  83	return arch_atomic_add_return_acquire(i, v);
  84}
  85#define atomic_add_return_acquire atomic_add_return_acquire
  86#endif
  87
  88#if defined(arch_atomic_add_return_release)
  89static __always_inline int
  90atomic_add_return_release(int i, atomic_t *v)
  91{
  92	instrument_atomic_write(v, sizeof(*v));
  93	return arch_atomic_add_return_release(i, v);
  94}
  95#define atomic_add_return_release atomic_add_return_release
  96#endif
  97
  98#if defined(arch_atomic_add_return_relaxed)
  99static __always_inline int
 100atomic_add_return_relaxed(int i, atomic_t *v)
 101{
 102	instrument_atomic_write(v, sizeof(*v));
 103	return arch_atomic_add_return_relaxed(i, v);
 104}
 105#define atomic_add_return_relaxed atomic_add_return_relaxed
 106#endif

#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}
#define atomic_fetch_add atomic_fetch_add
#endif

#if defined(arch_atomic_fetch_add_acquire)
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_acquire(i, v);
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#if defined(arch_atomic_fetch_add_release)
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_release(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#if defined(arch_atomic_fetch_add_relaxed)
static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
#endif
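
/*
 * Illustrative sketch (editorial addition): the fetch_* variants return
 * the *old* value, e.g. for handing out unique tickets. Hypothetical:
 */
#if 0	/* example only */
static atomic_t example_next_ticket = ATOMIC_INIT(0);

static int example_take_ticket(void)
{
	/* Returns the value observed just before the increment. */
	return atomic_fetch_add(1, &example_next_ticket);
}
#endif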

static __always_inline void
atomic_sub(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}
#define atomic_sub atomic_sub

#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}
#define atomic_sub_return atomic_sub_return
#endif

#if defined(arch_atomic_sub_return_acquire)
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_return_acquire(i, v);
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#if defined(arch_atomic_sub_return_release)
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_return_release(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#if defined(arch_atomic_sub_return_relaxed)
static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#endif

#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#if defined(arch_atomic_fetch_sub_acquire)
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_acquire(i, v);
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#if defined(arch_atomic_fetch_sub_release)
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_release(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#if defined(arch_atomic_fetch_sub_relaxed)
static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#endif

#if defined(arch_atomic_inc)
static __always_inline void
atomic_inc(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#if defined(arch_atomic_fetch_inc)
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

#if defined(arch_atomic_dec)
static __always_inline void
atomic_dec(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#if defined(arch_atomic_fetch_dec)
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

static __always_inline void
atomic_and(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}
#define atomic_and atomic_and

#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}
#define atomic_fetch_and atomic_fetch_and
#endif

#if defined(arch_atomic_fetch_and_acquire)
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_and_acquire(i, v);
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#if defined(arch_atomic_fetch_and_release)
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_and_release(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#if defined(arch_atomic_fetch_and_relaxed)
static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
#endif

#if defined(arch_atomic_andnot)
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

static __always_inline void
atomic_or(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}
#define atomic_or atomic_or

#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}
#define atomic_fetch_or atomic_fetch_or
#endif

#if defined(arch_atomic_fetch_or_acquire)
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_or_acquire(i, v);
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#if defined(arch_atomic_fetch_or_release)
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_or_release(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#if defined(arch_atomic_fetch_or_relaxed)
static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
#endif

static __always_inline void
atomic_xor(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}
#define atomic_xor atomic_xor

#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#if defined(arch_atomic_fetch_xor_acquire)
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_acquire(i, v);
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#if defined(arch_atomic_fetch_xor_release)
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_release(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#if defined(arch_atomic_fetch_xor_relaxed)
static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
#endif

#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}
#define atomic_xchg atomic_xchg
#endif

#if defined(arch_atomic_xchg_acquire)
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_xchg_acquire(v, i);
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#if defined(arch_atomic_xchg_release)
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_xchg_release(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#if defined(arch_atomic_xchg_relaxed)
static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_relaxed atomic_xchg_relaxed
#endif

#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#if defined(arch_atomic_cmpxchg_acquire)
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_acquire(v, old, new);
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#if defined(arch_atomic_cmpxchg_release)
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_release(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#if defined(arch_atomic_cmpxchg_relaxed)
static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
#endif

#if defined(arch_atomic_try_cmpxchg)
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif
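
/*
 * Illustrative sketch (editorial addition): atomic_try_cmpxchg() updates
 * *old on failure (which is why the wrappers above instrument @old as
 * well as @v), so the classic compare-exchange loop needs no explicit
 * re-read. Hypothetical bounded-increment example:
 */
#if 0	/* example only */
static bool example_inc_below(atomic_t *v, int limit)
{
	int old = atomic_read(v);

	do {
		if (old >= limit)
			return false;
	} while (!atomic_try_cmpxchg(v, &old, old + 1));

	return true;
}
#endif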

#if defined(arch_atomic_sub_and_test)
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif

#if defined(arch_atomic_fetch_add_unless)
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
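
/*
 * Illustrative sketch (editorial addition): the conditional operations
 * above implement common lookup/refcount idioms, e.g. taking a reference
 * only while the object is still live. Hypothetical:
 */
#if 0	/* example only */
struct example_obj {
	atomic_t refs;
};

static bool example_get(struct example_obj *obj)
{
	/* Fails once refs has dropped to zero (object being torn down). */
	return atomic_inc_not_zero(&obj->refs);
}

static void example_put(struct example_obj *obj)
{
	if (atomic_dec_and_test(&obj->refs))
		; /* last reference dropped: free the object here */
}
#endif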

static __always_inline s64
atomic64_read(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}
#define atomic64_read atomic64_read

#if defined(arch_atomic64_read_acquire)
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}
#define atomic64_set atomic64_set

#if defined(arch_atomic64_set_release)
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_set_release(v, i);
}
#define atomic64_set_release atomic64_set_release
#endif

static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}
#define atomic64_add atomic64_add

#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}
#define atomic64_add_return atomic64_add_return
#endif

#if defined(arch_atomic64_add_return_acquire)
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_return_acquire(i, v);
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#if defined(arch_atomic64_add_return_release)
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_return_release(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#if defined(arch_atomic64_add_return_relaxed)
static __always_inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#if defined(arch_atomic64_fetch_add_acquire)
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_acquire(i, v);
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#if defined(arch_atomic64_fetch_add_release)
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_release(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#if defined(arch_atomic64_fetch_add_relaxed)
static __always_inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
#endif

static __always_inline void
atomic64_sub(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}
#define atomic64_sub atomic64_sub

#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}
#define atomic64_sub_return atomic64_sub_return
#endif

#if defined(arch_atomic64_sub_return_acquire)
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_return_acquire(i, v);
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#if defined(arch_atomic64_sub_return_release)
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_return_release(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#if defined(arch_atomic64_sub_return_relaxed)
static __always_inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#if defined(arch_atomic64_fetch_sub_acquire)
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_acquire(i, v);
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#if defined(arch_atomic64_fetch_sub_release)
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_release(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#if defined(arch_atomic64_fetch_sub_relaxed)
static __always_inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
#endif

#if defined(arch_atomic64_inc)
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#if defined(arch_atomic64_fetch_inc)
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#if defined(arch_atomic64_dec)
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif

#if defined(arch_atomic64_dec_return)
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#if defined(arch_atomic64_fetch_dec)
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

static __always_inline void
atomic64_and(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}
#define atomic64_and atomic64_and

#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#if defined(arch_atomic64_fetch_and_acquire)
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#if defined(arch_atomic64_fetch_and_release)
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#if defined(arch_atomic64_fetch_and_relaxed)
static __always_inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#endif

#if defined(arch_atomic64_andnot)
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#if defined(arch_atomic64_fetch_andnot)
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}
#define atomic64_or atomic64_or

#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#if defined(arch_atomic64_fetch_or_acquire)
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#if defined(arch_atomic64_fetch_or_release)
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#if defined(arch_atomic64_fetch_or_relaxed)
static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#endif

static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}
#define atomic64_xor atomic64_xor

#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#if defined(arch_atomic64_fetch_xor_acquire)
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#if defined(arch_atomic64_fetch_xor_release)
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#if defined(arch_atomic64_fetch_xor_relaxed)
static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
#endif

#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}
#define atomic64_xchg atomic64_xchg
#endif

#if defined(arch_atomic64_xchg_acquire)
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#if defined(arch_atomic64_xchg_release)
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#if defined(arch_atomic64_xchg_relaxed)
static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_relaxed atomic64_xchg_relaxed
#endif

#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#if defined(arch_atomic64_cmpxchg_acquire)
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#if defined(arch_atomic64_cmpxchg_release)
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#if defined(arch_atomic64_cmpxchg_relaxed)
static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
#endif

#if defined(arch_atomic64_try_cmpxchg)
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_write(v, sizeof(*v));
	instrument_atomic_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#if defined(arch_atomic64_sub_and_test)
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif

#if defined(arch_atomic64_fetch_add_unless)
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	instrument_atomic_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
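
/*
 * Illustrative sketch (editorial addition): the atomic64_* wrappers
 * mirror the atomic_* API with s64 operands, e.g. for counters that can
 * exceed 32 bits. Hypothetical:
 */
#if 0	/* example only */
static atomic64_t example_bytes = ATOMIC64_INIT(0);

static void example_account(s64 nbytes)
{
	atomic64_add(nbytes, &example_bytes);
}

static s64 example_total(void)
{
	return atomic64_read(&example_bytes);
}
#endif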

#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
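
/*
 * Illustrative note (editorial addition): the macros above and below
 * evaluate @ptr exactly once into __ai_ptr, so side effects in the
 * argument are not repeated, and the access is instrumented with the
 * pointee's size before forwarding to the arch_ macro. Hypothetical:
 */
#if 0	/* example only */
static unsigned long example_pending;

static unsigned long example_claim(void)
{
	/* Atomically take the old value and leave zero behind. */
	return xchg(&example_pending, 0UL);
}
#endif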

#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#define cmpxchg_local(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg64_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);			\
})

#define sync_cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr));	\
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr));	\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})
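
/*
 * Editorial note: cmpxchg_double() and cmpxchg_double_local() operate on
 * two adjacent machine words starting at @ptr, which is why the
 * instrumentation above covers 2 * sizeof(*__ai_ptr) rather than a
 * single pointee.
 */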

#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
// 89bf97f3a7509b740845e51ddf31055b48a81f40