1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 | /* SPDX-License-Identifier: GPL-2.0 */ #ifndef _ARCH_POWERPC_LOCAL_H #define _ARCH_POWERPC_LOCAL_H #ifdef CONFIG_PPC_BOOK3S_64 #include <linux/percpu.h> #include <linux/atomic.h> #include <linux/irqflags.h> #include <asm/hw_irq.h> typedef struct { long v; } local_t; #define LOCAL_INIT(i) { (i) } static __inline__ long local_read(const local_t *l) { return READ_ONCE(l->v); } static __inline__ void local_set(local_t *l, long i) { WRITE_ONCE(l->v, i); } #define LOCAL_OP(op, c_op) \ static __inline__ void local_##op(long i, local_t *l) \ { \ unsigned long flags; \ \ powerpc_local_irq_pmu_save(flags); \ l->v c_op i; \ powerpc_local_irq_pmu_restore(flags); \ } #define LOCAL_OP_RETURN(op, c_op) \ static __inline__ long local_##op##_return(long a, local_t *l) \ { \ long t; \ unsigned long flags; \ \ powerpc_local_irq_pmu_save(flags); \ t = (l->v c_op a); \ powerpc_local_irq_pmu_restore(flags); \ \ return t; \ } #define LOCAL_OPS(op, c_op) \ LOCAL_OP(op, c_op) \ LOCAL_OP_RETURN(op, c_op) LOCAL_OPS(add, +=) LOCAL_OPS(sub, -=) #define local_add_negative(a, l) (local_add_return((a), (l)) < 0) #define local_inc_return(l) local_add_return(1LL, l) #define local_inc(l) local_inc_return(l) /* * local_inc_and_test - increment and test * @l: pointer of type local_t * * Atomically increments @l by 1 * and returns true if the result is zero, or false for all * other cases. 
*/ #define local_inc_and_test(l) (local_inc_return(l) == 0) #define local_dec_return(l) local_sub_return(1LL, l) #define local_dec(l) local_dec_return(l) #define local_sub_and_test(a, l) (local_sub_return((a), (l)) == 0) #define local_dec_and_test(l) (local_dec_return((l)) == 0) static __inline__ long local_cmpxchg(local_t *l, long o, long n) { long t; unsigned long flags; powerpc_local_irq_pmu_save(flags); t = l->v; if (t == o) l->v = n; powerpc_local_irq_pmu_restore(flags); return t; } static __inline__ bool local_try_cmpxchg(local_t *l, long *po, long n) { long o = *po, r; r = local_cmpxchg(l, o, n); if (unlikely(r != o)) *po = r; return likely(r == o); } static __inline__ long local_xchg(local_t *l, long n) { long t; unsigned long flags; powerpc_local_irq_pmu_save(flags); t = l->v; l->v = n; powerpc_local_irq_pmu_restore(flags); return t; } /** * local_add_unless - add unless the number is already a given value * @l: pointer of type local_t * @a: the amount to add to v... * @u: ...unless v is equal to u. * * Atomically adds @a to @l, if @v was not already @u. * Returns true if the addition was done. */ static __inline__ bool local_add_unless(local_t *l, long a, long u) { unsigned long flags; bool ret = false; powerpc_local_irq_pmu_save(flags); if (l->v != u) { l->v += a; ret = true; } powerpc_local_irq_pmu_restore(flags); return ret; } #define local_inc_not_zero(l) local_add_unless((l), 1, 0) /* Use these for per-cpu local_t variables: on some archs they are * much more efficient than these naive implementations. Note they take * a variable, not an address. */ #define __local_inc(l) ((l)->v++) #define __local_dec(l) ((l)->v++) #define __local_add(i,l) ((l)->v+=(i)) #define __local_sub(i,l) ((l)->v-=(i)) #else /* CONFIG_PPC64 */ #include <asm-generic/local.h> #endif /* CONFIG_PPC64 */ #endif /* _ARCH_POWERPC_LOCAL_H */ |