1/* SPDX-License-Identifier: GPL-2.0 */
2#ifndef _ASM_GENERIC_PERCPU_H_
3#define _ASM_GENERIC_PERCPU_H_
4
5#include <linux/compiler.h>
6#include <linux/threads.h>
7#include <linux/percpu-defs.h>
8
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
/* One base offset per possible CPU; presumably populated at boot -- defined elsewhere. */
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/* Debug build: smp_processor_id() (not the raw_ variant) so misuse in preemptible context is caught. */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
/* Arch-provided hook to set up the per-cpu memory areas during boot. */
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */
52
/* Linker section that per-cpu variables are emitted into (arch-overridable). */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
/* UP build: only one instance of each variable exists, plain .data suffices. */
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

/* Extra attributes an arch may want on per-cpu declarations; empty by default. */
#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif
64
/*
 * raw_cpu_* generic fallbacks: operate on this CPU's instance with no
 * protection at all -- the caller must already have excluded
 * preemption/interrupts (or explicitly not care about the race).
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

/*
 * Note: typeof(&(pcp)) is used rather than "typeof(pcp) *" so the local
 * pointer keeps any qualifiers/annotations attached to the percpu
 * variable's type (matches the form used by the other generic helpers).
 */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += val;							\
	*__p;								\
})

/* Store nval, return the previous value. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})

/* Store nval only if the current value equals oval; always return the old value. */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = nval;						\
	__ret;								\
})

/* Returns 1 and replaces both values iff both old values matched, else 0. */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})

/*
 * Fast path for this_cpu_generic_read() of a native-word type:
 * disabling preemption suffices for a single-copy-atomic load.
 * ___ret (three underscores) avoids shadowing the __ret declared by
 * this_cpu_generic_read(), which nests this statement expression.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})

/*
 * Slow path: a non-native-word read could tear, so interrupts are
 * excluded for the duration of the access.
 */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})

/*
 * this_cpu_* generic fallbacks: make each raw op safe against
 * interrupts (and hence preemption) by bracketing it with
 * raw_local_irq_save()/raw_local_irq_restore().
 */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)


#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
193
/*
 * Size-suffixed fallbacks (_1/_2/_4/_8 = operand width in bytes).
 * An arch that has a faster sequence for a given op/size defines the
 * corresponding macro before this header is reached; every op left
 * undefined falls back to the matching generic implementation above.
 *
 * raw_cpu_*() variants: no preemption/interrupt protection.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp) raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif

/* Bitwise mask ops (clear bits / set bits). */
#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif

/* Value-returning ops. */
#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

/* this_cpu_*() variants: safe against interrupts/preemption. */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp) this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
443
444#endif /* _ASM_GENERIC_PERCPU_H_ */
1/* SPDX-License-Identifier: GPL-2.0 */
2#ifndef _ASM_GENERIC_PERCPU_H_
3#define _ASM_GENERIC_PERCPU_H_
4
5#include <linux/compiler.h>
6#include <linux/threads.h>
7#include <linux/percpu-defs.h>
8
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
/* One base offset per possible CPU; presumably populated at boot -- defined elsewhere. */
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/* Debug build: smp_processor_id() (not the raw_ variant) so misuse in preemptible context is caught. */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
/* Arch-provided hook to set up the per-cpu memory areas during boot. */
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */
52
/* Linker section that per-cpu variables are emitted into (arch-overridable). */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
/* UP build: only one instance of each variable exists, plain .data suffices. */
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

/* Extra attributes an arch may want on per-cpu declarations; empty by default. */
#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

/* Extra attributes applied to per-cpu variable definitions; empty by default. */
#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif
68
/*
 * raw_cpu_* generic fallbacks: operate on this CPU's instance with no
 * protection at all -- the caller must already have excluded
 * preemption/interrupts (or explicitly not care about the race).
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += val;							\
	*__p;								\
})

/* Store nval, return the previous value. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})

/* Store nval only if the current value equals oval; always return the old value. */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = nval;						\
	__ret;								\
})

/* Returns 1 and replaces both values iff both old values matched, else 0. */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})

/*
 * Fast path for this_cpu_generic_read() of a native-word type:
 * disabling preemption suffices for a single-copy-atomic load.
 *
 * Use ___ret/___flags (three underscores) here: these statement
 * expressions are nested inside this_cpu_generic_read(), which
 * declares its own __ret -- two-underscore names would shadow it.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})

/*
 * Slow path: a non-native-word read could tear, so interrupts are
 * excluded for the duration of the access.
 */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})

/*
 * this_cpu_* generic fallbacks: make each raw op safe against
 * interrupts (and hence preemption) by bracketing it with
 * raw_local_irq_save()/raw_local_irq_restore().
 */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)


#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
197
/*
 * Size-suffixed fallbacks (_1/_2/_4/_8 = operand width in bytes).
 * An arch that has a faster sequence for a given op/size defines the
 * corresponding macro before this header is reached; every op left
 * undefined falls back to the matching generic implementation above.
 *
 * raw_cpu_*() variants: no preemption/interrupt protection.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp) raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif

/* Bitwise mask ops (clear bits / set bits). */
#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif

/* Value-returning ops. */
#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

/* this_cpu_*() variants: safe against interrupts/preemption. */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp) this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
447
448#endif /* _ASM_GENERIC_PERCPU_H_ */