include/asm-generic/percpu.h — generic per-CPU accessor macro definitions from the Linux kernel, shown below in two versions for comparison:
v4.10.11
 
  1#ifndef _ASM_GENERIC_PERCPU_H_
  2#define _ASM_GENERIC_PERCPU_H_
  3
  4#include <linux/compiler.h>
  5#include <linux/threads.h>
  6#include <linux/percpu-defs.h>
  7
  8#ifdef CONFIG_SMP
  9
 10/*
 11 * per_cpu_offset() is the offset that has to be added to a
 12 * percpu variable to get to the instance for a certain processor.
 13 *
 14 * Most arches use the __per_cpu_offset array for those offsets but
 15 * some arches have their own ways of determining the offset (x86_64, s390).
 16 */
 17#ifndef __per_cpu_offset
 18extern unsigned long __per_cpu_offset[NR_CPUS];
 19
 20#define per_cpu_offset(x) (__per_cpu_offset[x])
 21#endif
 22
 23/*
 24 * Determine the offset for the currently active processor.
 25 * An arch may define __my_cpu_offset to provide a more effective
 26 * means of obtaining the offset to the per cpu variables of the
 27 * current processor.
 28 */
 29#ifndef __my_cpu_offset
 30#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
 31#endif
 32#ifdef CONFIG_DEBUG_PREEMPT
 33#define my_cpu_offset per_cpu_offset(smp_processor_id())
 34#else
 35#define my_cpu_offset __my_cpu_offset
 36#endif
 37
 38/*
 39 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 40 * translations for raw_cpu_ptr().
 41 */
 42#ifndef arch_raw_cpu_ptr
 43#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
 44#endif
 45
 46#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
 47extern void setup_per_cpu_areas(void);
 48#endif
 49
 50#endif	/* SMP */
 51
 52#ifndef PER_CPU_BASE_SECTION
 53#ifdef CONFIG_SMP
 54#define PER_CPU_BASE_SECTION ".data..percpu"
 55#else
 56#define PER_CPU_BASE_SECTION ".data"
 57#endif
 58#endif
 59
 60#ifndef PER_CPU_ATTRIBUTES
 61#define PER_CPU_ATTRIBUTES
 62#endif
 63
 64#ifndef PER_CPU_DEF_ATTRIBUTES
 65#define PER_CPU_DEF_ATTRIBUTES
 66#endif
 67
 68#define raw_cpu_generic_read(pcp)					\
 69({									\
 70	*raw_cpu_ptr(&(pcp));						\
 71})
 72
 73#define raw_cpu_generic_to_op(pcp, val, op)				\
 74do {									\
 75	*raw_cpu_ptr(&(pcp)) op val;					\
 76} while (0)
 77
 78#define raw_cpu_generic_add_return(pcp, val)				\
 79({									\
 80	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
 81									\
 82	*__p += val;							\
 83	*__p;								\
 84})
 85
 86#define raw_cpu_generic_xchg(pcp, nval)					\
 87({									\
 88	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
 89	typeof(pcp) __ret;						\
 90	__ret = *__p;							\
 91	*__p = nval;							\
 92	__ret;								\
 93})
 94
 95#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
 96({									\
 97	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
 98	typeof(pcp) __ret;						\
 99	__ret = *__p;							\
100	if (__ret == (oval))						\
 
 
 
 
 
 
 
 
 
101		*__p = nval;						\
 
 
 
 
 
102	__ret;								\
103})
104
105#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
106({									\
107	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1));			\
108	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2));			\
109	int __ret = 0;							\
110	if (*__p1 == (oval1) && *__p2  == (oval2)) {			\
111		*__p1 = nval1;						\
112		*__p2 = nval2;						\
113		__ret = 1;						\
114	}								\
115	(__ret);							\
116})
117
/*
 * Preemption-safe read of this CPU's instance of @pcp: disable
 * preemption around the raw read so the task cannot migrate between
 * computing the per-CPU address and loading the value.  The _notrace
 * variants avoid recursion when used from the function tracer.
 */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	preempt_disable_notrace();					\
	__ret = raw_cpu_generic_read(pcp);				\
	preempt_enable_notrace();					\
	__ret;								\
})
126
127#define this_cpu_generic_to_op(pcp, val, op)				\
128do {									\
129	unsigned long __flags;						\
130	raw_local_irq_save(__flags);					\
131	raw_cpu_generic_to_op(pcp, val, op);				\
132	raw_local_irq_restore(__flags);					\
133} while (0)
134
135
136#define this_cpu_generic_add_return(pcp, val)				\
137({									\
138	typeof(pcp) __ret;						\
139	unsigned long __flags;						\
140	raw_local_irq_save(__flags);					\
141	__ret = raw_cpu_generic_add_return(pcp, val);			\
142	raw_local_irq_restore(__flags);					\
143	__ret;								\
144})
145
146#define this_cpu_generic_xchg(pcp, nval)				\
147({									\
148	typeof(pcp) __ret;						\
149	unsigned long __flags;						\
150	raw_local_irq_save(__flags);					\
151	__ret = raw_cpu_generic_xchg(pcp, nval);			\
152	raw_local_irq_restore(__flags);					\
153	__ret;								\
154})
155
156#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
157({									\
158	typeof(pcp) __ret;						\
159	unsigned long __flags;						\
160	raw_local_irq_save(__flags);					\
161	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
162	raw_local_irq_restore(__flags);					\
163	__ret;								\
164})
165
166#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
167({									\
168	int __ret;							\
169	unsigned long __flags;						\
170	raw_local_irq_save(__flags);					\
171	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
172			oval1, oval2, nval1, nval2);			\
173	raw_local_irq_restore(__flags);					\
174	__ret;								\
175})
176
177#ifndef raw_cpu_read_1
178#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
179#endif
180#ifndef raw_cpu_read_2
181#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
182#endif
183#ifndef raw_cpu_read_4
184#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
185#endif
186#ifndef raw_cpu_read_8
187#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
188#endif
189
190#ifndef raw_cpu_write_1
191#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
192#endif
193#ifndef raw_cpu_write_2
194#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
195#endif
196#ifndef raw_cpu_write_4
197#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
198#endif
199#ifndef raw_cpu_write_8
200#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
201#endif
202
203#ifndef raw_cpu_add_1
204#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
205#endif
206#ifndef raw_cpu_add_2
207#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
208#endif
209#ifndef raw_cpu_add_4
210#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
211#endif
212#ifndef raw_cpu_add_8
213#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
214#endif
215
216#ifndef raw_cpu_and_1
217#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
218#endif
219#ifndef raw_cpu_and_2
220#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
221#endif
222#ifndef raw_cpu_and_4
223#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
224#endif
225#ifndef raw_cpu_and_8
226#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
227#endif
228
229#ifndef raw_cpu_or_1
230#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
231#endif
232#ifndef raw_cpu_or_2
233#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
234#endif
235#ifndef raw_cpu_or_4
236#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
237#endif
238#ifndef raw_cpu_or_8
239#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
240#endif
241
242#ifndef raw_cpu_add_return_1
243#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
244#endif
245#ifndef raw_cpu_add_return_2
246#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
247#endif
248#ifndef raw_cpu_add_return_4
249#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
250#endif
251#ifndef raw_cpu_add_return_8
252#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
253#endif
254
255#ifndef raw_cpu_xchg_1
256#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
257#endif
258#ifndef raw_cpu_xchg_2
259#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
260#endif
261#ifndef raw_cpu_xchg_4
262#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
263#endif
264#ifndef raw_cpu_xchg_8
265#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
266#endif
267
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
268#ifndef raw_cpu_cmpxchg_1
269#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
270	raw_cpu_generic_cmpxchg(pcp, oval, nval)
271#endif
272#ifndef raw_cpu_cmpxchg_2
273#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
274	raw_cpu_generic_cmpxchg(pcp, oval, nval)
275#endif
276#ifndef raw_cpu_cmpxchg_4
277#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
278	raw_cpu_generic_cmpxchg(pcp, oval, nval)
279#endif
280#ifndef raw_cpu_cmpxchg_8
281#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
282	raw_cpu_generic_cmpxchg(pcp, oval, nval)
283#endif
284
285#ifndef raw_cpu_cmpxchg_double_1
286#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
287	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
288#endif
289#ifndef raw_cpu_cmpxchg_double_2
290#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
291	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
292#endif
293#ifndef raw_cpu_cmpxchg_double_4
294#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
295	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
296#endif
297#ifndef raw_cpu_cmpxchg_double_8
298#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
299	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
300#endif
301
302#ifndef this_cpu_read_1
303#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
304#endif
305#ifndef this_cpu_read_2
306#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
307#endif
308#ifndef this_cpu_read_4
309#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
310#endif
311#ifndef this_cpu_read_8
312#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
313#endif
314
315#ifndef this_cpu_write_1
316#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
317#endif
318#ifndef this_cpu_write_2
319#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
320#endif
321#ifndef this_cpu_write_4
322#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
323#endif
324#ifndef this_cpu_write_8
325#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
326#endif
327
328#ifndef this_cpu_add_1
329#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
330#endif
331#ifndef this_cpu_add_2
332#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
333#endif
334#ifndef this_cpu_add_4
335#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
336#endif
337#ifndef this_cpu_add_8
338#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
339#endif
340
341#ifndef this_cpu_and_1
342#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
343#endif
344#ifndef this_cpu_and_2
345#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
346#endif
347#ifndef this_cpu_and_4
348#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
349#endif
350#ifndef this_cpu_and_8
351#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
352#endif
353
354#ifndef this_cpu_or_1
355#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
356#endif
357#ifndef this_cpu_or_2
358#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
359#endif
360#ifndef this_cpu_or_4
361#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
362#endif
363#ifndef this_cpu_or_8
364#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
365#endif
366
367#ifndef this_cpu_add_return_1
368#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
369#endif
370#ifndef this_cpu_add_return_2
371#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
372#endif
373#ifndef this_cpu_add_return_4
374#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
375#endif
376#ifndef this_cpu_add_return_8
377#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
378#endif
379
380#ifndef this_cpu_xchg_1
381#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
382#endif
383#ifndef this_cpu_xchg_2
384#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
385#endif
386#ifndef this_cpu_xchg_4
387#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
388#endif
389#ifndef this_cpu_xchg_8
390#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
391#endif
392
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
393#ifndef this_cpu_cmpxchg_1
394#define this_cpu_cmpxchg_1(pcp, oval, nval) \
395	this_cpu_generic_cmpxchg(pcp, oval, nval)
396#endif
397#ifndef this_cpu_cmpxchg_2
398#define this_cpu_cmpxchg_2(pcp, oval, nval) \
399	this_cpu_generic_cmpxchg(pcp, oval, nval)
400#endif
401#ifndef this_cpu_cmpxchg_4
402#define this_cpu_cmpxchg_4(pcp, oval, nval) \
403	this_cpu_generic_cmpxchg(pcp, oval, nval)
404#endif
405#ifndef this_cpu_cmpxchg_8
406#define this_cpu_cmpxchg_8(pcp, oval, nval) \
407	this_cpu_generic_cmpxchg(pcp, oval, nval)
408#endif
409
410#ifndef this_cpu_cmpxchg_double_1
411#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
412	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
413#endif
414#ifndef this_cpu_cmpxchg_double_2
415#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
416	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
417#endif
418#ifndef this_cpu_cmpxchg_double_4
419#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
420	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
421#endif
422#ifndef this_cpu_cmpxchg_double_8
423#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
424	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
425#endif
426
427#endif /* _ASM_GENERIC_PERCPU_H_ */
v6.9.4
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef _ASM_GENERIC_PERCPU_H_
  3#define _ASM_GENERIC_PERCPU_H_
  4
  5#include <linux/compiler.h>
  6#include <linux/threads.h>
  7#include <linux/percpu-defs.h>
  8
  9#ifdef CONFIG_SMP
 10
 11/*
 12 * per_cpu_offset() is the offset that has to be added to a
 13 * percpu variable to get to the instance for a certain processor.
 14 *
 15 * Most arches use the __per_cpu_offset array for those offsets but
 16 * some arches have their own ways of determining the offset (x86_64, s390).
 17 */
 18#ifndef __per_cpu_offset
 19extern unsigned long __per_cpu_offset[NR_CPUS];
 20
 21#define per_cpu_offset(x) (__per_cpu_offset[x])
 22#endif
 23
 24/*
 25 * Determine the offset for the currently active processor.
 26 * An arch may define __my_cpu_offset to provide a more effective
 27 * means of obtaining the offset to the per cpu variables of the
 28 * current processor.
 29 */
 30#ifndef __my_cpu_offset
 31#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
 32#endif
 33#ifdef CONFIG_DEBUG_PREEMPT
 34#define my_cpu_offset per_cpu_offset(smp_processor_id())
 35#else
 36#define my_cpu_offset __my_cpu_offset
 37#endif
 38
 39/*
 40 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 41 * translations for raw_cpu_ptr().
 42 */
 43#ifndef arch_raw_cpu_ptr
 44#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
 45#endif
 46
 47#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
 48extern void setup_per_cpu_areas(void);
 49#endif
 50
 51#endif	/* SMP */
 52
 53#ifndef PER_CPU_BASE_SECTION
 54#ifdef CONFIG_SMP
 55#define PER_CPU_BASE_SECTION ".data..percpu"
 56#else
 57#define PER_CPU_BASE_SECTION ".data"
 58#endif
 59#endif
 60
 61#ifndef PER_CPU_ATTRIBUTES
 62#define PER_CPU_ATTRIBUTES
 63#endif
 64
 
 
 
 
 65#define raw_cpu_generic_read(pcp)					\
 66({									\
 67	*raw_cpu_ptr(&(pcp));						\
 68})
 69
 70#define raw_cpu_generic_to_op(pcp, val, op)				\
 71do {									\
 72	*raw_cpu_ptr(&(pcp)) op val;					\
 73} while (0)
 74
 75#define raw_cpu_generic_add_return(pcp, val)				\
 76({									\
 77	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
 78									\
 79	*__p += val;							\
 80	*__p;								\
 81})
 82
 83#define raw_cpu_generic_xchg(pcp, nval)					\
 84({									\
 85	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
 86	typeof(pcp) __ret;						\
 87	__ret = *__p;							\
 88	*__p = nval;							\
 89	__ret;								\
 90})
 91
 92#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)		\
 93({									\
 94	typeof(pcp) __val, __old = *(ovalp);				\
 95	__val = _cmpxchg(pcp, __old, nval);				\
 96	if (__val != __old)						\
 97		*(ovalp) = __val;					\
 98	__val == __old;							\
 99})
100
101#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
102({									\
103	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
104	typeof(pcp) __val = *__p, ___old = *(ovalp);			\
105	bool __ret;							\
106	if (__val == ___old) {						\
107		*__p = nval;						\
108		__ret = true;						\
109	} else {							\
110		*(ovalp) = __val;					\
111		__ret = false;						\
112	}								\
113	__ret;								\
114})
115
/*
 * Generic per-CPU cmpxchg, implemented on top of the try_cmpxchg
 * primitive: on mismatch, raw_cpu_generic_try_cmpxchg() writes the
 * observed value back into __old, so the macro always evaluates to
 * the value seen before any store.
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __old = (oval);					\
	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval);			\
	__old;								\
})
122
123#define __this_cpu_generic_read_nopreempt(pcp)				\
124({									\
125	typeof(pcp) ___ret;						\
126	preempt_disable_notrace();					\
127	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
128	preempt_enable_notrace();					\
129	___ret;								\
130})
131
132#define __this_cpu_generic_read_noirq(pcp)				\
133({									\
134	typeof(pcp) ___ret;						\
135	unsigned long ___flags;						\
136	raw_local_irq_save(___flags);					\
137	___ret = raw_cpu_generic_read(pcp);				\
138	raw_local_irq_restore(___flags);				\
139	___ret;								\
140})
141
142#define this_cpu_generic_read(pcp)					\
143({									\
144	typeof(pcp) __ret;						\
145	if (__native_word(pcp))						\
146		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
147	else								\
148		__ret = __this_cpu_generic_read_noirq(pcp);		\
149	__ret;								\
150})
151
152#define this_cpu_generic_to_op(pcp, val, op)				\
153do {									\
154	unsigned long __flags;						\
155	raw_local_irq_save(__flags);					\
156	raw_cpu_generic_to_op(pcp, val, op);				\
157	raw_local_irq_restore(__flags);					\
158} while (0)
159
160
161#define this_cpu_generic_add_return(pcp, val)				\
162({									\
163	typeof(pcp) __ret;						\
164	unsigned long __flags;						\
165	raw_local_irq_save(__flags);					\
166	__ret = raw_cpu_generic_add_return(pcp, val);			\
167	raw_local_irq_restore(__flags);					\
168	__ret;								\
169})
170
171#define this_cpu_generic_xchg(pcp, nval)				\
172({									\
173	typeof(pcp) __ret;						\
174	unsigned long __flags;						\
175	raw_local_irq_save(__flags);					\
176	__ret = raw_cpu_generic_xchg(pcp, nval);			\
177	raw_local_irq_restore(__flags);					\
178	__ret;								\
179})
180
181#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
182({									\
183	bool __ret;							\
184	unsigned long __flags;						\
185	raw_local_irq_save(__flags);					\
186	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);		\
187	raw_local_irq_restore(__flags);					\
188	__ret;								\
189})
190
/*
 * Interrupt-safe cmpxchg on this CPU's instance of @pcp: the raw
 * generic cmpxchg is wrapped in raw_local_irq_save()/restore() so the
 * read-compare-store sequence cannot be interleaved with an interrupt
 * handler on the same CPU.  Evaluates to the value observed before
 * any store.
 */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
200
201#ifndef raw_cpu_read_1
202#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
203#endif
204#ifndef raw_cpu_read_2
205#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
206#endif
207#ifndef raw_cpu_read_4
208#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
209#endif
210#ifndef raw_cpu_read_8
211#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
212#endif
213
214#ifndef raw_cpu_write_1
215#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
216#endif
217#ifndef raw_cpu_write_2
218#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
219#endif
220#ifndef raw_cpu_write_4
221#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
222#endif
223#ifndef raw_cpu_write_8
224#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
225#endif
226
227#ifndef raw_cpu_add_1
228#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
229#endif
230#ifndef raw_cpu_add_2
231#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
232#endif
233#ifndef raw_cpu_add_4
234#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
235#endif
236#ifndef raw_cpu_add_8
237#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
238#endif
239
240#ifndef raw_cpu_and_1
241#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
242#endif
243#ifndef raw_cpu_and_2
244#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
245#endif
246#ifndef raw_cpu_and_4
247#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
248#endif
249#ifndef raw_cpu_and_8
250#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
251#endif
252
253#ifndef raw_cpu_or_1
254#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
255#endif
256#ifndef raw_cpu_or_2
257#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
258#endif
259#ifndef raw_cpu_or_4
260#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
261#endif
262#ifndef raw_cpu_or_8
263#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
264#endif
265
266#ifndef raw_cpu_add_return_1
267#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
268#endif
269#ifndef raw_cpu_add_return_2
270#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
271#endif
272#ifndef raw_cpu_add_return_4
273#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
274#endif
275#ifndef raw_cpu_add_return_8
276#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
277#endif
278
279#ifndef raw_cpu_xchg_1
280#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
281#endif
282#ifndef raw_cpu_xchg_2
283#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
284#endif
285#ifndef raw_cpu_xchg_4
286#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
287#endif
288#ifndef raw_cpu_xchg_8
289#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
290#endif
291
292#ifndef raw_cpu_try_cmpxchg_1
293#ifdef raw_cpu_cmpxchg_1
294#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
295	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
296#else
297#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
298	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
299#endif
300#endif
301#ifndef raw_cpu_try_cmpxchg_2
302#ifdef raw_cpu_cmpxchg_2
303#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
304	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
305#else
306#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
307	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
308#endif
309#endif
310#ifndef raw_cpu_try_cmpxchg_4
311#ifdef raw_cpu_cmpxchg_4
312#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
313	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
314#else
315#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
316	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
317#endif
318#endif
319#ifndef raw_cpu_try_cmpxchg_8
320#ifdef raw_cpu_cmpxchg_8
321#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
322	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
323#else
324#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
325	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
326#endif
327#endif
328
329#ifndef raw_cpu_try_cmpxchg64
330#ifdef raw_cpu_cmpxchg64
331#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
332	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg64)
333#else
334#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
335	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
336#endif
337#endif
338#ifndef raw_cpu_try_cmpxchg128
339#ifdef raw_cpu_cmpxchg128
340#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
341	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg128)
342#else
343#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
344	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
345#endif
346#endif
347
348#ifndef raw_cpu_cmpxchg_1
349#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
350	raw_cpu_generic_cmpxchg(pcp, oval, nval)
351#endif
352#ifndef raw_cpu_cmpxchg_2
353#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
354	raw_cpu_generic_cmpxchg(pcp, oval, nval)
355#endif
356#ifndef raw_cpu_cmpxchg_4
357#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
358	raw_cpu_generic_cmpxchg(pcp, oval, nval)
359#endif
360#ifndef raw_cpu_cmpxchg_8
361#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
362	raw_cpu_generic_cmpxchg(pcp, oval, nval)
363#endif
364
365#ifndef raw_cpu_cmpxchg64
366#define raw_cpu_cmpxchg64(pcp, oval, nval) \
367	raw_cpu_generic_cmpxchg(pcp, oval, nval)
368#endif
/* Fall back to the generic implementation unless the arch provides one. */
#ifndef raw_cpu_cmpxchg128
#define raw_cpu_cmpxchg128(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
373
374#ifndef this_cpu_read_1
375#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
376#endif
377#ifndef this_cpu_read_2
378#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
379#endif
380#ifndef this_cpu_read_4
381#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
382#endif
383#ifndef this_cpu_read_8
384#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
385#endif
386
387#ifndef this_cpu_write_1
388#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
389#endif
390#ifndef this_cpu_write_2
391#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
392#endif
393#ifndef this_cpu_write_4
394#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
395#endif
396#ifndef this_cpu_write_8
397#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
398#endif
399
400#ifndef this_cpu_add_1
401#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
402#endif
403#ifndef this_cpu_add_2
404#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
405#endif
406#ifndef this_cpu_add_4
407#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
408#endif
409#ifndef this_cpu_add_8
410#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
411#endif
412
413#ifndef this_cpu_and_1
414#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
415#endif
416#ifndef this_cpu_and_2
417#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
418#endif
419#ifndef this_cpu_and_4
420#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
421#endif
422#ifndef this_cpu_and_8
423#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
424#endif
425
426#ifndef this_cpu_or_1
427#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
428#endif
429#ifndef this_cpu_or_2
430#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
431#endif
432#ifndef this_cpu_or_4
433#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
434#endif
435#ifndef this_cpu_or_8
436#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
437#endif
438
439#ifndef this_cpu_add_return_1
440#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
441#endif
442#ifndef this_cpu_add_return_2
443#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
444#endif
445#ifndef this_cpu_add_return_4
446#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
447#endif
448#ifndef this_cpu_add_return_8
449#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
450#endif
451
452#ifndef this_cpu_xchg_1
453#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
454#endif
455#ifndef this_cpu_xchg_2
456#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
457#endif
458#ifndef this_cpu_xchg_4
459#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
460#endif
461#ifndef this_cpu_xchg_8
462#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
463#endif
464
465#ifndef this_cpu_try_cmpxchg_1
466#ifdef this_cpu_cmpxchg_1
467#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
468	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
469#else
470#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
471	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
472#endif
473#endif
474#ifndef this_cpu_try_cmpxchg_2
475#ifdef this_cpu_cmpxchg_2
476#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
477	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
478#else
479#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
480	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
481#endif
482#endif
483#ifndef this_cpu_try_cmpxchg_4
484#ifdef this_cpu_cmpxchg_4
485#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
486	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
487#else
488#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
489	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
490#endif
491#endif
492#ifndef this_cpu_try_cmpxchg_8
493#ifdef this_cpu_cmpxchg_8
494#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
495	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
496#else
497#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
498	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
499#endif
500#endif
501
502#ifndef this_cpu_try_cmpxchg64
503#ifdef this_cpu_cmpxchg64
504#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
505	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg64)
506#else
507#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
508	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
509#endif
510#endif
511#ifndef this_cpu_try_cmpxchg128
512#ifdef this_cpu_cmpxchg128
513#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
514	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg128)
515#else
516#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
517	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
518#endif
519#endif
520
521#ifndef this_cpu_cmpxchg_1
522#define this_cpu_cmpxchg_1(pcp, oval, nval) \
523	this_cpu_generic_cmpxchg(pcp, oval, nval)
524#endif
525#ifndef this_cpu_cmpxchg_2
526#define this_cpu_cmpxchg_2(pcp, oval, nval) \
527	this_cpu_generic_cmpxchg(pcp, oval, nval)
528#endif
529#ifndef this_cpu_cmpxchg_4
530#define this_cpu_cmpxchg_4(pcp, oval, nval) \
531	this_cpu_generic_cmpxchg(pcp, oval, nval)
532#endif
533#ifndef this_cpu_cmpxchg_8
534#define this_cpu_cmpxchg_8(pcp, oval, nval) \
535	this_cpu_generic_cmpxchg(pcp, oval, nval)
536#endif
537
538#ifndef this_cpu_cmpxchg64
539#define this_cpu_cmpxchg64(pcp, oval, nval) \
540	this_cpu_generic_cmpxchg(pcp, oval, nval)
541#endif
/* Fall back to the generic implementation unless the arch provides one. */
#ifndef this_cpu_cmpxchg128
#define this_cpu_cmpxchg128(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
546
547#endif /* _ASM_GENERIC_PERCPU_H_ */