/* Linux v6.2: include/asm-generic/percpu.h (site navigation text removed from scrape) */
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef _ASM_GENERIC_PERCPU_H_
  3#define _ASM_GENERIC_PERCPU_H_
  4
  5#include <linux/compiler.h>
  6#include <linux/threads.h>
  7#include <linux/percpu-defs.h>
  8
  9#ifdef CONFIG_SMP
 10
 11/*
 12 * per_cpu_offset() is the offset that has to be added to a
 13 * percpu variable to get to the instance for a certain processor.
 14 *
 15 * Most arches use the __per_cpu_offset array for those offsets but
 16 * some arches have their own ways of determining the offset (x86_64, s390).
 17 */
 18#ifndef __per_cpu_offset
 19extern unsigned long __per_cpu_offset[NR_CPUS];
 20
 21#define per_cpu_offset(x) (__per_cpu_offset[x])
 22#endif
 23
 24/*
 25 * Determine the offset for the currently active processor.
 26 * An arch may define __my_cpu_offset to provide a more effective
 27 * means of obtaining the offset to the per cpu variables of the
 28 * current processor.
 29 */
 30#ifndef __my_cpu_offset
 31#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
 32#endif
 33#ifdef CONFIG_DEBUG_PREEMPT
 34#define my_cpu_offset per_cpu_offset(smp_processor_id())
 35#else
 36#define my_cpu_offset __my_cpu_offset
 37#endif
 38
 39/*
 40 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 41 * translations for raw_cpu_ptr().
 42 */
 43#ifndef arch_raw_cpu_ptr
 44#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
 45#endif
 46
 47#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
 48extern void setup_per_cpu_areas(void);
 49#endif
 50
 51#endif	/* SMP */
 52
 53#ifndef PER_CPU_BASE_SECTION
 54#ifdef CONFIG_SMP
 55#define PER_CPU_BASE_SECTION ".data..percpu"
 56#else
 57#define PER_CPU_BASE_SECTION ".data"
 58#endif
 59#endif
 60
 61#ifndef PER_CPU_ATTRIBUTES
 62#define PER_CPU_ATTRIBUTES
 63#endif
 64
 65#define raw_cpu_generic_read(pcp)					\
 66({									\
 67	*raw_cpu_ptr(&(pcp));						\
 68})
 69
 70#define raw_cpu_generic_to_op(pcp, val, op)				\
 71do {									\
 72	*raw_cpu_ptr(&(pcp)) op val;					\
 73} while (0)
 74
 75#define raw_cpu_generic_add_return(pcp, val)				\
 76({									\
 77	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
 78									\
 79	*__p += val;							\
 80	*__p;								\
 81})
 82
 83#define raw_cpu_generic_xchg(pcp, nval)					\
 84({									\
 85	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
 86	typeof(pcp) __ret;						\
 87	__ret = *__p;							\
 88	*__p = nval;							\
 89	__ret;								\
 90})
 91
 92#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
 93({									\
 94	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
 95	typeof(pcp) __ret;						\
 96	__ret = *__p;							\
 97	if (__ret == (oval))						\
 98		*__p = nval;						\
 99	__ret;								\
100})
101
102#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
103({									\
104	typeof(pcp1) *__p1 = raw_cpu_ptr(&(pcp1));			\
105	typeof(pcp2) *__p2 = raw_cpu_ptr(&(pcp2));			\
106	int __ret = 0;							\
107	if (*__p1 == (oval1) && *__p2  == (oval2)) {			\
108		*__p1 = nval1;						\
109		*__p2 = nval2;						\
 
110		__ret = 1;						\
111	}								\
112	(__ret);							\
113})
114
/* Read with preemption disabled; used when the access is a single native word. */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})

/* Read with IRQs disabled; needed when the value cannot be read atomically. */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)


/*
 * this_cpu read-modify-write variants: same semantics as the raw_cpu
 * forms above, but made safe against interrupts by disabling IRQs
 * around the whole read-modify-write sequence.
 */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

194#ifndef raw_cpu_read_1
195#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
196#endif
197#ifndef raw_cpu_read_2
198#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
199#endif
200#ifndef raw_cpu_read_4
201#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
202#endif
203#ifndef raw_cpu_read_8
204#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
205#endif
206
207#ifndef raw_cpu_write_1
208#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
209#endif
210#ifndef raw_cpu_write_2
211#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
212#endif
213#ifndef raw_cpu_write_4
214#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
215#endif
216#ifndef raw_cpu_write_8
217#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
218#endif
219
220#ifndef raw_cpu_add_1
221#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
222#endif
223#ifndef raw_cpu_add_2
224#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
225#endif
226#ifndef raw_cpu_add_4
227#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
228#endif
229#ifndef raw_cpu_add_8
230#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
231#endif
232
233#ifndef raw_cpu_and_1
234#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
235#endif
236#ifndef raw_cpu_and_2
237#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
238#endif
239#ifndef raw_cpu_and_4
240#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
241#endif
242#ifndef raw_cpu_and_8
243#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
244#endif
245
246#ifndef raw_cpu_or_1
247#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
248#endif
249#ifndef raw_cpu_or_2
250#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
251#endif
252#ifndef raw_cpu_or_4
253#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
254#endif
255#ifndef raw_cpu_or_8
256#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
257#endif
258
259#ifndef raw_cpu_add_return_1
260#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
261#endif
262#ifndef raw_cpu_add_return_2
263#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
264#endif
265#ifndef raw_cpu_add_return_4
266#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
267#endif
268#ifndef raw_cpu_add_return_8
269#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
270#endif
271
272#ifndef raw_cpu_xchg_1
273#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
274#endif
275#ifndef raw_cpu_xchg_2
276#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
277#endif
278#ifndef raw_cpu_xchg_4
279#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
280#endif
281#ifndef raw_cpu_xchg_8
282#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
283#endif
284
285#ifndef raw_cpu_cmpxchg_1
286#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
287	raw_cpu_generic_cmpxchg(pcp, oval, nval)
288#endif
289#ifndef raw_cpu_cmpxchg_2
290#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
291	raw_cpu_generic_cmpxchg(pcp, oval, nval)
292#endif
293#ifndef raw_cpu_cmpxchg_4
294#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
295	raw_cpu_generic_cmpxchg(pcp, oval, nval)
296#endif
297#ifndef raw_cpu_cmpxchg_8
298#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
299	raw_cpu_generic_cmpxchg(pcp, oval, nval)
300#endif
301
302#ifndef raw_cpu_cmpxchg_double_1
303#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
304	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
305#endif
306#ifndef raw_cpu_cmpxchg_double_2
307#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
308	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
309#endif
310#ifndef raw_cpu_cmpxchg_double_4
311#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
312	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
313#endif
314#ifndef raw_cpu_cmpxchg_double_8
315#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
316	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
317#endif
318
319#ifndef this_cpu_read_1
320#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
321#endif
322#ifndef this_cpu_read_2
323#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
324#endif
325#ifndef this_cpu_read_4
326#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
327#endif
328#ifndef this_cpu_read_8
329#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
330#endif
331
332#ifndef this_cpu_write_1
333#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
334#endif
335#ifndef this_cpu_write_2
336#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
337#endif
338#ifndef this_cpu_write_4
339#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
340#endif
341#ifndef this_cpu_write_8
342#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
343#endif
344
345#ifndef this_cpu_add_1
346#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
347#endif
348#ifndef this_cpu_add_2
349#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
350#endif
351#ifndef this_cpu_add_4
352#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
353#endif
354#ifndef this_cpu_add_8
355#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
356#endif
357
358#ifndef this_cpu_and_1
359#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
360#endif
361#ifndef this_cpu_and_2
362#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
363#endif
364#ifndef this_cpu_and_4
365#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
366#endif
367#ifndef this_cpu_and_8
368#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
369#endif
370
371#ifndef this_cpu_or_1
372#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
373#endif
374#ifndef this_cpu_or_2
375#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
376#endif
377#ifndef this_cpu_or_4
378#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
379#endif
380#ifndef this_cpu_or_8
381#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
382#endif
383
384#ifndef this_cpu_add_return_1
385#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
386#endif
387#ifndef this_cpu_add_return_2
388#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
389#endif
390#ifndef this_cpu_add_return_4
391#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
392#endif
393#ifndef this_cpu_add_return_8
394#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
395#endif
396
397#ifndef this_cpu_xchg_1
398#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
399#endif
400#ifndef this_cpu_xchg_2
401#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
402#endif
403#ifndef this_cpu_xchg_4
404#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
405#endif
406#ifndef this_cpu_xchg_8
407#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
408#endif
409
410#ifndef this_cpu_cmpxchg_1
411#define this_cpu_cmpxchg_1(pcp, oval, nval) \
412	this_cpu_generic_cmpxchg(pcp, oval, nval)
413#endif
414#ifndef this_cpu_cmpxchg_2
415#define this_cpu_cmpxchg_2(pcp, oval, nval) \
416	this_cpu_generic_cmpxchg(pcp, oval, nval)
417#endif
418#ifndef this_cpu_cmpxchg_4
419#define this_cpu_cmpxchg_4(pcp, oval, nval) \
420	this_cpu_generic_cmpxchg(pcp, oval, nval)
421#endif
422#ifndef this_cpu_cmpxchg_8
423#define this_cpu_cmpxchg_8(pcp, oval, nval) \
424	this_cpu_generic_cmpxchg(pcp, oval, nval)
425#endif
426
427#ifndef this_cpu_cmpxchg_double_1
428#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
429	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
430#endif
431#ifndef this_cpu_cmpxchg_double_2
432#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
433	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
434#endif
435#ifndef this_cpu_cmpxchg_double_4
436#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
437	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
438#endif
439#ifndef this_cpu_cmpxchg_double_8
440#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
441	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
442#endif
443
444#endif /* _ASM_GENERIC_PERCPU_H_ */
/* Linux v4.6: include/asm-generic/percpu.h (older copy of the same header, shown for comparison) */
  1#ifndef _ASM_GENERIC_PERCPU_H_
  2#define _ASM_GENERIC_PERCPU_H_
  3
  4#include <linux/compiler.h>
  5#include <linux/threads.h>
  6#include <linux/percpu-defs.h>
  7
  8#ifdef CONFIG_SMP
  9
 10/*
 11 * per_cpu_offset() is the offset that has to be added to a
 12 * percpu variable to get to the instance for a certain processor.
 13 *
 14 * Most arches use the __per_cpu_offset array for those offsets but
 15 * some arches have their own ways of determining the offset (x86_64, s390).
 16 */
 17#ifndef __per_cpu_offset
 18extern unsigned long __per_cpu_offset[NR_CPUS];
 19
 20#define per_cpu_offset(x) (__per_cpu_offset[x])
 21#endif
 22
 23/*
 24 * Determine the offset for the currently active processor.
 25 * An arch may define __my_cpu_offset to provide a more effective
 26 * means of obtaining the offset to the per cpu variables of the
 27 * current processor.
 28 */
 29#ifndef __my_cpu_offset
 30#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
 31#endif
 32#ifdef CONFIG_DEBUG_PREEMPT
 33#define my_cpu_offset per_cpu_offset(smp_processor_id())
 34#else
 35#define my_cpu_offset __my_cpu_offset
 36#endif
 37
 38/*
 39 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 40 * translations for raw_cpu_ptr().
 41 */
 42#ifndef arch_raw_cpu_ptr
 43#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
 44#endif
 45
 46#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
 47extern void setup_per_cpu_areas(void);
 48#endif
 49
 50#endif	/* SMP */
 51
 52#ifndef PER_CPU_BASE_SECTION
 53#ifdef CONFIG_SMP
 54#define PER_CPU_BASE_SECTION ".data..percpu"
 55#else
 56#define PER_CPU_BASE_SECTION ".data"
 57#endif
 58#endif
 59
 60#ifndef PER_CPU_ATTRIBUTES
 61#define PER_CPU_ATTRIBUTES
 62#endif
 63
 64#ifndef PER_CPU_DEF_ATTRIBUTES
 65#define PER_CPU_DEF_ATTRIBUTES
 66#endif
 
 67
 68#define raw_cpu_generic_to_op(pcp, val, op)				\
 69do {									\
 70	*raw_cpu_ptr(&(pcp)) op val;					\
 71} while (0)
 72
 73#define raw_cpu_generic_add_return(pcp, val)				\
 74({									\
 75	raw_cpu_add(pcp, val);						\
 76	raw_cpu_read(pcp);						\
 
 
 77})
 78
 79#define raw_cpu_generic_xchg(pcp, nval)					\
 80({									\
 
 81	typeof(pcp) __ret;						\
 82	__ret = raw_cpu_read(pcp);					\
 83	raw_cpu_write(pcp, nval);					\
 84	__ret;								\
 85})
 86
 87#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
 88({									\
 
 89	typeof(pcp) __ret;						\
 90	__ret = raw_cpu_read(pcp);					\
 91	if (__ret == (oval))						\
 92		raw_cpu_write(pcp, nval);				\
 93	__ret;								\
 94})
 95
 96#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 97({									\
 
 
 98	int __ret = 0;							\
 99	if (raw_cpu_read(pcp1) == (oval1) &&				\
100			 raw_cpu_read(pcp2)  == (oval2)) {		\
101		raw_cpu_write(pcp1, nval1);				\
102		raw_cpu_write(pcp2, nval2);				\
103		__ret = 1;						\
104	}								\
105	(__ret);							\
106})
107
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
/*
 * this_cpu variants: safe against preemption (read) or interrupts
 * (read-modify-write sequences).
 */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	preempt_disable();						\
	__ret = *this_cpu_ptr(&(pcp));					\
	preempt_enable();						\
	__ret;								\
})

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	*raw_cpu_ptr(&(pcp)) op val;					\
	raw_local_irq_restore(__flags);					\
} while (0)

#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_add(pcp, val);						\
	__ret = raw_cpu_read(pcp);					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_read(pcp);					\
	if (__ret == (oval))						\
		raw_cpu_write(pcp, nval);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

170#ifndef raw_cpu_read_1
171#define raw_cpu_read_1(pcp)		(*raw_cpu_ptr(&(pcp)))
172#endif
173#ifndef raw_cpu_read_2
174#define raw_cpu_read_2(pcp)		(*raw_cpu_ptr(&(pcp)))
175#endif
176#ifndef raw_cpu_read_4
177#define raw_cpu_read_4(pcp)		(*raw_cpu_ptr(&(pcp)))
178#endif
179#ifndef raw_cpu_read_8
180#define raw_cpu_read_8(pcp)		(*raw_cpu_ptr(&(pcp)))
181#endif
182
183#ifndef raw_cpu_write_1
184#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
185#endif
186#ifndef raw_cpu_write_2
187#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
188#endif
189#ifndef raw_cpu_write_4
190#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
191#endif
192#ifndef raw_cpu_write_8
193#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
194#endif
195
196#ifndef raw_cpu_add_1
197#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
198#endif
199#ifndef raw_cpu_add_2
200#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
201#endif
202#ifndef raw_cpu_add_4
203#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
204#endif
205#ifndef raw_cpu_add_8
206#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
207#endif
208
209#ifndef raw_cpu_and_1
210#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
211#endif
212#ifndef raw_cpu_and_2
213#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
214#endif
215#ifndef raw_cpu_and_4
216#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
217#endif
218#ifndef raw_cpu_and_8
219#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
220#endif
221
222#ifndef raw_cpu_or_1
223#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
224#endif
225#ifndef raw_cpu_or_2
226#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
227#endif
228#ifndef raw_cpu_or_4
229#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
230#endif
231#ifndef raw_cpu_or_8
232#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
233#endif
234
235#ifndef raw_cpu_add_return_1
236#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
237#endif
238#ifndef raw_cpu_add_return_2
239#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
240#endif
241#ifndef raw_cpu_add_return_4
242#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
243#endif
244#ifndef raw_cpu_add_return_8
245#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
246#endif
247
248#ifndef raw_cpu_xchg_1
249#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
250#endif
251#ifndef raw_cpu_xchg_2
252#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
253#endif
254#ifndef raw_cpu_xchg_4
255#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
256#endif
257#ifndef raw_cpu_xchg_8
258#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
259#endif
260
261#ifndef raw_cpu_cmpxchg_1
262#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
263	raw_cpu_generic_cmpxchg(pcp, oval, nval)
264#endif
265#ifndef raw_cpu_cmpxchg_2
266#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
267	raw_cpu_generic_cmpxchg(pcp, oval, nval)
268#endif
269#ifndef raw_cpu_cmpxchg_4
270#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
271	raw_cpu_generic_cmpxchg(pcp, oval, nval)
272#endif
273#ifndef raw_cpu_cmpxchg_8
274#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
275	raw_cpu_generic_cmpxchg(pcp, oval, nval)
276#endif
277
278#ifndef raw_cpu_cmpxchg_double_1
279#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
280	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
281#endif
282#ifndef raw_cpu_cmpxchg_double_2
283#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
284	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
285#endif
286#ifndef raw_cpu_cmpxchg_double_4
287#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
288	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
289#endif
290#ifndef raw_cpu_cmpxchg_double_8
291#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
292	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
293#endif
294
295#ifndef this_cpu_read_1
296#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
297#endif
298#ifndef this_cpu_read_2
299#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
300#endif
301#ifndef this_cpu_read_4
302#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
303#endif
304#ifndef this_cpu_read_8
305#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
306#endif
307
308#ifndef this_cpu_write_1
309#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
310#endif
311#ifndef this_cpu_write_2
312#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
313#endif
314#ifndef this_cpu_write_4
315#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
316#endif
317#ifndef this_cpu_write_8
318#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
319#endif
320
321#ifndef this_cpu_add_1
322#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
323#endif
324#ifndef this_cpu_add_2
325#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
326#endif
327#ifndef this_cpu_add_4
328#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
329#endif
330#ifndef this_cpu_add_8
331#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
332#endif
333
334#ifndef this_cpu_and_1
335#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
336#endif
337#ifndef this_cpu_and_2
338#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
339#endif
340#ifndef this_cpu_and_4
341#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
342#endif
343#ifndef this_cpu_and_8
344#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
345#endif
346
347#ifndef this_cpu_or_1
348#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
349#endif
350#ifndef this_cpu_or_2
351#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
352#endif
353#ifndef this_cpu_or_4
354#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
355#endif
356#ifndef this_cpu_or_8
357#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
358#endif
359
360#ifndef this_cpu_add_return_1
361#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
362#endif
363#ifndef this_cpu_add_return_2
364#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
365#endif
366#ifndef this_cpu_add_return_4
367#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
368#endif
369#ifndef this_cpu_add_return_8
370#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
371#endif
372
373#ifndef this_cpu_xchg_1
374#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
375#endif
376#ifndef this_cpu_xchg_2
377#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
378#endif
379#ifndef this_cpu_xchg_4
380#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
381#endif
382#ifndef this_cpu_xchg_8
383#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
384#endif
385
386#ifndef this_cpu_cmpxchg_1
387#define this_cpu_cmpxchg_1(pcp, oval, nval) \
388	this_cpu_generic_cmpxchg(pcp, oval, nval)
389#endif
390#ifndef this_cpu_cmpxchg_2
391#define this_cpu_cmpxchg_2(pcp, oval, nval) \
392	this_cpu_generic_cmpxchg(pcp, oval, nval)
393#endif
394#ifndef this_cpu_cmpxchg_4
395#define this_cpu_cmpxchg_4(pcp, oval, nval) \
396	this_cpu_generic_cmpxchg(pcp, oval, nval)
397#endif
398#ifndef this_cpu_cmpxchg_8
399#define this_cpu_cmpxchg_8(pcp, oval, nval) \
400	this_cpu_generic_cmpxchg(pcp, oval, nval)
401#endif
402
403#ifndef this_cpu_cmpxchg_double_1
404#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
405	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
406#endif
407#ifndef this_cpu_cmpxchg_double_2
408#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
409	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
410#endif
411#ifndef this_cpu_cmpxchg_double_4
412#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
413	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
414#endif
415#ifndef this_cpu_cmpxchg_double_8
416#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
417	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
418#endif
419
420#endif /* _ASM_GENERIC_PERCPU_H_ */