v3.1: arch/sh/include/asm/atomic-irq.h
 
#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter += i;
	raw_local_irq_restore(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter -= i;
	raw_local_irq_restore(flags);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	raw_local_irq_save(flags);
	temp = v->counter;
	temp += i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	return temp;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	raw_local_irq_save(flags);
	temp = v->counter;
	temp -= i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	return temp;
}

static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter &= ~mask;
	raw_local_irq_restore(flags);
}

static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter |= mask;
	raw_local_irq_restore(flags);
}

#endif /* __ASM_SH_ATOMIC_IRQ_H */
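These helpers make each read-modify-write atomic by disabling local interrupts around it, which is sufficient only on a uniprocessor: with a single CPU, the sole concurrent observer is an interrupt handler, and raw_local_irq_save()/raw_local_irq_restore() keep it out of the critical section. Below is a minimal userspace model of the pattern, a sketch rather than kernel code: fake_irq_save() and fake_irq_restore() are hypothetical no-op stand-ins for the real interrupt masking, so it compiles and runs anywhere.

#include <stdio.h>

/* Userspace model of the kernel's atomic_t. */
typedef struct { int counter; } atomic_t;

/* Hypothetical stand-ins for raw_local_irq_save()/restore(); on real
   hardware these would mask interrupts, here they do nothing. */
static inline void fake_irq_save(unsigned long *flags) { *flags = 0; }
static inline void fake_irq_restore(unsigned long flags) { (void)flags; }

/* Same shape as atomic_add_return() above: the whole load/modify/store
   runs with "interrupts" off, so no handler on this CPU can see or
   clobber a half-finished update. */
static inline int model_atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int temp;

	fake_irq_save(&flags);
	temp = v->counter;
	temp += i;
	v->counter = temp;
	fake_irq_restore(flags);

	return temp;
}

int main(void)
{
	atomic_t refcount = { .counter = 1 };

	/* Typical use: drop a reference, act when the count hits zero. */
	if (model_atomic_add_return(-1, &refcount) == 0)
		printf("last reference dropped\n");
	return 0;
}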
v6.9.4: arch/sh/include/asm/atomic-irq.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op, c_op)						\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int arch_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_OP_RETURN(op, c_op)					\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)
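Each ATOMIC_OPS(op, c_op) invocation stamps out the three templates above once per operation, pasting op into the function name and using c_op as the C assignment operator. Expanding ATOMIC_OPS(add, +=) by hand, ATOMIC_OP and ATOMIC_FETCH_OP produce the following (ATOMIC_OP_RETURN is analogous):

static inline void arch_atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter += i;
	raw_local_irq_restore(flags);
}

static inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
	unsigned long temp, flags;

	raw_local_irq_save(flags);
	temp = v->counter;
	v->counter += i;
	raw_local_irq_restore(flags);

	return temp;
}

Note the difference in return value: the _return variant hands back the new counter value, while the fetch_ variant hands back the old one.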

#define arch_atomic_add_return	arch_atomic_add_return
#define arch_atomic_sub_return	arch_atomic_sub_return
#define arch_atomic_fetch_add	arch_atomic_fetch_add
#define arch_atomic_fetch_sub	arch_atomic_fetch_sub

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)

#define arch_atomic_fetch_and	arch_atomic_fetch_and
#define arch_atomic_fetch_or	arch_atomic_fetch_or
#define arch_atomic_fetch_xor	arch_atomic_fetch_xor
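Kernel code does not normally call these arch_ entry points directly; the generic <linux/atomic.h> layer wraps them as atomic_fetch_and(), atomic_fetch_or() and so on. Because the fetch_ variants return the value from before the update, they support the classic test-and-set idiom. A short sketch, assuming the generic wrapper names and a hypothetical BUSY flag bit:

#include <linux/atomic.h>
#include <linux/types.h>

#define BUSY	0x1	/* hypothetical flag bit for this sketch */

static atomic_t state = ATOMIC_INIT(0);

/* Atomically set BUSY and report whether we were the one to set it:
   the returned old value tells us if the bit was already taken. */
static bool try_claim(void)
{
	return !(atomic_fetch_or(BUSY, &state) & BUSY);
}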

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_IRQ_H */