[Code-browser page chrome (site banner, training-course ad, version selector) captured by the scrape — not part of the source header below.]
v3.5.6
 
 1#ifndef __ASM_SH_ATOMIC_IRQ_H
 2#define __ASM_SH_ATOMIC_IRQ_H
 3
 4#include <linux/irqflags.h>
 5
/*
 * NOTE(review): this comment is vestigial — it describes a
 * branch-to-end-of-.text retry pattern, but nothing in this file
 * branches at all; every operation below simply disables local
 * interrupts around a plain read-modify-write.  The text appears to
 * have been carried over from another architecture's atomic.h —
 * confirm against history before relying on it.
 *
 * Original comment:
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */
/*
 * atomic_add - add @i to @v->counter.
 *
 * Atomic only with respect to code on the local CPU: the += is made
 * non-preemptible by masking interrupts around it.  NOTE(review):
 * presumably this header is used only on !SMP configurations, since
 * IRQ masking does not serialize against other CPUs — confirm.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	/* Close the interrupt window around the read-modify-write. */
	raw_local_irq_save(flags);
	v->counter += i;
	raw_local_irq_restore(flags);
}
19
/*
 * atomic_sub - subtract @i from @v->counter.
 *
 * Same IRQ-masking scheme as atomic_add(): atomic with respect to the
 * local CPU only.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter -= i;
	raw_local_irq_restore(flags);
}
28
/*
 * atomic_add_return - add @i to @v->counter and return the NEW value.
 *
 * The load, add and store all happen inside one IRQ-disabled window,
 * so the returned value is exactly what was stored.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	raw_local_irq_save(flags);
	temp = v->counter;
	temp += i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	/* temp is unsigned long; narrows back to int at the return. */
	return temp;
}
41
/*
 * atomic_sub_return - subtract @i from @v->counter and return the NEW
 * value.  Mirror image of atomic_add_return(); same single
 * IRQ-disabled read-modify-write window.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	raw_local_irq_save(flags);
	temp = v->counter;
	temp -= i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	/* temp is unsigned long; narrows back to int at the return. */
	return temp;
}
54
/*
 * atomic_clear_mask - atomically clear the bits in @mask from
 * @v->counter (i.e. counter &= ~mask), with interrupts masked so the
 * RMW cannot be split on this CPU.
 */
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter &= ~mask;
	raw_local_irq_restore(flags);
}
63
/*
 * atomic_set_mask - atomically set the bits in @mask in @v->counter
 * (i.e. counter |= mask).  Counterpart to atomic_clear_mask().
 */
static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter |= mask;
	raw_local_irq_restore(flags);
}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
72
73#endif /* __ASM_SH_ATOMIC_IRQ_H */
v6.2 — the same header after the kernel-wide atomic refactor: the per-op functions are now generated by macros and carry the arch_ prefix.
 1/* SPDX-License-Identifier: GPL-2.0 */
 2#ifndef __ASM_SH_ATOMIC_IRQ_H
 3#define __ASM_SH_ATOMIC_IRQ_H
 4
 5#include <linux/irqflags.h>
 6
/*
 * NOTE(review): this comment is vestigial — it describes a
 * branch-to-end-of-.text retry pattern, but the macros below generate
 * no branches; each op is a plain read-modify-write with local
 * interrupts masked.  Likely inherited from another architecture's
 * atomic.h — confirm against history.
 *
 * Original comment:
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12
/*
 * ATOMIC_OP(op, c_op) expands to arch_atomic_<op>(i, v): apply the
 * compound-assignment operator @c_op (e.g. +=) to v->counter inside an
 * IRQ-disabled window.  Atomic with respect to the local CPU only.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void arch_atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}
22
/*
 * ATOMIC_OP_RETURN(op, c_op) expands to arch_atomic_<op>_return(i, v):
 * same IRQ-masked RMW as ATOMIC_OP, but returns the NEW counter value
 * (temp after c_op is applied).  temp is unsigned long and narrows to
 * int at the return.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int arch_atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
36
/*
 * ATOMIC_FETCH_OP(op, c_op) expands to arch_atomic_fetch_<op>(i, v):
 * applies @c_op to v->counter and returns the OLD value (temp is
 * snapshotted before the update) — contrast with ATOMIC_OP_RETURN,
 * which returns the new value.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
49
/*
 * add/sub get all three variants: arch_atomic_<op>,
 * arch_atomic_<op>_return and arch_atomic_fetch_<op>.
 */
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_OP_RETURN(op, c_op)					\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

/*
 * The bitwise ops get only the void and fetch_ variants — no
 * <op>_return is generated for and/or/xor.
 */
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)

/* Generator macros are local to this header; drop them. */
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
71
72#endif /* __ASM_SH_ATOMIC_IRQ_H */