#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * These fallbacks make each operation atomic with respect to the local
 * CPU by disabling interrupts around a plain read-modify-write of the
 * counter.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter += i;
	raw_local_irq_restore(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter -= i;
	raw_local_irq_restore(flags);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	raw_local_irq_save(flags);
	temp = v->counter;
	temp += i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	return temp;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	raw_local_irq_save(flags);
	temp = v->counter;
	temp -= i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	return temp;
}

static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter &= ~mask;
	raw_local_irq_restore(flags);
}

static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter |= mask;
	raw_local_irq_restore(flags);
}

#endif /* __ASM_SH_ATOMIC_IRQ_H */
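/*
 * Illustration only, not part of the header above: the same IRQ-disable
 * pattern extends naturally to a compare-and-exchange. The helper name
 * below is hypothetical; the real SH cmpxchg implementation lives in a
 * separate header.
 */
static inline int atomic_cmpxchg_irq_sketch(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	/*
	 * With local interrupts off, the compare and the conditional
	 * store cannot be split by an interrupt handler updating the
	 * same counter, so the pair behaves as one atomic step on UP.
	 */
	raw_local_irq_save(flags);
	ret = v->counter;
	if (ret == old)
		v->counter = new;
	raw_local_irq_restore(flags);

	/* Return the value observed before any swap, as cmpxchg does. */
	return ret;
}

/*
 * A later revision of this header, reproduced next, generates the
 * operations above from a small set of macros instead of spelling each
 * function out by hand.
 */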
#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * These fallbacks make each operation atomic with respect to the local
 * CPU by disabling interrupts around a plain read-modify-write of the
 * counter; the ATOMIC_OP*() macros below generate the individual
 * operations.
 */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}

#define ATOMIC_OPS(op, c_op) ATOMIC_OP(op, c_op) ATOMIC_OP_RETURN(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)
ATOMIC_OP(and, &=)
ATOMIC_OP(or, |=)
ATOMIC_OP(xor, ^=)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_IRQ_H */
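/*
 * For reference, ATOMIC_OPS(add, +=) above expands to the same pair of
 * functions that the pre-macro version of this header spells out by
 * hand, i.e. roughly (modulo whitespace):
 *
 *	static inline void atomic_add(int i, atomic_t *v)
 *	{
 *		unsigned long flags;
 *
 *		raw_local_irq_save(flags);
 *		v->counter += i;
 *		raw_local_irq_restore(flags);
 *	}
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		unsigned long temp, flags;
 *
 *		raw_local_irq_save(flags);
 *		temp = v->counter;
 *		temp += i;
 *		v->counter = temp;
 *		raw_local_irq_restore(flags);
 *
 *		return temp;
 *	}
 *
 * ATOMIC_OP(and, &=), ATOMIC_OP(or, |=) and ATOMIC_OP(xor, ^=) likewise
 * generate atomic_and(), atomic_or() and atomic_xor(), with no *_return
 * variants.
 */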