1#ifndef __ASM_SH_ATOMIC_IRQ_H
2#define __ASM_SH_ATOMIC_IRQ_H
3
/*
 * These routines implement the atomic_t operations by disabling
 * interrupts around an ordinary read-modify-write of v->counter;
 * no load-locked/store-conditional or special branch layout is
 * involved in this flavour.
 */
9static inline void atomic_add(int i, atomic_t *v)
10{
11 unsigned long flags;
12
13 raw_local_irq_save(flags);
14 v->counter += i;
15 raw_local_irq_restore(flags);
16}
17
18static inline void atomic_sub(int i, atomic_t *v)
19{
20 unsigned long flags;
21
22 raw_local_irq_save(flags);
23 v->counter -= i;
24 raw_local_irq_restore(flags);
25}
26
27static inline int atomic_add_return(int i, atomic_t *v)
28{
29 unsigned long temp, flags;
30
31 raw_local_irq_save(flags);
32 temp = v->counter;
33 temp += i;
34 v->counter = temp;
35 raw_local_irq_restore(flags);
36
37 return temp;
38}
39
40static inline int atomic_sub_return(int i, atomic_t *v)
41{
42 unsigned long temp, flags;
43
44 raw_local_irq_save(flags);
45 temp = v->counter;
46 temp -= i;
47 v->counter = temp;
48 raw_local_irq_restore(flags);
49
50 return temp;
51}
52
53static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
54{
55 unsigned long flags;
56
57 raw_local_irq_save(flags);
58 v->counter &= ~mask;
59 raw_local_irq_restore(flags);
60}
61
62static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
63{
64 unsigned long flags;
65
66 raw_local_irq_save(flags);
67 v->counter |= mask;
68 raw_local_irq_restore(flags);
69}
70
71#endif /* __ASM_SH_ATOMIC_IRQ_H */
1#ifndef __ASM_SH_ATOMIC_IRQ_H
2#define __ASM_SH_ATOMIC_IRQ_H
3
4#include <linux/irqflags.h>
5
/*
 * These generators implement the atomic_t operations by disabling
 * interrupts around an ordinary read-modify-write of v->counter;
 * no load-locked/store-conditional or special branch layout is
 * involved in this flavour.
 */
11
/*
 * ATOMIC_OP(op, c_op) - generate "static inline void atomic_<op>()".
 *
 * The generated function applies the compound-assignment operator
 * @c_op (e.g. +=) to v->counter. Atomicity comes from keeping
 * interrupts disabled across the update.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}
21
/*
 * ATOMIC_OP_RETURN(op, c_op) - generate "atomic_<op>_return()".
 *
 * Like ATOMIC_OP(), but the generated function returns the NEW value
 * of v->counter. The load/modify/store runs entirely inside the
 * irq-disabled region, so the returned value is the one this caller
 * stored.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
35
/*
 * ATOMIC_FETCH_OP(op, c_op) - generate "atomic_fetch_<op>()".
 *
 * Like ATOMIC_OP(), but the generated function returns the value
 * v->counter held BEFORE the update (fetch-and-op semantics).
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
48
/*
 * ATOMIC_OPS(op, c_op) - instantiate the full helper set for one
 * operator. Arithmetic ops (add, sub) get the plain, _return and
 * fetch_ variants.
 */
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_OP_RETURN(op, c_op)					\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

#undef ATOMIC_OPS
/* Bitwise ops (and, or, xor) have no *_return variant: only the
 * plain and fetch_ forms are generated. */
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)

/* Tear down the generator macros; only the inline functions remain. */
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
70
71#endif /* __ASM_SH_ATOMIC_IRQ_H */