/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * Interrupt-disabling fallback for SH CPUs without ll/sc-style
 * primitives: each operation masks local interrupts around the
 * read-modify-write of v->counter, which is sufficient for
 * atomicity on uniprocessor configurations.
 */
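/*
 * ATOMIC_OP() generates the void arch_atomic_<op>() operations, e.g.
 * ATOMIC_OP(add, +=) expands to arch_atomic_add(), which performs
 * v->counter += i with local interrupts disabled and returns nothing.
 */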
#define ATOMIC_OP(op, c_op)						\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}
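/*
 * ATOMIC_OP_RETURN() generates arch_atomic_<op>_return(), which applies
 * the operation and returns the *new* value of the counter.
 */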
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int arch_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
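/*
 * ATOMIC_FETCH_OP() generates arch_atomic_fetch_<op>(), which applies
 * the operation and returns the *old* value of the counter.
 */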
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
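/*
 * The arithmetic operations get all three variants: plain,
 * value-returning, and fetch.
 */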
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_OP_RETURN(op, c_op)					\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)
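/*
 * The bitwise operations only need the plain and fetch forms; the
 * kernel's atomic API defines no arch_atomic_<op>_return() for them.
 */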
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)
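/* Tear down the generator macros so they don't leak to header users. */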
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_IRQ_H */