/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)  { (i) }

#define atomic_read(v)          READ_ONCE((v)->counter)
#define atomic_set(v, i)        WRITE_ONCE(((v)->counter), (i))

#include <linux/kernel.h>

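/*
 * Illustrative note (not part of the original header): the H8/300 port is
 * uniprocessor-only, so "atomic" here only has to exclude the local CPU's
 * interrupt handlers.  Every operation below therefore follows the same
 * pattern: save and mask interrupts, do a plain C read-modify-write on
 * v->counter, then restore the saved interrupt state, e.g.:
 *
 *	h8300flags flags = arch_local_irq_save();
 *	v->counter += i;
 *	arch_local_irq_restore(flags);
 */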
#define ATOMIC_OP_RETURN(op, c_op)                              \
static inline int atomic_##op##_return(int i, atomic_t *v)     \
{                                                               \
        h8300flags flags;                                       \
        int ret;                                                \
                                                                \
        flags = arch_local_irq_save();                          \
        ret = v->counter c_op i;                                \
        arch_local_irq_restore(flags);                          \
        return ret;                                             \
}

#define ATOMIC_FETCH_OP(op, c_op)                               \
static inline int atomic_fetch_##op(int i, atomic_t *v)        \
{                                                               \
        h8300flags flags;                                       \
        int ret;                                                \
                                                                \
        flags = arch_local_irq_save();                          \
        ret = v->counter;                                       \
        v->counter c_op i;                                      \
        arch_local_irq_restore(flags);                          \
        return ret;                                             \
}

#define ATOMIC_OP(op, c_op)                                     \
static inline void atomic_##op(int i, atomic_t *v)             \
{                                                               \
        h8300flags flags;                                       \
                                                                \
        flags = arch_local_irq_save();                          \
        v->counter c_op i;                                      \
        arch_local_irq_restore(flags);                          \
}

ATOMIC_OP_RETURN(add, +=)
ATOMIC_OP_RETURN(sub, -=)

#define ATOMIC_OPS(op, c_op)                                    \
        ATOMIC_OP(op, c_op)                                     \
        ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)
ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
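/*
 * Illustrative expansion (not part of the original header): the macro
 * instantiations above generate atomic_add_return(), atomic_sub_return(),
 * atomic_{and,or,xor,add,sub}() and atomic_fetch_{and,or,xor,add,sub}().
 * For example, ATOMIC_OP_RETURN(add, +=) expands to roughly:
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		h8300flags flags;
 *		int ret;
 *
 *		flags = arch_local_irq_save();
 *		ret = v->counter += i;
 *		arch_local_irq_restore(flags);
 *		return ret;
 *	}
 */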

#define atomic_add_negative(a, v)       (atomic_add_return((a), (v)) < 0)
#define atomic_sub_and_test(i, v)       (atomic_sub_return(i, v) == 0)

#define atomic_inc_return(v)            atomic_add_return(1, v)
#define atomic_dec_return(v)            atomic_sub_return(1, v)

#define atomic_inc(v)                   (void)atomic_inc_return(v)
#define atomic_inc_and_test(v)          (atomic_inc_return(v) == 0)

#define atomic_dec(v)                   (void)atomic_dec_return(v)
#define atomic_dec_and_test(v)          (atomic_dec_return(v) == 0)
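/*
 * Illustrative usage (not part of the original header): a minimal
 * resource-counting sketch built on the wrappers above, using a
 * hypothetical counter and cleanup hook:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_inc(&nr_users);			// take a reference
 *	...
 *	if (atomic_dec_and_test(&nr_users))	// drop it; true when it hits 0
 *		release_resource_somehow();	// hypothetical cleanup
 */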

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;
        h8300flags flags;

        flags = arch_local_irq_save();
        ret = v->counter;
        if (likely(ret == old))
                v->counter = new;
        arch_local_irq_restore(flags);
        return ret;
}
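/*
 * Illustrative usage (not part of the original header): atomic_cmpxchg()
 * returns the value that was in *v, and the store only happened if that
 * value equalled "old".  A hypothetical compare-and-swap retry loop:
 *
 *	static int add_saturating(atomic_t *v, int max)	// hypothetical helper
 *	{
 *		int cur = atomic_read(v);
 *
 *		while (cur < max) {
 *			int seen = atomic_cmpxchg(v, cur, cur + 1);
 *
 *			if (seen == cur)
 *				return 0;	// our increment won
 *			cur = seen;		// value changed under us; retry
 *		}
 *		return -1;			// already at max
 *	}
 */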

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int ret;
        h8300flags flags;

        flags = arch_local_irq_save();
        ret = v->counter;
        if (ret != u)
                v->counter += a;
        arch_local_irq_restore(flags);
        return ret;
}
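/*
 * Illustrative usage (not part of the original header): __atomic_add_unless()
 * is what the generic atomic headers of this era build atomic_add_unless()
 * and atomic_inc_not_zero() from; it returns the old value, so the caller
 * can tell whether the add actually happened:
 *
 *	// take a reference only if the object is not already dead
 *	if (__atomic_add_unless(&obj->refs, 1, 0) == 0)	// hypothetical caller
 *		return NULL;	// counter was zero, the add was skipped
 */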

#endif /* __ARCH_H8300_ATOMIC__ */
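/*
 * Editorial note (not part of either file): what follows is a later revision
 * of the same header, in which __atomic_add_unless() became
 * atomic_fetch_add_unless() and the inc/dec/add_negative wrapper macros
 * moved to the generic atomic fallbacks.
 */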
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/irqflags.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)  { (i) }

#define atomic_read(v)          READ_ONCE((v)->counter)
#define atomic_set(v, i)        WRITE_ONCE(((v)->counter), (i))

#define ATOMIC_OP_RETURN(op, c_op)                              \
static inline int atomic_##op##_return(int i, atomic_t *v)     \
{                                                               \
        h8300flags flags;                                       \
        int ret;                                                \
                                                                \
        flags = arch_local_irq_save();                          \
        ret = v->counter c_op i;                                \
        arch_local_irq_restore(flags);                          \
        return ret;                                             \
}

#define ATOMIC_FETCH_OP(op, c_op)                               \
static inline int atomic_fetch_##op(int i, atomic_t *v)        \
{                                                               \
        h8300flags flags;                                       \
        int ret;                                                \
                                                                \
        flags = arch_local_irq_save();                          \
        ret = v->counter;                                       \
        v->counter c_op i;                                      \
        arch_local_irq_restore(flags);                          \
        return ret;                                             \
}

#define ATOMIC_OP(op, c_op)                                     \
static inline void atomic_##op(int i, atomic_t *v)             \
{                                                               \
        h8300flags flags;                                       \
                                                                \
        flags = arch_local_irq_save();                          \
        v->counter c_op i;                                      \
        arch_local_irq_restore(flags);                          \
}

ATOMIC_OP_RETURN(add, +=)
ATOMIC_OP_RETURN(sub, -=)

#define ATOMIC_OPS(op, c_op)                                    \
        ATOMIC_OP(op, c_op)                                     \
        ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or, |=)
ATOMIC_OPS(xor, ^=)
ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;
        h8300flags flags;

        flags = arch_local_irq_save();
        ret = v->counter;
        if (likely(ret == old))
                v->counter = new;
        arch_local_irq_restore(flags);
        return ret;
}

static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
        int ret;
        h8300flags flags;

        flags = arch_local_irq_save();
        ret = v->counter;
        if (ret != u)
                v->counter += a;
        arch_local_irq_restore(flags);
        return ret;
}
#define atomic_fetch_add_unless         atomic_fetch_add_unless
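/*
 * Illustrative note (not part of the original header): defining the macro
 * atomic_fetch_add_unless to its own name is how an architecture tells the
 * generic atomic headers that it supplies this operation, so the generic
 * fallback is skipped.  The same mechanism is why the inc/dec/add_negative
 * wrappers from the older revision above are gone: the generic layer now
 * builds them from the operations that remain here.  A simplified sketch of
 * the pattern on the generic side (assumed, not quoted from that header):
 *
 *	#ifndef atomic_fetch_add_unless
 *	static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 *	{
 *		... cmpxchg-based fallback ...
 *	}
 *	#endif
 */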

#endif /* __ARCH_H8300_ATOMIC__ */