1#ifndef _ASM_M32R_ATOMIC_H
2#define _ASM_M32R_ATOMIC_H
3
4/*
5 * linux/include/asm-m32r/atomic.h
6 *
7 * M32R version:
8 * Copyright (C) 2001, 2002 Hitoshi Yamamoto
9 * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
10 */
11
12#include <linux/types.h>
13#include <asm/assembler.h>
14#include <asm/cmpxchg.h>
15#include <asm/dcache_clear.h>
16#include <asm/barrier.h>
17
18/*
19 * Atomic operations that C can't guarantee us. Useful for
20 * resource counting etc..
21 */
22
#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	READ_ONCE((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))

/*
 * On the M32700 TS1 chip, DCACHE_CLEAR() (used in the asm blocks below
 * with "r4" as its scratch register) clobbers r4, so it must be added
 * to the asm clobber lists there.  Elsewhere no extra clobber is needed.
 */
#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif
47
/*
 * ATOMIC_OP(op) - generate "static inline void atomic_<op>(int i, atomic_t *v)".
 *
 * The read-modify-write of v->counter runs with interrupts disabled
 * (local_irq_save) and is bracketed by the M32R LOCK/UNLOCK pair; the
 * result register is write-early ("=&r") so it cannot alias the inputs.
 * __ATOMIC_CLOBBER adds "r4" on TS1 chips, where DCACHE_CLEAR uses it
 * as scratch.
 *
 * NOTE(review): the backslash after the closing brace makes the blank
 * line that follows part of the macro expansion; harmless, but easy to
 * break when editing.
 */
#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op " \n\t"					\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1; \n\t"				\
		#op " %0, %2; \n\t"					\
		M32R_UNLOCK" %0, @%1; \n\t"				\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
}									\
68
/*
 * ATOMIC_OP_RETURN(op) - generate
 * "static inline int atomic_<op>_return(int i, atomic_t *v)".
 *
 * Identical LOCK/UNLOCK + irqs-off sequence to ATOMIC_OP(), but the
 * updated counter value is returned to the caller.
 */
#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return \n\t"				\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1; \n\t"				\
		#op " %0, %2; \n\t"					\
		M32R_UNLOCK" %0, @%1; \n\t"				\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}
91
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

/* add/sub get both the void and the value-returning variants. */
ATOMIC_OPS(add)
ATOMIC_OPS(sub)

/* The bitwise ops only need the void variant. */
ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
104
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.  Implemented on top of atomic_sub_return().
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
115
/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	/* irqs-off + LOCK/UNLOCK make the load/addi/store atomic. */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"addi %0, #1; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}
143
/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"addi %0, #-1; \n\t"	/* decrement via addi with -1 */
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}
171
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

/* Thin wrappers over the generic cmpxchg()/xchg() on &v->counter. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
221
222/**
223 * __atomic_add_unless - add unless the number is a given value
224 * @v: pointer of type atomic_t
225 * @a: the amount to add to v...
226 * @u: ...unless v is equal to u.
227 *
228 * Atomically adds @a to @v, so long as it was not @u.
229 * Returns the old value of @v.
230 */
231static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
232{
233 int c, old;
234 c = atomic_read(v);
235 for (;;) {
236 if (unlikely(c == (u)))
237 break;
238 old = atomic_cmpxchg((v), c, c + (a));
239 if (likely(old == c))
240 break;
241 c = old;
242 }
243 return c;
244}
245
246#endif /* _ASM_M32R_ATOMIC_H */
1#ifndef _ASM_M32R_ATOMIC_H
2#define _ASM_M32R_ATOMIC_H
3
4/*
5 * linux/include/asm-m32r/atomic.h
6 *
7 * M32R version:
8 * Copyright (C) 2001, 2002 Hitoshi Yamamoto
9 * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
10 */
11
12#include <linux/types.h>
13#include <asm/assembler.h>
14#include <asm/system.h>
15
16/*
17 * Atomic operations that C can't guarantee us. Useful for
18 * resource counting etc..
19 */
20
#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  The volatile cast forces a real
 * load from memory on every use (pre-READ_ONCE idiom).
 */
#define atomic_read(v)	(*(volatile int *)&(v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Plain word store; assumes a
 * word store is atomic on this architecture -- TODO confirm.
 */
#define atomic_set(v,i)	(((v)->counter) = (i))
39
/**
 * atomic_add_return - add integer to atomic variable and return it
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and return (@i + @v).
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	/* irqs-off + M32R LOCK/UNLOCK bracket the read-modify-write;
	 * "r4" is DCACHE_CLEAR's scratch register on TS1 chips. */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_add_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"add %0, %2; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
70
/**
 * atomic_sub_return - subtract integer from atomic variable and return it
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and return (@v - @i).
 */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_sub_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"sub %0, %2; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
101
/* Convenience wrappers: the void add/sub simply discard the *_return value. */

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
#define atomic_add(i,v) ((void) atomic_add_return((i), (v)))

/**
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
#define atomic_sub(i,v) ((void) atomic_sub_return((i), (v)))

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
130
/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"addi %0, #1; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
160
/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"addi %0, #-1; \n\t"	/* decrement via addi with -1 */
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
190
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

/* Thin wrappers over the generic cmpxchg()/xchg() on &v->counter. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
240
241/**
242 * __atomic_add_unless - add unless the number is a given value
243 * @v: pointer of type atomic_t
244 * @a: the amount to add to v...
245 * @u: ...unless v is equal to u.
246 *
247 * Atomically adds @a to @v, so long as it was not @u.
248 * Returns the old value of @v.
249 */
250static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
251{
252 int c, old;
253 c = atomic_read(v);
254 for (;;) {
255 if (unlikely(c == (u)))
256 break;
257 old = atomic_cmpxchg((v), c, c + (a));
258 if (likely(old == c))
259 break;
260 c = old;
261 }
262 return c;
263}
264
265
/*
 * atomic_clear_mask - atomically AND *addr with ~mask (clear the bits
 * set in @mask).
 *
 * Same irqs-off + LOCK/UNLOCK pattern as the ops above, but DCACHE_CLEAR
 * uses "r5" as its scratch register here, so TS1 builds clobber r5.
 * NOTE(review): the asm dereferences @addr directly rather than
 * &addr->counter, i.e. it relies on `counter` being the first member of
 * atomic_t -- confirm.
 */
static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_clear_mask \n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"and %0, %2; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (~mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
287
/*
 * atomic_set_mask - atomically OR *addr with @mask (set those bits).
 *
 * Mirror image of atomic_clear_mask() above; uses "r5" as the
 * DCACHE_CLEAR scratch register, clobbered on TS1 builds.
 */
static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_set_mask \n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"or %0, %2; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
309
/* Atomic operations are already serializing on m32r, so these only need
 * to be compiler barriers (barrier()), not hardware fences. */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
315
316#endif /* _ASM_M32R_ATOMIC_H */