Note: File does not exist in v6.2.
#ifndef _ASM_M32R_SPINLOCK_H
#define _ASM_M32R_SPINLOCK_H

/*
 *  linux/include/asm-m32r/spinlock.h
 *
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/compiler.h>
#include <linux/atomic.h>
#include <asm/dcache_clear.h>
#include <asm/page.h>
#include <asm/barrier.h>
#include <asm/processor.h>

/*
 * Your basic SMP spinlocks, allowing only a single CPU anywhere
 *
 * (the type definitions are in asm/spinlock_types.h)
 *
 * Simple spin lock operations.  There are two variants: one clears IRQs
 * on the local processor, the other does not.
 *
 * We make no fairness assumptions. They have a cost.
 */

#define arch_spin_is_locked(x)		(*(volatile int *)(&(x)->slock) <= 0)
#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)

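/**
 * arch_spin_unlock_wait - Wait until the lock is released
 * @lock: Pointer to the lock variable
 *
 * smp_cond_load_acquire() re-reads lock->slock until it becomes positive
 * (slock > 0 means unlocked here) and provides acquire ordering for
 * whatever the caller does next.
 */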
static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
	smp_cond_load_acquire(&lock->slock, VAL > 0);
}

/**
 * arch_spin_trylock - Try spin lock and return a result
 * @lock: Pointer to the lock variable
 *
 * arch_spin_trylock() tries to get the lock and returns a result.
 * On the m32r, the result value is 1 (= Success) or 0 (= Failure).
 */
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	int oldval;
	unsigned long tmp1, tmp2;

	/*
	 * lock->slock :  =1 : unlock
	 *             : <=0 : lock
	 * {
	 *   oldval = lock->slock; <--+ need atomic operation
	 *   lock->slock = 0;      <--+
	 * }
	 */
	__asm__ __volatile__ (
		"# arch_spin_trylock		\n\t"
		"ldi	%1, #0;			\n\t"
		"mvfc	%2, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%3")
		"lock	%0, @%3;		\n\t"
		"unlock	%1, @%3;		\n\t"
		"mvtc	%2, psw;		\n\t"
		: "=&r" (oldval), "=&r" (tmp1), "=&r" (tmp2)
		: "r" (&lock->slock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);

	return (oldval > 0);
}
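/*
 * A rough C-language sketch of what the LOCK/UNLOCK instruction pair in
 * arch_spin_trylock() above performs as one atomic operation (illustration
 * only; the real operation is the interrupt-disabled asm sequence):
 *
 *	int oldval = lock->slock;	// "lock   %0, @%3" - locked load
 *	lock->slock = 0;		// "unlock %1, @%3" - store, drop lock
 *	return oldval > 0;		// succeeded only if it was unlocked
 */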

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	unsigned long tmp0, tmp1;

	/*
	 * lock->slock :  =1 : unlock
	 *             : <=0 : lock
	 *
	 * for ( ; ; ) {
	 *   lock->slock -= 1;  <-- need atomic operation
	 *   if (lock->slock == 0) break;
	 *   for ( ; lock->slock <= 0 ; );
	 * }
	 */
	__asm__ __volatile__ (
		"# arch_spin_lock		\n\t"
		".fillinsn			\n"
		"1:				\n\t"
		"mvfc	%1, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%2")
		"lock	%0, @%2;		\n\t"
		"addi	%0, #-1;		\n\t"
		"unlock	%0, @%2;		\n\t"
		"mvtc	%1, psw;		\n\t"
		"bltz	%0, 2f;			\n\t"
		LOCK_SECTION_START(".balign 4 \n\t")
		".fillinsn			\n"
		"2:				\n\t"
		"ld	%0, @%2;		\n\t"
		"bgtz	%0, 1b;			\n\t"
		"bra	2b;			\n\t"
		LOCK_SECTION_END
		: "=&r" (tmp0), "=&r" (tmp1)
		: "r" (&lock->slock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}
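/*
 * Note on the loop layout in arch_spin_lock() above: the decrement-and-test
 * fast path falls straight through, while the contended spin at label "2:"
 * is emitted via LOCK_SECTION_START/LOCK_SECTION_END into the out-of-line
 * lock section, keeping the uncontended path straight-line in the hot text.
 * The spinning CPU only re-reads ->slock with a plain "ld" and goes back to
 * the atomic decrement at "1:" once it observes a positive (unlocked) value.
 */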

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	mb();
	lock->slock = 1;
}
125
126/*
127 * Read-write spinlocks, allowing multiple readers
128 * but only one writer.
129 *
130 * NOTE! it is quite common to have readers in interrupts
131 * but no interrupt writers. For those circumstances we
132 * can "mix" irq-safe locks - any writer needs to get a
133 * irq-safe write-lock, but readers can get non-irqsafe
134 * read-locks.
135 *
136 * On x86, we implement read-write locks as a 32-bit counter
137 * with the high bit (sign) being the "contended" bit.
138 *
139 * The inline assembly is non-obvious. Think about it.
140 *
141 * Changed to use the same technique as rw semaphores.  See
142 * semaphore.h for details.  -ben
143 */
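/*
 * Sketch of the counter scheme used below (readers add/subtract 1, writers
 * add/subtract RW_LOCK_BIAS), written as plain C for illustration only:
 *
 *	lock == RW_LOCK_BIAS		completely unlocked
 *	0 < lock < RW_LOCK_BIAS		held by (RW_LOCK_BIAS - lock) readers
 *	lock <= 0			a writer holds it (or is contending)
 *
 *	read_lock:    lock -= 1;		success if result >= 0
 *	read_unlock:  lock += 1;
 *	write_lock:   lock -= RW_LOCK_BIAS;	success if result == 0
 *	write_unlock: lock += RW_LOCK_BIAS;
 */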

/**
 * arch_read_can_lock - would read_trylock() succeed?
 * @lock: the rwlock in question.
 */
#define arch_read_can_lock(x) ((int)(x)->lock > 0)

/**
 * arch_write_can_lock - would write_trylock() succeed?
 * @lock: the rwlock in question.
 */
#define arch_write_can_lock(x) ((x)->lock == RW_LOCK_BIAS)

static inline void arch_read_lock(arch_rwlock_t *rw)
{
	unsigned long tmp0, tmp1;

	/*
	 * rw->lock :  >0 : unlock
	 *          : <=0 : lock
	 *
	 * for ( ; ; ) {
	 *   rw->lock -= 1;  <-- need atomic operation
	 *   if (rw->lock >= 0) break;
	 *   rw->lock += 1;  <-- need atomic operation
	 *   for ( ; rw->lock <= 0 ; );
	 * }
	 */
	__asm__ __volatile__ (
		"# read_lock			\n\t"
		".fillinsn			\n"
		"1:				\n\t"
		"mvfc	%1, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%2")
		"lock	%0, @%2;		\n\t"
		"addi	%0, #-1;		\n\t"
		"unlock	%0, @%2;		\n\t"
		"mvtc	%1, psw;		\n\t"
		"bltz	%0, 2f;			\n\t"
		LOCK_SECTION_START(".balign 4 \n\t")
		".fillinsn			\n"
		"2:				\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%2")
		"lock	%0, @%2;		\n\t"
		"addi	%0, #1;			\n\t"
		"unlock	%0, @%2;		\n\t"
		"mvtc	%1, psw;		\n\t"
		".fillinsn			\n"
		"3:				\n\t"
		"ld	%0, @%2;		\n\t"
		"bgtz	%0, 1b;			\n\t"
		"bra	3b;			\n\t"
		LOCK_SECTION_END
		: "=&r" (tmp0), "=&r" (tmp1)
		: "r" (&rw->lock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}
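/*
 * Note on arch_read_lock() above: on contention the reader first undoes its
 * decrement (the "addi %0, #1" in the out-of-line block), then spins at "3:"
 * re-reading ->lock until it goes positive, and only then retries the atomic
 * decrement at "1:".  This matches the pseudo-code in the function comment.
 */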

static inline void arch_write_lock(arch_rwlock_t *rw)
{
	unsigned long tmp0, tmp1, tmp2;

	/*
	 * rw->lock :  =RW_LOCK_BIAS_STR : unlock
	 *          : !=RW_LOCK_BIAS_STR : lock
	 *
	 * for ( ; ; ) {
	 *   rw->lock -= RW_LOCK_BIAS_STR;  <-- need atomic operation
	 *   if (rw->lock == 0) break;
	 *   rw->lock += RW_LOCK_BIAS_STR;  <-- need atomic operation
	 *   for ( ; rw->lock != RW_LOCK_BIAS_STR ; ) ;
	 * }
	 */
	__asm__ __volatile__ (
		"# write_lock					\n\t"
		"seth	%1, #high(" RW_LOCK_BIAS_STR ");	\n\t"
		"or3	%1, %1, #low(" RW_LOCK_BIAS_STR ");	\n\t"
		".fillinsn					\n"
		"1:						\n\t"
		"mvfc	%2, psw;				\n\t"
		"clrpsw	#0x40 -> nop;				\n\t"
		DCACHE_CLEAR("%0", "r7", "%3")
		"lock	%0, @%3;				\n\t"
		"sub	%0, %1;					\n\t"
		"unlock	%0, @%3;				\n\t"
		"mvtc	%2, psw;				\n\t"
		"bnez	%0, 2f;					\n\t"
		LOCK_SECTION_START(".balign 4 \n\t")
		".fillinsn					\n"
		"2:						\n\t"
		"clrpsw	#0x40 -> nop;				\n\t"
		DCACHE_CLEAR("%0", "r7", "%3")
		"lock	%0, @%3;				\n\t"
		"add	%0, %1;					\n\t"
		"unlock	%0, @%3;				\n\t"
		"mvtc	%2, psw;				\n\t"
		".fillinsn					\n"
		"3:						\n\t"
		"ld	%0, @%3;				\n\t"
		"beq	%0, %1, 1b;				\n\t"
		"bra	3b;					\n\t"
		LOCK_SECTION_END
		: "=&r" (tmp0), "=&r" (tmp1), "=&r" (tmp2)
		: "r" (&rw->lock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r7"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}
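/*
 * Note on arch_write_lock() above: the writer subtracts RW_LOCK_BIAS and
 * owns the lock only if the result is exactly 0, i.e. there were no readers
 * and no other writer.  On failure it adds the bias back and spins at "3:"
 * until the counter reads RW_LOCK_BIAS (fully unlocked) again, then retries.
 */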

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
	unsigned long tmp0, tmp1;

	__asm__ __volatile__ (
		"# read_unlock			\n\t"
		"mvfc	%1, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%2")
		"lock	%0, @%2;		\n\t"
		"addi	%0, #1;			\n\t"
		"unlock	%0, @%2;		\n\t"
		"mvtc	%1, psw;		\n\t"
		: "=&r" (tmp0), "=&r" (tmp1)
		: "r" (&rw->lock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
	unsigned long tmp0, tmp1, tmp2;

	__asm__ __volatile__ (
		"# write_unlock					\n\t"
		"seth	%1, #high(" RW_LOCK_BIAS_STR ");	\n\t"
		"or3	%1, %1, #low(" RW_LOCK_BIAS_STR ");	\n\t"
		"mvfc	%2, psw;				\n\t"
		"clrpsw	#0x40 -> nop;				\n\t"
		DCACHE_CLEAR("%0", "r7", "%3")
		"lock	%0, @%3;				\n\t"
		"add	%0, %1;					\n\t"
		"unlock	%0, @%3;				\n\t"
		"mvtc	%2, psw;				\n\t"
		: "=&r" (tmp0), "=&r" (tmp1), "=&r" (tmp2)
		: "r" (&rw->lock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r7"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}

static inline int arch_read_trylock(arch_rwlock_t *lock)
{
	atomic_t *count = (atomic_t *)lock;
	if (atomic_dec_return(count) >= 0)
		return 1;
	atomic_inc(count);
	return 0;
}

static inline int arch_write_trylock(arch_rwlock_t *lock)
{
	atomic_t *count = (atomic_t *)lock;
	if (atomic_sub_and_test(RW_LOCK_BIAS, count))
		return 1;
	atomic_add(RW_LOCK_BIAS, count);
	return 0;
}
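/*
 * The trylock variants above express the same counter protocol through the
 * generic atomic ops: a reader decrements and backs out (atomic_inc) if the
 * result went negative; a writer succeeds only when subtracting RW_LOCK_BIAS
 * leaves exactly 0 (atomic_sub_and_test), otherwise it restores the bias.
 * The cast to atomic_t relies on the lock counter sitting at offset 0 of
 * arch_rwlock_t, which is what the cast in the code assumes.
 */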

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#define arch_spin_relax(lock)	cpu_relax()
#define arch_read_relax(lock)	cpu_relax()
#define arch_write_relax(lock)	cpu_relax()

#endif	/* _ASM_M32R_SPINLOCK_H */