arch/m68k/include/asm/cmpxchg.h
v6.8
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_M68K_CMPXCHG__
#define __ARCH_M68K_CMPXCHG__

#include <linux/irqflags.h>

#define __xg(type, x) ((volatile type *)(x))

extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

#ifndef CONFIG_RMW_INSNS
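/*
 * No atomic read-modify-write instructions are available (e.g. on
 * ColdFire), so xchg() is emulated by masking interrupts around a
 * plain load/store pair.
 */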
static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size)
{
	unsigned long flags, tmp;

	local_irq_save(flags);

	switch (size) {
	case 1:
		tmp = *(u8 *)ptr;
		*(u8 *)ptr = x;
		x = tmp;
		break;
	case 2:
		tmp = *(u16 *)ptr;
		*(u16 *)ptr = x;
		x = tmp;
		break;
	case 4:
		tmp = *(u32 *)ptr;
		*(u32 *)ptr = x;
		x = tmp;
		break;
	default:
		tmp = __invalid_xchg_size(x, ptr, size);
		break;
	}

	local_irq_restore(flags);
	return x;
}
#else
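/*
 * The CPU provides the cas{b,w,l} instructions: implement xchg() as
 * a compare-and-swap retry loop.  The old value is read first, then
 * cas is retried until the store succeeds without interference.
 */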
static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"
			 "1:\n\t"
			 "casb %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(u8, ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"
			 "1:\n\t"
			 "casw %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(u16, ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"
			 "1:\n\t"
			 "casl %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(u32, ptr)) : "memory");
		break;
	default:
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}
	return x;
}
#endif
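/*
 * arch_xchg() funnels 8-, 16- and 32-bit scalars through the
 * unsigned long helper above and returns the value previously
 * stored at ptr.
 */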
#define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));})

#include <asm-generic/cmpxchg-local.h>

#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

extern unsigned long __invalid_cmpxchg_size(volatile void *,
					    unsigned long, unsigned long, int);

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
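/*
 * Illustrative use only (not part of this header): a lock-free
 * increment of a hypothetical int counter, retrying until the
 * compare succeeds.
 *
 *	int old, new;
 *
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;
 *	} while (arch_cmpxchg(&counter, old, new) != old);
 */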
#ifdef CONFIG_RMW_INSNS

static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__ ("casb %0,%2,%1"
				      : "=d" (old), "=m" (*(char *)p)
				      : "d" (new), "0" (old), "m" (*(char *)p));
		break;
	case 2:
		__asm__ __volatile__ ("casw %0,%2,%1"
				      : "=d" (old), "=m" (*(short *)p)
				      : "d" (new), "0" (old), "m" (*(short *)p));
		break;
	case 4:
		__asm__ __volatile__ ("casl %0,%2,%1"
				      : "=d" (old), "=m" (*(int *)p)
				      : "d" (new), "0" (old), "m" (*(int *)p));
		break;
	default:
		old = __invalid_cmpxchg_size(p, old, new, size);
		break;
	}
	return old;
}

#define arch_cmpxchg(ptr, o, n)						    \
	({(__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr)));})
#define arch_cmpxchg_local(ptr, o, n)					    \
	({(__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr)));})
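/*
 * There is no 64-bit cas instruction, so arch_cmpxchg64() falls back
 * to the generic, interrupt-masking __generic_cmpxchg64_local() from
 * <asm-generic/cmpxchg-local.h>.
 */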
#define arch_cmpxchg64(ptr, o, n)	arch_cmpxchg64_local((ptr), (o), (n))

#else

#include <asm-generic/cmpxchg.h>

#endif

#endif /* __ARCH_M68K_CMPXCHG__ */
v5.14.15
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_M68K_CMPXCHG__
#define __ARCH_M68K_CMPXCHG__

#include <linux/irqflags.h>
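/*
 * Casting through the oversized __xchg_dummy struct gives the "m"
 * asm operands an opaque, generous size, so the compiler makes no
 * assumptions about how much memory the cas instructions may touch.
 * v6.8 (above) replaced this trick with the typed __xg(type, x)
 * macro.
 */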
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

#ifndef CONFIG_RMW_INSNS
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	unsigned long flags, tmp;

	local_irq_save(flags);

	switch (size) {
	case 1:
		tmp = *(u8 *)ptr;
		*(u8 *)ptr = x;
		x = tmp;
		break;
	case 2:
		tmp = *(u16 *)ptr;
		*(u16 *)ptr = x;
		x = tmp;
		break;
	case 4:
		tmp = *(u32 *)ptr;
		*(u32 *)ptr = x;
		x = tmp;
		break;
	default:
		tmp = __invalid_xchg_size(x, ptr, size);
		break;
	}

	local_irq_restore(flags);
	return x;
}
#else
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"
			 "1:\n\t"
			 "casb %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"
			 "1:\n\t"
			 "casw %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"
			 "1:\n\t"
			 "casl %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	default:
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}
	return x;
}
#endif

#define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));})

#include <asm-generic/cmpxchg-local.h>

#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

extern unsigned long __invalid_cmpxchg_size(volatile void *,
					    unsigned long, unsigned long, int);

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#ifdef CONFIG_RMW_INSNS

static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__ ("casb %0,%2,%1"
				      : "=d" (old), "=m" (*(char *)p)
				      : "d" (new), "0" (old), "m" (*(char *)p));
		break;
	case 2:
		__asm__ __volatile__ ("casw %0,%2,%1"
				      : "=d" (old), "=m" (*(short *)p)
				      : "d" (new), "0" (old), "m" (*(short *)p));
		break;
	case 4:
		__asm__ __volatile__ ("casl %0,%2,%1"
				      : "=d" (old), "=m" (*(int *)p)
				      : "d" (new), "0" (old), "m" (*(int *)p));
		break;
	default:
		old = __invalid_cmpxchg_size(p, old, new, size);
		break;
	}
	return old;
}

#define arch_cmpxchg(ptr, o, n)						    \
	({(__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr)));})
#define arch_cmpxchg_local(ptr, o, n)					    \
	({(__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr)));})

#define arch_cmpxchg64(ptr, o, n)	arch_cmpxchg64_local((ptr), (o), (n))

#else

#include <asm-generic/cmpxchg.h>

#endif

#endif /* __ARCH_M68K_CMPXCHG__ */