arch/m68k/include/asm/cmpxchg.h (v6.13.7)

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_M68K_CMPXCHG__
#define __ARCH_M68K_CMPXCHG__

#include <linux/irqflags.h>
#include <linux/minmax.h>

#define __xg(type, x) ((volatile type *)(x))

extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);
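/*
 * Note: __invalid_xchg_size() is declared but intentionally never defined;
 * if the size switches below ever reach it, the call survives to link time
 * and the build fails, turning an unsupported operand size into a link
 * error rather than silently wrong code.
 */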

#ifndef CONFIG_RMW_INSNS
static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size)
{
	unsigned long flags;

	local_irq_save(flags);

	switch (size) {
	case 1:
		swap(*(u8 *)ptr, x);
		break;
	case 2:
		swap(*(u16 *)ptr, x);
		break;
	case 4:
		swap(*(u32 *)ptr, x);
		break;
	default:
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}

	local_irq_restore(flags);
	return x;
}
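/*
 * The path above provides atomicity by masking interrupts: with no CAS
 * instruction available (CONFIG_RMW_INSNS unset, e.g. on 68000/ColdFire
 * class cores), the read-modify-write cannot be interleaved with another
 * context on a uniprocessor once local interrupts are off.
 */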
#else
/*
 * CAS-based exchange: load the current value, then retry the casb/casw/casl
 * loop until the compare-and-swap succeeds; on success the new value has
 * been stored atomically and the previous memory contents are left in x.
 */
static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"
			 "1:\n\t"
			 "casb %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(u8, ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"
			 "1:\n\t"
			 "casw %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(u16, ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"
			 "1:\n\t"
			 "casl %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(u32, ptr)) : "memory");
		break;
	default:
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}
	return x;
}
#endif

#define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));})
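/*
 * arch_xchg() picks the operand width from sizeof(*(ptr)) and casts the
 * returned old value back to the pointed-to type, so callers get
 * type-correct results without naming the size explicitly.
 */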

#include <asm-generic/cmpxchg-local.h>

#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

extern unsigned long __invalid_cmpxchg_size(volatile void *,
					    unsigned long, unsigned long, int);

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#ifdef CONFIG_RMW_INSNS

/*
 * The CAS instruction implements this contract directly: it compares the
 * register operand with memory and stores the update only on a match; on
 * a mismatch it loads the current memory contents into `old', so no retry
 * loop is needed here.
 */
static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__ ("casb %0,%2,%1"
				      : "=d" (old), "=m" (*(char *)p)
				      : "d" (new), "0" (old), "m" (*(char *)p));
		break;
	case 2:
		__asm__ __volatile__ ("casw %0,%2,%1"
				      : "=d" (old), "=m" (*(short *)p)
				      : "d" (new), "0" (old), "m" (*(short *)p));
		break;
	case 4:
		__asm__ __volatile__ ("casl %0,%2,%1"
				      : "=d" (old), "=m" (*(int *)p)
				      : "d" (new), "0" (old), "m" (*(int *)p));
		break;
	default:
		old = __invalid_cmpxchg_size(p, old, new, size);
		break;
	}
	return old;
}

#define arch_cmpxchg(ptr, o, n)						    \
	({(__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr)));})
#define arch_cmpxchg_local(ptr, o, n)					    \
	({(__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr)));})

#define arch_cmpxchg64(ptr, o, n)	arch_cmpxchg64_local((ptr), (o), (n))

#else

#include <asm-generic/cmpxchg.h>

#endif

#endif /* __ARCH_M68K_CMPXCHG__ */
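
The comment above the __cmpxchg() definition describes the usual
compare-and-swap contract: the caller learns whether the update took effect
by comparing the returned value with the expected one, and retries
otherwise. The following sketch shows that protocol in portable C11, for
illustration only: atomic_compare_exchange_strong() stands in for the
kernel's arch_cmpxchg(), and the counter/add_one names are invented for
the example.

#include <stdatomic.h>
#include <stdio.h>

static _Atomic unsigned long counter;

static void add_one(void)
{
	unsigned long old = atomic_load(&counter);
	unsigned long new;

	do {
		new = old + 1;	/* compute the update from the value we saw */
		/* store new only if counter still equals old; on failure,
		 * old is refreshed with the current contents and we retry,
		 * the same loop the casb/casw/casl sequences implement */
	} while (!atomic_compare_exchange_strong(&counter, &old, new));
}

int main(void)
{
	add_one();
	printf("counter = %lu\n", (unsigned long)atomic_load(&counter));
	return 0;
}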

arch/m68k/include/asm/cmpxchg.h (v3.15)
 
#ifndef __ARCH_M68K_CMPXCHG__
#define __ARCH_M68K_CMPXCHG__

#include <linux/irqflags.h>

/*
 * Casting through a large dummy structure makes the "m" operands below
 * describe a generously sized memory object, so the compiler does not
 * infer an access width from the original pointer type.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

#ifndef CONFIG_RMW_INSNS
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	unsigned long flags, tmp;

	local_irq_save(flags);

	switch (size) {
	case 1:
		tmp = *(u8 *)ptr;
		*(u8 *)ptr = x;
		x = tmp;
		break;
	case 2:
		tmp = *(u16 *)ptr;
		*(u16 *)ptr = x;
		x = tmp;
		break;
	case 4:
		tmp = *(u32 *)ptr;
		*(u32 *)ptr = x;
		x = tmp;
		break;
	default:
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}

	local_irq_restore(flags);
	return x;
}
#else
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"
			 "1:\n\t"
			 "casb %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"
			 "1:\n\t"
			 "casw %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"
			 "1:\n\t"
			 "casl %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	default:
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}
	return x;
}
#endif

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

#include <asm-generic/cmpxchg-local.h>

#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

extern unsigned long __invalid_cmpxchg_size(volatile void *,
					    unsigned long, unsigned long, int);

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#ifdef CONFIG_RMW_INSNS
#define __HAVE_ARCH_CMPXCHG	1

static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__ ("casb %0,%2,%1"
				      : "=d" (old), "=m" (*(char *)p)
				      : "d" (new), "0" (old), "m" (*(char *)p));
		break;
	case 2:
		__asm__ __volatile__ ("casw %0,%2,%1"
				      : "=d" (old), "=m" (*(short *)p)
				      : "d" (new), "0" (old), "m" (*(short *)p));
		break;
	case 4:
		__asm__ __volatile__ ("casl %0,%2,%1"
				      : "=d" (old), "=m" (*(int *)p)
				      : "d" (new), "0" (old), "m" (*(int *)p));
		break;
	default:
		old = __invalid_cmpxchg_size(p, old, new, size);
		break;
	}
	return old;
}

#define cmpxchg(ptr, o, n)						    \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg_local(ptr, o, n)					    \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64(ptr, o, n)	cmpxchg64_local((ptr), (o), (n))

#else

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))

#include <asm-generic/cmpxchg.h>

#endif

#endif /* __ARCH_M68K_CMPXCHG__ */
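
The xchg() defined above atomically stores a new value and hands back the
previous one, which is exactly the primitive behind a test-and-set lock.
Below is a minimal sketch of that use in portable C11, with
atomic_exchange() standing in for the kernel's xchg(); the locked, lock,
and unlock names are invented for the example.

#include <stdatomic.h>
#include <stdio.h>

static atomic_int locked;	/* 0 = free, 1 = held */

static void lock(void)
{
	/* atomically write 1 and inspect the old value: if it was already
	 * 1, another context holds the lock, so spin until it drops to 0 */
	while (atomic_exchange(&locked, 1) == 1)
		;
}

static void unlock(void)
{
	atomic_store(&locked, 0);	/* release: mark the lock free again */
}

int main(void)
{
	lock();
	puts("in critical section");
	unlock();
	return 0;
}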