arch/powerpc/include/asm/cache.h — Linux kernel, version v6.9.4:
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef _ASM_POWERPC_CACHE_H
  3#define _ASM_POWERPC_CACHE_H
  4
  5#ifdef __KERNEL__
  6
  7
  8/* bytes per L1 cache line */
  9#if defined(CONFIG_PPC_8xx)
 10#define L1_CACHE_SHIFT		4
 11#define MAX_COPY_PREFETCH	1
 12#define IFETCH_ALIGN_SHIFT	2
 13#elif defined(CONFIG_PPC_E500MC)
 14#define L1_CACHE_SHIFT		6
 15#define MAX_COPY_PREFETCH	4
 16#define IFETCH_ALIGN_SHIFT	3
 17#elif defined(CONFIG_PPC32)
 18#define MAX_COPY_PREFETCH	4
 19#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
 20#if defined(CONFIG_PPC_47x)
 21#define L1_CACHE_SHIFT		7
 22#else
 23#define L1_CACHE_SHIFT		5
 24#endif
 25#else /* CONFIG_PPC64 */
 26#define L1_CACHE_SHIFT		7
 27#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
 28#endif
 29
 30#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
 31
 32#define	SMP_CACHE_BYTES		L1_CACHE_BYTES
 33
 34#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
 35
 36#ifdef CONFIG_NOT_COHERENT_CACHE
 37#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
 38#endif
 39
 40#if !defined(__ASSEMBLY__)
 41#ifdef CONFIG_PPC64
 42
 43struct ppc_cache_info {
 44	u32 size;
 45	u32 line_size;
 46	u32 block_size;	/* L1 only */
 47	u32 log_block_size;
 48	u32 blocks_per_page;
 49	u32 sets;
 50	u32 assoc;
 51};
 52
 53struct ppc64_caches {
 54	struct ppc_cache_info l1d;
 55	struct ppc_cache_info l1i;
 56	struct ppc_cache_info l2;
 57	struct ppc_cache_info l3;
 58};
 59
 60extern struct ppc64_caches ppc64_caches;
 61
 62static inline u32 l1_dcache_shift(void)
 63{
 64	return ppc64_caches.l1d.log_block_size;
 65}
 66
 67static inline u32 l1_dcache_bytes(void)
 68{
 69	return ppc64_caches.l1d.block_size;
 70}
 71
 72static inline u32 l1_icache_shift(void)
 73{
 74	return ppc64_caches.l1i.log_block_size;
 75}
 76
 77static inline u32 l1_icache_bytes(void)
 78{
 79	return ppc64_caches.l1i.block_size;
 80}
 81#else
 82static inline u32 l1_dcache_shift(void)
 83{
 84	return L1_CACHE_SHIFT;
 85}
 86
 87static inline u32 l1_dcache_bytes(void)
 88{
 89	return L1_CACHE_BYTES;
 90}
 91
 92static inline u32 l1_icache_shift(void)
 93{
 94	return L1_CACHE_SHIFT;
 95}
 96
 97static inline u32 l1_icache_bytes(void)
 98{
 99	return L1_CACHE_BYTES;
100}
101
102#endif
103
/* Group rarely-written data together to keep hot cachelines clean. */
#define __read_mostly __section(".data..read_mostly")

#ifdef CONFIG_PPC_BOOK3S_32
/* L2/L3 cache-control register accessors, implemented in asm elsewhere. */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* No L2CR/L3CR on these platforms: reads return 0, writes are no-ops. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
117
/* Zero the data cache block containing @addr (PowerPC "dcbz"). */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}
122
/* Invalidate the data cache block containing @addr (PowerPC "dcbi"). */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}
127
/* Flush (write back and invalidate) the data cache block containing @addr. */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}
132
/* Write back (without invalidating) the data cache block containing @addr. */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
137
138static inline void icbi(void *addr)
139{
140	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
141}
142
143static inline void iccci(void *addr)
144{
145	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
146}
147
148#endif /* !__ASSEMBLY__ */
149#endif /* __KERNEL__ */
150#endif /* _ASM_POWERPC_CACHE_H */
The same header as shipped in Linux kernel version v6.8:
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef _ASM_POWERPC_CACHE_H
  3#define _ASM_POWERPC_CACHE_H
  4
  5#ifdef __KERNEL__
  6
  7
  8/* bytes per L1 cache line */
  9#if defined(CONFIG_PPC_8xx)
 10#define L1_CACHE_SHIFT		4
 11#define MAX_COPY_PREFETCH	1
 12#define IFETCH_ALIGN_SHIFT	2
 13#elif defined(CONFIG_PPC_E500MC)
 14#define L1_CACHE_SHIFT		6
 15#define MAX_COPY_PREFETCH	4
 16#define IFETCH_ALIGN_SHIFT	3
 17#elif defined(CONFIG_PPC32)
 18#define MAX_COPY_PREFETCH	4
 19#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
 20#if defined(CONFIG_PPC_47x)
 21#define L1_CACHE_SHIFT		7
 22#else
 23#define L1_CACHE_SHIFT		5
 24#endif
 25#else /* CONFIG_PPC64 */
 26#define L1_CACHE_SHIFT		7
 27#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
 28#endif
 29
 30#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
 31
 32#define	SMP_CACHE_BYTES		L1_CACHE_BYTES
 33
 34#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
 35
 36#ifdef CONFIG_NOT_COHERENT_CACHE
 37#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
 38#endif
 39
 40#if !defined(__ASSEMBLY__)
 41#ifdef CONFIG_PPC64
 42
 43struct ppc_cache_info {
 44	u32 size;
 45	u32 line_size;
 46	u32 block_size;	/* L1 only */
 47	u32 log_block_size;
 48	u32 blocks_per_page;
 49	u32 sets;
 50	u32 assoc;
 51};
 52
 53struct ppc64_caches {
 54	struct ppc_cache_info l1d;
 55	struct ppc_cache_info l1i;
 56	struct ppc_cache_info l2;
 57	struct ppc_cache_info l3;
 58};
 59
 60extern struct ppc64_caches ppc64_caches;
 61
 62static inline u32 l1_dcache_shift(void)
 63{
 64	return ppc64_caches.l1d.log_block_size;
 65}
 66
 67static inline u32 l1_dcache_bytes(void)
 68{
 69	return ppc64_caches.l1d.block_size;
 70}
 71
 72static inline u32 l1_icache_shift(void)
 73{
 74	return ppc64_caches.l1i.log_block_size;
 75}
 76
 77static inline u32 l1_icache_bytes(void)
 78{
 79	return ppc64_caches.l1i.block_size;
 80}
 81#else
 82static inline u32 l1_dcache_shift(void)
 83{
 84	return L1_CACHE_SHIFT;
 85}
 86
 87static inline u32 l1_dcache_bytes(void)
 88{
 89	return L1_CACHE_BYTES;
 90}
 91
 92static inline u32 l1_icache_shift(void)
 93{
 94	return L1_CACHE_SHIFT;
 95}
 96
 97static inline u32 l1_icache_bytes(void)
 98{
 99	return L1_CACHE_BYTES;
100}
101
102#endif
103
/* Group rarely-written data together to keep hot cachelines clean. */
#define __read_mostly __section(".data..read_mostly")

#ifdef CONFIG_PPC_BOOK3S_32
/* L2/L3 cache-control register accessors, implemented in asm elsewhere. */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* No L2CR/L3CR on these platforms: reads return 0, writes are no-ops. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
117
/* Zero the data cache block containing @addr (PowerPC "dcbz"). */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}
122
/* Invalidate the data cache block containing @addr (PowerPC "dcbi"). */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}
127
/* Flush (write back and invalidate) the data cache block containing @addr. */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}
132
/* Write back (without invalidating) the data cache block containing @addr. */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
137
138static inline void icbi(void *addr)
139{
140	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
141}
142
143static inline void iccci(void *addr)
144{
145	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
146}
147
148#endif /* !__ASSEMBLY__ */
149#endif /* __KERNEL__ */
150#endif /* _ASM_POWERPC_CACHE_H */