arch/powerpc/include/asm/cache.h — two versions of this header follow.
First: Linux v6.9.4.
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef _ASM_POWERPC_CACHE_H
  3#define _ASM_POWERPC_CACHE_H
  4
  5#ifdef __KERNEL__
  6
  7
/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx)
/* MPC8xx: 16-byte cache lines */
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
/* e500mc: 64-byte cache lines */
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
/* PPC 47x has 128-byte lines; other 32-bit cores here use 32 bytes */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
/* 64-bit: 128-byte cache lines */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

/* instruction-fetch alignment in bytes, from IFETCH_ALIGN_SHIFT above */
#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)

/*
 * On non-coherent platforms, DMA buffers must be cache-line aligned so
 * that cache maintenance on them cannot clobber neighbouring data.
 */
#ifdef CONFIG_NOT_COHERENT_CACHE
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
#endif
 39
 40#if !defined(__ASSEMBLY__)
 41#ifdef CONFIG_PPC64
 42
/* Geometry of a single cache level (values populated elsewhere at boot). */
struct ppc_cache_info {
	u32 size;		/* total cache size */
	u32 line_size;		/* coherency line size */
	u32 block_size;	/* L1 only */
	u32 log_block_size;	/* log2(block_size) */
	u32 blocks_per_page;	/* blocks covering one page */
	u32 sets;		/* number of sets */
	u32 assoc;		/* associativity */
};
 52
/* Per-level cache information for the 64-bit kernel. */
struct ppc64_caches {
	struct ppc_cache_info l1d;	/* L1 data cache */
	struct ppc_cache_info l1i;	/* L1 instruction cache */
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

/* Single global instance; defined and filled in elsewhere. */
extern struct ppc64_caches ppc64_caches;
 61
/* log2 of the L1 data-cache block size (from the global ppc64_caches). */
static inline u32 l1_dcache_shift(void)
{
	return ppc64_caches.l1d.log_block_size;
}
 66
/* L1 data-cache block size in bytes (from the global ppc64_caches). */
static inline u32 l1_dcache_bytes(void)
{
	return ppc64_caches.l1d.block_size;
}
 71
/* log2 of the L1 instruction-cache block size (from ppc64_caches). */
static inline u32 l1_icache_shift(void)
{
	return ppc64_caches.l1i.log_block_size;
}
 76
/* L1 instruction-cache block size in bytes (from ppc64_caches). */
static inline u32 l1_icache_bytes(void)
{
	return ppc64_caches.l1i.block_size;
}
 81#else
/* 32-bit (non-PPC64): cache geometry is a compile-time constant. */
static inline u32 l1_dcache_shift(void)
{
	return L1_CACHE_SHIFT;
}
 86
/* 32-bit: L1 data-cache block size is the compile-time line size. */
static inline u32 l1_dcache_bytes(void)
{
	return L1_CACHE_BYTES;
}
 91
/* 32-bit: I-cache shift equals the compile-time L1 shift. */
static inline u32 l1_icache_shift(void)
{
	return L1_CACHE_SHIFT;
}
 96
/* 32-bit: I-cache block size equals the compile-time line size. */
static inline u32 l1_icache_bytes(void)
{
	return L1_CACHE_BYTES;
}
101
102#endif
103
/* Group data that is written rarely into its own section. */
#define __read_mostly __section(".data..read_mostly")

/*
 * L2CR/L3CR register accessors exist only on 32-bit Book3S;
 * everywhere else they are no-ops that read as zero.
 */
#ifdef CONFIG_PPC_BOOK3S_32
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
117
118static inline void dcbz(void *addr)
119{
120	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
121}
122
123static inline void dcbi(void *addr)
124{
125	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
126}
127
128static inline void dcbf(void *addr)
129{
130	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
131}
132
133static inline void dcbst(void *addr)
134{
135	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
136}
137
/* icbi - Instruction Cache Block Invalidate for the block holding addr. */
static inline void icbi(void *addr)
{
	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
}
142
/*
 * iccci - Instruction Cache Congruence Class Invalidate.
 * NOTE(review): on some cores iccci reportedly ignores its operands and
 * invalidates the entire I-cache — confirm against the core's manual.
 */
static inline void iccci(void *addr)
{
	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
}
147
148#endif /* !__ASSEMBLY__ */
149#endif /* __KERNEL__ */
150#endif /* _ASM_POWERPC_CACHE_H */
Second: the same header as of Linux v5.9.
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef _ASM_POWERPC_CACHE_H
  3#define _ASM_POWERPC_CACHE_H
  4
  5#ifdef __KERNEL__
  6
  7
/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx)
/* MPC8xx: 16-byte cache lines */
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
/* e500mc: 64-byte cache lines */
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
/* PPC 47x has 128-byte lines; other 32-bit cores here use 32 bytes */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
/* 64-bit: 128-byte cache lines */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

/* instruction-fetch alignment in bytes, from IFETCH_ALIGN_SHIFT above */
#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
 35
 
 
 
 
 36#if !defined(__ASSEMBLY__)
 37#ifdef CONFIG_PPC64
 38
/* Geometry of a single cache level (values populated elsewhere at boot). */
struct ppc_cache_info {
	u32 size;		/* total cache size */
	u32 line_size;		/* coherency line size */
	u32 block_size;	/* L1 only */
	u32 log_block_size;	/* log2(block_size) */
	u32 blocks_per_page;	/* blocks covering one page */
	u32 sets;		/* number of sets */
	u32 assoc;		/* associativity */
};
 48
/* Per-level cache information for the 64-bit kernel. */
struct ppc64_caches {
	struct ppc_cache_info l1d;	/* L1 data cache */
	struct ppc_cache_info l1i;	/* L1 instruction cache */
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

/* Single global instance; defined and filled in elsewhere. */
extern struct ppc64_caches ppc64_caches;
 57
/* log2 of the L1 data-cache block size (from the global ppc64_caches). */
static inline u32 l1_dcache_shift(void)
{
	return ppc64_caches.l1d.log_block_size;
}
 62
/* L1 data-cache block size in bytes (from the global ppc64_caches). */
static inline u32 l1_dcache_bytes(void)
{
	return ppc64_caches.l1d.block_size;
}
 67
/* log2 of the L1 instruction-cache block size (from ppc64_caches). */
static inline u32 l1_icache_shift(void)
{
	return ppc64_caches.l1i.log_block_size;
}
 72
/* L1 instruction-cache block size in bytes (from ppc64_caches). */
static inline u32 l1_icache_bytes(void)
{
	return ppc64_caches.l1i.block_size;
}
 77#else
/* 32-bit (non-PPC64): cache geometry is a compile-time constant. */
static inline u32 l1_dcache_shift(void)
{
	return L1_CACHE_SHIFT;
}
 82
/* 32-bit: L1 data-cache block size is the compile-time line size. */
static inline u32 l1_dcache_bytes(void)
{
	return L1_CACHE_BYTES;
}
 87
/* 32-bit: I-cache shift equals the compile-time L1 shift. */
static inline u32 l1_icache_shift(void)
{
	return L1_CACHE_SHIFT;
}
 92
/* 32-bit: I-cache block size equals the compile-time line size. */
static inline u32 l1_icache_bytes(void)
{
	return L1_CACHE_BYTES;
}
 97
 98#endif
 99
/*
 * Group data that is written rarely into its own section.
 * NOTE(review): unquoted section name — on this kernel vintage the
 * __section() macro stringifies its argument itself; do not add quotes
 * here without also changing __section() (newer kernels, like the
 * v6.9.4 copy of this header above, use the quoted form).
 */
#define __read_mostly __section(.data..read_mostly)

/*
 * L2CR/L3CR register accessors exist only on 32-bit Book3S;
 * everywhere else they are no-ops that read as zero.
 */
#ifdef CONFIG_PPC_BOOK3S_32
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
113
114static inline void dcbz(void *addr)
115{
116	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
117}
118
119static inline void dcbi(void *addr)
120{
121	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
122}
123
124static inline void dcbf(void *addr)
125{
126	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
127}
128
129static inline void dcbst(void *addr)
130{
131	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
132}
133
/* icbi - Instruction Cache Block Invalidate for the block holding addr. */
static inline void icbi(void *addr)
{
	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
}
138
/*
 * iccci - Instruction Cache Congruence Class Invalidate.
 * NOTE(review): on some cores iccci reportedly ignores its operands and
 * invalidates the entire I-cache — confirm against the core's manual.
 */
static inline void iccci(void *addr)
{
	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
}
143
144#endif /* !__ASSEMBLY__ */
145#endif /* __KERNEL__ */
146#endif /* _ASM_POWERPC_CACHE_H */