// SPDX-License-Identifier: GPL-2.0-only
/*
 * This file defines C prototypes for the low-level cache assembly functions
 * and populates a vtable for each selected ARM CPU cache type.
 */

#include <linux/types.h>
#include <asm/cacheflush.h>

#ifdef CONFIG_CPU_CACHE_V4
/* Low-level assembly implementations of the V4 cache operations */
void v4_flush_icache_all(void);
void v4_flush_kern_cache_all(void);
void v4_flush_user_cache_all(void);
void v4_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v4_coherent_kern_range(unsigned long, unsigned long);
int v4_coherent_user_range(unsigned long, unsigned long);
void v4_flush_kern_dcache_area(void *, size_t);
void v4_dma_map_area(const void *, size_t, int);
void v4_dma_unmap_area(const void *, size_t, int);
void v4_dma_flush_range(const void *, const void *);

/*
 * V4 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns v4_cache_fns __initconst = {
	.flush_icache_all = v4_flush_icache_all,
	.flush_kern_all = v4_flush_kern_cache_all,
	.flush_kern_louis = v4_flush_kern_cache_all,
	.flush_user_all = v4_flush_user_cache_all,
	.flush_user_range = v4_flush_user_cache_range,
	.coherent_kern_range = v4_coherent_kern_range,
	.coherent_user_range = v4_coherent_user_range,
	.flush_kern_dcache_area = v4_flush_kern_dcache_area,
	.dma_map_area = v4_dma_map_area,
	.dma_unmap_area = v4_dma_unmap_area,
	.dma_flush_range = v4_dma_flush_range,
};
#endif

/* V4 write-back cache "V4WB" */
#ifdef CONFIG_CPU_CACHE_V4WB
/* Low-level assembly implementations of the V4WB cache operations */
void v4wb_flush_icache_all(void);
void v4wb_flush_kern_cache_all(void);
void v4wb_flush_user_cache_all(void);
void v4wb_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v4wb_coherent_kern_range(unsigned long, unsigned long);
int v4wb_coherent_user_range(unsigned long, unsigned long);
void v4wb_flush_kern_dcache_area(void *, size_t);
void v4wb_dma_map_area(const void *, size_t, int);
void v4wb_dma_unmap_area(const void *, size_t, int);
void v4wb_dma_flush_range(const void *, const void *);

/*
 * V4WB cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns v4wb_cache_fns __initconst = {
	.flush_icache_all = v4wb_flush_icache_all,
	.flush_kern_all = v4wb_flush_kern_cache_all,
	.flush_kern_louis = v4wb_flush_kern_cache_all,
	.flush_user_all = v4wb_flush_user_cache_all,
	.flush_user_range = v4wb_flush_user_cache_range,
	.coherent_kern_range = v4wb_coherent_kern_range,
	.coherent_user_range = v4wb_coherent_user_range,
	.flush_kern_dcache_area = v4wb_flush_kern_dcache_area,
	.dma_map_area = v4wb_dma_map_area,
	.dma_unmap_area = v4wb_dma_unmap_area,
	.dma_flush_range = v4wb_dma_flush_range,
};
#endif

/* V4 write-through cache "V4WT" */
#ifdef CONFIG_CPU_CACHE_V4WT
/* Low-level assembly implementations of the V4WT cache operations */
void v4wt_flush_icache_all(void);
void v4wt_flush_kern_cache_all(void);
void v4wt_flush_user_cache_all(void);
void v4wt_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v4wt_coherent_kern_range(unsigned long, unsigned long);
int v4wt_coherent_user_range(unsigned long, unsigned long);
void v4wt_flush_kern_dcache_area(void *, size_t);
void v4wt_dma_map_area(const void *, size_t, int);
void v4wt_dma_unmap_area(const void *, size_t, int);
void v4wt_dma_flush_range(const void *, const void *);

/*
 * V4WT cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns v4wt_cache_fns __initconst = {
	.flush_icache_all = v4wt_flush_icache_all,
	.flush_kern_all = v4wt_flush_kern_cache_all,
	.flush_kern_louis = v4wt_flush_kern_cache_all,
	.flush_user_all = v4wt_flush_user_cache_all,
	.flush_user_range = v4wt_flush_user_cache_range,
	.coherent_kern_range = v4wt_coherent_kern_range,
	.coherent_user_range = v4wt_coherent_user_range,
	.flush_kern_dcache_area = v4wt_flush_kern_dcache_area,
	.dma_map_area = v4wt_dma_map_area,
	.dma_unmap_area = v4wt_dma_unmap_area,
	.dma_flush_range = v4wt_dma_flush_range,
};
#endif

/* Faraday FA526 cache */
#ifdef CONFIG_CPU_CACHE_FA
/* Low-level assembly implementations of the FA526 cache operations */
void fa_flush_icache_all(void);
void fa_flush_kern_cache_all(void);
void fa_flush_user_cache_all(void);
void fa_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void fa_coherent_kern_range(unsigned long, unsigned long);
int fa_coherent_user_range(unsigned long, unsigned long);
void fa_flush_kern_dcache_area(void *, size_t);
void fa_dma_map_area(const void *, size_t, int);
void fa_dma_unmap_area(const void *, size_t, int);
void fa_dma_flush_range(const void *, const void *);

/*
 * FA526 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns fa_cache_fns __initconst = {
	.flush_icache_all = fa_flush_icache_all,
	.flush_kern_all = fa_flush_kern_cache_all,
	.flush_kern_louis = fa_flush_kern_cache_all,
	.flush_user_all = fa_flush_user_cache_all,
	.flush_user_range = fa_flush_user_cache_range,
	.coherent_kern_range = fa_coherent_kern_range,
	.coherent_user_range = fa_coherent_user_range,
	.flush_kern_dcache_area = fa_flush_kern_dcache_area,
	.dma_map_area = fa_dma_map_area,
	.dma_unmap_area = fa_dma_unmap_area,
	.dma_flush_range = fa_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_CACHE_V6
/* Low-level assembly implementations of the ARMv6 cache operations */
void v6_flush_icache_all(void);
void v6_flush_kern_cache_all(void);
void v6_flush_user_cache_all(void);
void v6_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v6_coherent_kern_range(unsigned long, unsigned long);
int v6_coherent_user_range(unsigned long, unsigned long);
void v6_flush_kern_dcache_area(void *, size_t);
void v6_dma_map_area(const void *, size_t, int);
void v6_dma_unmap_area(const void *, size_t, int);
void v6_dma_flush_range(const void *, const void *);

/*
 * ARMv6 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns v6_cache_fns __initconst = {
	.flush_icache_all = v6_flush_icache_all,
	.flush_kern_all = v6_flush_kern_cache_all,
	.flush_kern_louis = v6_flush_kern_cache_all,
	.flush_user_all = v6_flush_user_cache_all,
	.flush_user_range = v6_flush_user_cache_range,
	.coherent_kern_range = v6_coherent_kern_range,
	.coherent_user_range = v6_coherent_user_range,
	.flush_kern_dcache_area = v6_flush_kern_dcache_area,
	.dma_map_area = v6_dma_map_area,
	.dma_unmap_area = v6_dma_unmap_area,
	.dma_flush_range = v6_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_CACHE_V7
/* Low-level assembly implementations of the ARMv7 cache operations */
void v7_flush_icache_all(void);
void v7_flush_kern_cache_all(void);
void v7_flush_kern_cache_louis(void);
void v7_flush_user_cache_all(void);
void v7_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v7_coherent_kern_range(unsigned long, unsigned long);
int v7_coherent_user_range(unsigned long, unsigned long);
void v7_flush_kern_dcache_area(void *, size_t);
void v7_dma_map_area(const void *, size_t, int);
void v7_dma_unmap_area(const void *, size_t, int);
void v7_dma_flush_range(const void *, const void *);

/*
 * ARMv7 cache vtable: unlike the older caches, v7 provides a
 * dedicated routine for the LoUIS flush.
 */
struct cpu_cache_fns v7_cache_fns __initconst = {
	.flush_icache_all = v7_flush_icache_all,
	.flush_kern_all = v7_flush_kern_cache_all,
	.flush_kern_louis = v7_flush_kern_cache_louis,
	.flush_user_all = v7_flush_user_cache_all,
	.flush_user_range = v7_flush_user_cache_range,
	.coherent_kern_range = v7_coherent_kern_range,
	.coherent_user_range = v7_coherent_user_range,
	.flush_kern_dcache_area = v7_flush_kern_dcache_area,
	.dma_map_area = v7_dma_map_area,
	.dma_unmap_area = v7_dma_unmap_area,
	.dma_flush_range = v7_dma_flush_range,
};

/* Special quirky cache flush function for Broadcom B15 v7 caches */
void b15_flush_kern_cache_all(void);

/*
 * B15 cache vtable: identical to the plain v7 vtable except that,
 * when CONFIG_CACHE_B15_RAC is enabled, the full kernel cache flush
 * is replaced with the B15-specific quirk implementation.
 */
struct cpu_cache_fns b15_cache_fns __initconst = {
	.flush_icache_all = v7_flush_icache_all,
#ifdef CONFIG_CACHE_B15_RAC
	.flush_kern_all = b15_flush_kern_cache_all,
#else
	.flush_kern_all = v7_flush_kern_cache_all,
#endif
	.flush_kern_louis = v7_flush_kern_cache_louis,
	.flush_user_all = v7_flush_user_cache_all,
	.flush_user_range = v7_flush_user_cache_range,
	.coherent_kern_range = v7_coherent_kern_range,
	.coherent_user_range = v7_coherent_user_range,
	.flush_kern_dcache_area = v7_flush_kern_dcache_area,
	.dma_map_area = v7_dma_map_area,
	.dma_unmap_area = v7_dma_unmap_area,
	.dma_flush_range = v7_dma_flush_range,
};
#endif

/* The NOP cache is just a set of dummy stubs that by definition does nothing */
#ifdef CONFIG_CPU_CACHE_NOP
void nop_flush_icache_all(void);
void nop_flush_kern_cache_all(void);
void nop_flush_user_cache_all(void);
void nop_flush_user_cache_range(unsigned long start, unsigned long end, unsigned int flags);
void nop_coherent_kern_range(unsigned long start, unsigned long end);
int nop_coherent_user_range(unsigned long, unsigned long);
void nop_flush_kern_dcache_area(void *kaddr, size_t size);
void nop_dma_map_area(const void *start, size_t size, int flags);
void nop_dma_unmap_area(const void *start, size_t size, int flags);
void nop_dma_flush_range(const void *start, const void *end);

/* No-op cache vtable: every operation is a stub */
struct cpu_cache_fns nop_cache_fns __initconst = {
	.flush_icache_all = nop_flush_icache_all,
	.flush_kern_all = nop_flush_kern_cache_all,
	.flush_kern_louis = nop_flush_kern_cache_all,
	.flush_user_all = nop_flush_user_cache_all,
	.flush_user_range = nop_flush_user_cache_range,
	.coherent_kern_range = nop_coherent_kern_range,
	.coherent_user_range = nop_coherent_user_range,
	.flush_kern_dcache_area = nop_flush_kern_dcache_area,
	.dma_map_area = nop_dma_map_area,
	.dma_unmap_area = nop_dma_unmap_area,
	.dma_flush_range = nop_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_CACHE_V7M
/* Low-level assembly implementations of the ARMv7-M cache operations */
void v7m_flush_icache_all(void);
void v7m_flush_kern_cache_all(void);
void v7m_flush_user_cache_all(void);
void v7m_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v7m_coherent_kern_range(unsigned long, unsigned long);
int v7m_coherent_user_range(unsigned long, unsigned long);
void v7m_flush_kern_dcache_area(void *, size_t);
void v7m_dma_map_area(const void *, size_t, int);
void v7m_dma_unmap_area(const void *, size_t, int);
void v7m_dma_flush_range(const void *, const void *);

/*
 * ARMv7-M cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns v7m_cache_fns __initconst = {
	.flush_icache_all = v7m_flush_icache_all,
	.flush_kern_all = v7m_flush_kern_cache_all,
	.flush_kern_louis = v7m_flush_kern_cache_all,
	.flush_user_all = v7m_flush_user_cache_all,
	.flush_user_range = v7m_flush_user_cache_range,
	.coherent_kern_range = v7m_coherent_kern_range,
	.coherent_user_range = v7m_coherent_user_range,
	.flush_kern_dcache_area = v7m_flush_kern_dcache_area,
	.dma_map_area = v7m_dma_map_area,
	.dma_unmap_area = v7m_dma_unmap_area,
	.dma_flush_range = v7m_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM1020
/* Low-level assembly implementations of the ARM1020 cache operations */
void arm1020_flush_icache_all(void);
void arm1020_flush_kern_cache_all(void);
void arm1020_flush_user_cache_all(void);
void arm1020_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm1020_coherent_kern_range(unsigned long, unsigned long);
int arm1020_coherent_user_range(unsigned long, unsigned long);
void arm1020_flush_kern_dcache_area(void *, size_t);
void arm1020_dma_map_area(const void *, size_t, int);
void arm1020_dma_unmap_area(const void *, size_t, int);
void arm1020_dma_flush_range(const void *, const void *);

/*
 * ARM1020 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm1020_cache_fns __initconst = {
	.flush_icache_all = arm1020_flush_icache_all,
	.flush_kern_all = arm1020_flush_kern_cache_all,
	.flush_kern_louis = arm1020_flush_kern_cache_all,
	.flush_user_all = arm1020_flush_user_cache_all,
	.flush_user_range = arm1020_flush_user_cache_range,
	.coherent_kern_range = arm1020_coherent_kern_range,
	.coherent_user_range = arm1020_coherent_user_range,
	.flush_kern_dcache_area = arm1020_flush_kern_dcache_area,
	.dma_map_area = arm1020_dma_map_area,
	.dma_unmap_area = arm1020_dma_unmap_area,
	.dma_flush_range = arm1020_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM1020E
/* Low-level assembly implementations of the ARM1020E cache operations */
void arm1020e_flush_icache_all(void);
void arm1020e_flush_kern_cache_all(void);
void arm1020e_flush_user_cache_all(void);
void arm1020e_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm1020e_coherent_kern_range(unsigned long, unsigned long);
int arm1020e_coherent_user_range(unsigned long, unsigned long);
void arm1020e_flush_kern_dcache_area(void *, size_t);
void arm1020e_dma_map_area(const void *, size_t, int);
void arm1020e_dma_unmap_area(const void *, size_t, int);
void arm1020e_dma_flush_range(const void *, const void *);

/*
 * ARM1020E cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm1020e_cache_fns __initconst = {
	.flush_icache_all = arm1020e_flush_icache_all,
	.flush_kern_all = arm1020e_flush_kern_cache_all,
	.flush_kern_louis = arm1020e_flush_kern_cache_all,
	.flush_user_all = arm1020e_flush_user_cache_all,
	.flush_user_range = arm1020e_flush_user_cache_range,
	.coherent_kern_range = arm1020e_coherent_kern_range,
	.coherent_user_range = arm1020e_coherent_user_range,
	.flush_kern_dcache_area = arm1020e_flush_kern_dcache_area,
	.dma_map_area = arm1020e_dma_map_area,
	.dma_unmap_area = arm1020e_dma_unmap_area,
	.dma_flush_range = arm1020e_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM1022
/* Low-level assembly implementations of the ARM1022 cache operations */
void arm1022_flush_icache_all(void);
void arm1022_flush_kern_cache_all(void);
void arm1022_flush_user_cache_all(void);
void arm1022_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm1022_coherent_kern_range(unsigned long, unsigned long);
int arm1022_coherent_user_range(unsigned long, unsigned long);
void arm1022_flush_kern_dcache_area(void *, size_t);
void arm1022_dma_map_area(const void *, size_t, int);
void arm1022_dma_unmap_area(const void *, size_t, int);
void arm1022_dma_flush_range(const void *, const void *);

/*
 * ARM1022 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm1022_cache_fns __initconst = {
	.flush_icache_all = arm1022_flush_icache_all,
	.flush_kern_all = arm1022_flush_kern_cache_all,
	.flush_kern_louis = arm1022_flush_kern_cache_all,
	.flush_user_all = arm1022_flush_user_cache_all,
	.flush_user_range = arm1022_flush_user_cache_range,
	.coherent_kern_range = arm1022_coherent_kern_range,
	.coherent_user_range = arm1022_coherent_user_range,
	.flush_kern_dcache_area = arm1022_flush_kern_dcache_area,
	.dma_map_area = arm1022_dma_map_area,
	.dma_unmap_area = arm1022_dma_unmap_area,
	.dma_flush_range = arm1022_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM1026
/* Low-level assembly implementations of the ARM1026 cache operations */
void arm1026_flush_icache_all(void);
void arm1026_flush_kern_cache_all(void);
void arm1026_flush_user_cache_all(void);
void arm1026_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm1026_coherent_kern_range(unsigned long, unsigned long);
int arm1026_coherent_user_range(unsigned long, unsigned long);
void arm1026_flush_kern_dcache_area(void *, size_t);
void arm1026_dma_map_area(const void *, size_t, int);
void arm1026_dma_unmap_area(const void *, size_t, int);
void arm1026_dma_flush_range(const void *, const void *);

/*
 * ARM1026 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm1026_cache_fns __initconst = {
	.flush_icache_all = arm1026_flush_icache_all,
	.flush_kern_all = arm1026_flush_kern_cache_all,
	.flush_kern_louis = arm1026_flush_kern_cache_all,
	.flush_user_all = arm1026_flush_user_cache_all,
	.flush_user_range = arm1026_flush_user_cache_range,
	.coherent_kern_range = arm1026_coherent_kern_range,
	.coherent_user_range = arm1026_coherent_user_range,
	.flush_kern_dcache_area = arm1026_flush_kern_dcache_area,
	.dma_map_area = arm1026_dma_map_area,
	.dma_unmap_area = arm1026_dma_unmap_area,
	.dma_flush_range = arm1026_dma_flush_range,
};
#endif

/* ARM920T: only built when the data cache is not in write-through mode */
#if defined(CONFIG_CPU_ARM920T) && !defined(CONFIG_CPU_DCACHE_WRITETHROUGH)
/* Low-level assembly implementations of the ARM920 cache operations */
void arm920_flush_icache_all(void);
void arm920_flush_kern_cache_all(void);
void arm920_flush_user_cache_all(void);
void arm920_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm920_coherent_kern_range(unsigned long, unsigned long);
int arm920_coherent_user_range(unsigned long, unsigned long);
void arm920_flush_kern_dcache_area(void *, size_t);
void arm920_dma_map_area(const void *, size_t, int);
void arm920_dma_unmap_area(const void *, size_t, int);
void arm920_dma_flush_range(const void *, const void *);

/*
 * ARM920 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm920_cache_fns __initconst = {
	.flush_icache_all = arm920_flush_icache_all,
	.flush_kern_all = arm920_flush_kern_cache_all,
	.flush_kern_louis = arm920_flush_kern_cache_all,
	.flush_user_all = arm920_flush_user_cache_all,
	.flush_user_range = arm920_flush_user_cache_range,
	.coherent_kern_range = arm920_coherent_kern_range,
	.coherent_user_range = arm920_coherent_user_range,
	.flush_kern_dcache_area = arm920_flush_kern_dcache_area,
	.dma_map_area = arm920_dma_map_area,
	.dma_unmap_area = arm920_dma_unmap_area,
	.dma_flush_range = arm920_dma_flush_range,
};
#endif

/* ARM922T: only built when the data cache is not in write-through mode */
#if defined(CONFIG_CPU_ARM922T) && !defined(CONFIG_CPU_DCACHE_WRITETHROUGH)
/* Low-level assembly implementations of the ARM922 cache operations */
void arm922_flush_icache_all(void);
void arm922_flush_kern_cache_all(void);
void arm922_flush_user_cache_all(void);
void arm922_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm922_coherent_kern_range(unsigned long, unsigned long);
int arm922_coherent_user_range(unsigned long, unsigned long);
void arm922_flush_kern_dcache_area(void *, size_t);
void arm922_dma_map_area(const void *, size_t, int);
void arm922_dma_unmap_area(const void *, size_t, int);
void arm922_dma_flush_range(const void *, const void *);

/*
 * ARM922 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm922_cache_fns __initconst = {
	.flush_icache_all = arm922_flush_icache_all,
	.flush_kern_all = arm922_flush_kern_cache_all,
	.flush_kern_louis = arm922_flush_kern_cache_all,
	.flush_user_all = arm922_flush_user_cache_all,
	.flush_user_range = arm922_flush_user_cache_range,
	.coherent_kern_range = arm922_coherent_kern_range,
	.coherent_user_range = arm922_coherent_user_range,
	.flush_kern_dcache_area = arm922_flush_kern_dcache_area,
	.dma_map_area = arm922_dma_map_area,
	.dma_unmap_area = arm922_dma_unmap_area,
	.dma_flush_range = arm922_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM925T
/* Low-level assembly implementations of the ARM925 cache operations */
void arm925_flush_icache_all(void);
void arm925_flush_kern_cache_all(void);
void arm925_flush_user_cache_all(void);
void arm925_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm925_coherent_kern_range(unsigned long, unsigned long);
int arm925_coherent_user_range(unsigned long, unsigned long);
void arm925_flush_kern_dcache_area(void *, size_t);
void arm925_dma_map_area(const void *, size_t, int);
void arm925_dma_unmap_area(const void *, size_t, int);
void arm925_dma_flush_range(const void *, const void *);

/*
 * ARM925 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm925_cache_fns __initconst = {
	.flush_icache_all = arm925_flush_icache_all,
	.flush_kern_all = arm925_flush_kern_cache_all,
	.flush_kern_louis = arm925_flush_kern_cache_all,
	.flush_user_all = arm925_flush_user_cache_all,
	.flush_user_range = arm925_flush_user_cache_range,
	.coherent_kern_range = arm925_coherent_kern_range,
	.coherent_user_range = arm925_coherent_user_range,
	.flush_kern_dcache_area = arm925_flush_kern_dcache_area,
	.dma_map_area = arm925_dma_map_area,
	.dma_unmap_area = arm925_dma_unmap_area,
	.dma_flush_range = arm925_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM926T
/* Low-level assembly implementations of the ARM926 cache operations */
void arm926_flush_icache_all(void);
void arm926_flush_kern_cache_all(void);
void arm926_flush_user_cache_all(void);
void arm926_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm926_coherent_kern_range(unsigned long, unsigned long);
int arm926_coherent_user_range(unsigned long, unsigned long);
void arm926_flush_kern_dcache_area(void *, size_t);
void arm926_dma_map_area(const void *, size_t, int);
void arm926_dma_unmap_area(const void *, size_t, int);
void arm926_dma_flush_range(const void *, const void *);

/*
 * ARM926 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm926_cache_fns __initconst = {
	.flush_icache_all = arm926_flush_icache_all,
	.flush_kern_all = arm926_flush_kern_cache_all,
	.flush_kern_louis = arm926_flush_kern_cache_all,
	.flush_user_all = arm926_flush_user_cache_all,
	.flush_user_range = arm926_flush_user_cache_range,
	.coherent_kern_range = arm926_coherent_kern_range,
	.coherent_user_range = arm926_coherent_user_range,
	.flush_kern_dcache_area = arm926_flush_kern_dcache_area,
	.dma_map_area = arm926_dma_map_area,
	.dma_unmap_area = arm926_dma_unmap_area,
	.dma_flush_range = arm926_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM940T
/* Low-level assembly implementations of the ARM940 cache operations */
void arm940_flush_icache_all(void);
void arm940_flush_kern_cache_all(void);
void arm940_flush_user_cache_all(void);
void arm940_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm940_coherent_kern_range(unsigned long, unsigned long);
int arm940_coherent_user_range(unsigned long, unsigned long);
void arm940_flush_kern_dcache_area(void *, size_t);
void arm940_dma_map_area(const void *, size_t, int);
void arm940_dma_unmap_area(const void *, size_t, int);
void arm940_dma_flush_range(const void *, const void *);

/*
 * ARM940 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm940_cache_fns __initconst = {
	.flush_icache_all = arm940_flush_icache_all,
	.flush_kern_all = arm940_flush_kern_cache_all,
	.flush_kern_louis = arm940_flush_kern_cache_all,
	.flush_user_all = arm940_flush_user_cache_all,
	.flush_user_range = arm940_flush_user_cache_range,
	.coherent_kern_range = arm940_coherent_kern_range,
	.coherent_user_range = arm940_coherent_user_range,
	.flush_kern_dcache_area = arm940_flush_kern_dcache_area,
	.dma_map_area = arm940_dma_map_area,
	.dma_unmap_area = arm940_dma_unmap_area,
	.dma_flush_range = arm940_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM946E
/* Low-level assembly implementations of the ARM946 cache operations */
void arm946_flush_icache_all(void);
void arm946_flush_kern_cache_all(void);
void arm946_flush_user_cache_all(void);
void arm946_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm946_coherent_kern_range(unsigned long, unsigned long);
int arm946_coherent_user_range(unsigned long, unsigned long);
void arm946_flush_kern_dcache_area(void *, size_t);
void arm946_dma_map_area(const void *, size_t, int);
void arm946_dma_unmap_area(const void *, size_t, int);
void arm946_dma_flush_range(const void *, const void *);

/*
 * ARM946 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns arm946_cache_fns __initconst = {
	.flush_icache_all = arm946_flush_icache_all,
	.flush_kern_all = arm946_flush_kern_cache_all,
	.flush_kern_louis = arm946_flush_kern_cache_all,
	.flush_user_all = arm946_flush_user_cache_all,
	.flush_user_range = arm946_flush_user_cache_range,
	.coherent_kern_range = arm946_coherent_kern_range,
	.coherent_user_range = arm946_coherent_user_range,
	.flush_kern_dcache_area = arm946_flush_kern_dcache_area,
	.dma_map_area = arm946_dma_map_area,
	.dma_unmap_area = arm946_dma_unmap_area,
	.dma_flush_range = arm946_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_XSCALE
/* Low-level assembly implementations of the XScale cache operations */
void xscale_flush_icache_all(void);
void xscale_flush_kern_cache_all(void);
void xscale_flush_user_cache_all(void);
void xscale_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void xscale_coherent_kern_range(unsigned long, unsigned long);
int xscale_coherent_user_range(unsigned long, unsigned long);
void xscale_flush_kern_dcache_area(void *, size_t);
void xscale_dma_map_area(const void *, size_t, int);
void xscale_dma_unmap_area(const void *, size_t, int);
void xscale_dma_flush_range(const void *, const void *);

/*
 * XScale cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns xscale_cache_fns __initconst = {
	.flush_icache_all = xscale_flush_icache_all,
	.flush_kern_all = xscale_flush_kern_cache_all,
	.flush_kern_louis = xscale_flush_kern_cache_all,
	.flush_user_all = xscale_flush_user_cache_all,
	.flush_user_range = xscale_flush_user_cache_range,
	.coherent_kern_range = xscale_coherent_kern_range,
	.coherent_user_range = xscale_coherent_user_range,
	.flush_kern_dcache_area = xscale_flush_kern_dcache_area,
	.dma_map_area = xscale_dma_map_area,
	.dma_unmap_area = xscale_dma_unmap_area,
	.dma_flush_range = xscale_dma_flush_range,
};

/* The 80200 A0 and A1 need a special quirk for dma_map_area() */
void xscale_80200_A0_A1_dma_map_area(const void *, size_t, int);

/*
 * Variant vtable for the 80200 A0/A1 steppings: identical to the
 * plain XScale vtable except for the quirked dma_map_area().
 */
struct cpu_cache_fns xscale_80200_A0_A1_cache_fns __initconst = {
	.flush_icache_all = xscale_flush_icache_all,
	.flush_kern_all = xscale_flush_kern_cache_all,
	.flush_kern_louis = xscale_flush_kern_cache_all,
	.flush_user_all = xscale_flush_user_cache_all,
	.flush_user_range = xscale_flush_user_cache_range,
	.coherent_kern_range = xscale_coherent_kern_range,
	.coherent_user_range = xscale_coherent_user_range,
	.flush_kern_dcache_area = xscale_flush_kern_dcache_area,
	.dma_map_area = xscale_80200_A0_A1_dma_map_area,
	.dma_unmap_area = xscale_dma_unmap_area,
	.dma_flush_range = xscale_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_XSC3
/* Low-level assembly implementations of the XScale3 cache operations */
void xsc3_flush_icache_all(void);
void xsc3_flush_kern_cache_all(void);
void xsc3_flush_user_cache_all(void);
void xsc3_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void xsc3_coherent_kern_range(unsigned long, unsigned long);
int xsc3_coherent_user_range(unsigned long, unsigned long);
void xsc3_flush_kern_dcache_area(void *, size_t);
void xsc3_dma_map_area(const void *, size_t, int);
void xsc3_dma_unmap_area(const void *, size_t, int);
void xsc3_dma_flush_range(const void *, const void *);

/*
 * XScale3 cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns xsc3_cache_fns __initconst = {
	.flush_icache_all = xsc3_flush_icache_all,
	.flush_kern_all = xsc3_flush_kern_cache_all,
	.flush_kern_louis = xsc3_flush_kern_cache_all,
	.flush_user_all = xsc3_flush_user_cache_all,
	.flush_user_range = xsc3_flush_user_cache_range,
	.coherent_kern_range = xsc3_coherent_kern_range,
	.coherent_user_range = xsc3_coherent_user_range,
	.flush_kern_dcache_area = xsc3_flush_kern_dcache_area,
	.dma_map_area = xsc3_dma_map_area,
	.dma_unmap_area = xsc3_dma_unmap_area,
	.dma_flush_range = xsc3_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_MOHAWK
/* Low-level assembly implementations of the Mohawk cache operations */
void mohawk_flush_icache_all(void);
void mohawk_flush_kern_cache_all(void);
void mohawk_flush_user_cache_all(void);
void mohawk_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void mohawk_coherent_kern_range(unsigned long, unsigned long);
int mohawk_coherent_user_range(unsigned long, unsigned long);
void mohawk_flush_kern_dcache_area(void *, size_t);
void mohawk_dma_map_area(const void *, size_t, int);
void mohawk_dma_unmap_area(const void *, size_t, int);
void mohawk_dma_flush_range(const void *, const void *);

/*
 * Mohawk cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns mohawk_cache_fns __initconst = {
	.flush_icache_all = mohawk_flush_icache_all,
	.flush_kern_all = mohawk_flush_kern_cache_all,
	.flush_kern_louis = mohawk_flush_kern_cache_all,
	.flush_user_all = mohawk_flush_user_cache_all,
	.flush_user_range = mohawk_flush_user_cache_range,
	.coherent_kern_range = mohawk_coherent_kern_range,
	.coherent_user_range = mohawk_coherent_user_range,
	.flush_kern_dcache_area = mohawk_flush_kern_dcache_area,
	.dma_map_area = mohawk_dma_map_area,
	.dma_unmap_area = mohawk_dma_unmap_area,
	.dma_flush_range = mohawk_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_FEROCEON
/* Low-level assembly implementations of the Feroceon cache operations */
void feroceon_flush_icache_all(void);
void feroceon_flush_kern_cache_all(void);
void feroceon_flush_user_cache_all(void);
void feroceon_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void feroceon_coherent_kern_range(unsigned long, unsigned long);
int feroceon_coherent_user_range(unsigned long, unsigned long);
void feroceon_flush_kern_dcache_area(void *, size_t);
void feroceon_dma_map_area(const void *, size_t, int);
void feroceon_dma_unmap_area(const void *, size_t, int);
void feroceon_dma_flush_range(const void *, const void *);

/*
 * Feroceon cache vtable: no dedicated LoUIS routine exists, so
 * flush_kern_louis falls back to the full kernel cache flush.
 */
struct cpu_cache_fns feroceon_cache_fns __initconst = {
	.flush_icache_all = feroceon_flush_icache_all,
	.flush_kern_all = feroceon_flush_kern_cache_all,
	.flush_kern_louis = feroceon_flush_kern_cache_all,
	.flush_user_all = feroceon_flush_user_cache_all,
	.flush_user_range = feroceon_flush_user_cache_range,
	.coherent_kern_range = feroceon_coherent_kern_range,
	.coherent_user_range = feroceon_coherent_user_range,
	.flush_kern_dcache_area = feroceon_flush_kern_dcache_area,
	.dma_map_area = feroceon_dma_map_area,
	.dma_unmap_area = feroceon_dma_unmap_area,
	.dma_flush_range = feroceon_dma_flush_range,
};

/* Range-operation variants of three of the Feroceon routines */
void feroceon_range_flush_kern_dcache_area(void *, size_t);
void feroceon_range_dma_map_area(const void *, size_t, int);
void feroceon_range_dma_flush_range(const void *, const void *);

/*
 * Feroceon "range" vtable: identical to the plain Feroceon vtable
 * except that flush_kern_dcache_area, dma_map_area and
 * dma_flush_range use the range-operation variants.
 */
struct cpu_cache_fns feroceon_range_cache_fns __initconst = {
	.flush_icache_all = feroceon_flush_icache_all,
	.flush_kern_all = feroceon_flush_kern_cache_all,
	.flush_kern_louis = feroceon_flush_kern_cache_all,
	.flush_user_all = feroceon_flush_user_cache_all,
	.flush_user_range = feroceon_flush_user_cache_range,
	.coherent_kern_range = feroceon_coherent_kern_range,
	.coherent_user_range = feroceon_coherent_user_range,
	.flush_kern_dcache_area = feroceon_range_flush_kern_dcache_area,
	.dma_map_area = feroceon_range_dma_map_area,
	.dma_unmap_area = feroceon_dma_unmap_area,
	.dma_flush_range = feroceon_range_dma_flush_range,
};
#endif