v5.9
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _M68KNOMMU_CACHEFLUSH_H
#define _M68KNOMMU_CACHEFLUSH_H

/*
 * (C) Copyright 2000-2010, Greg Ungerer <gerg@snapgear.com>
 */
#include <linux/mm.h>
#include <asm/mcfsim.h>

#define flush_cache_all()			__flush_cache_all()
#define flush_dcache_range(start, len)		__flush_dcache_all()
#define flush_icache_range(start, len)		__flush_icache_all()

void mcf_cache_push(void);

static inline void __clear_cache_all(void)
{
#ifdef CACHE_INVALIDATE
	__asm__ __volatile__ (
		"movec	%0, %%CACR\n\t"
		"nop\n\t"
		: : "r" (CACHE_INVALIDATE) );
#endif
}

static inline void __flush_cache_all(void)
{
#ifdef CACHE_PUSH
	mcf_cache_push();
#endif
	__clear_cache_all();
}

/*
 * Some ColdFire parts implement separate instruction and data caches,
 * on those we should just flush the appropriate cache. If we don't need
 * to do any specific flushing then this will be optimized away.
 */
static inline void __flush_icache_all(void)
{
#ifdef CACHE_INVALIDATEI
	__asm__ __volatile__ (
		"movec	%0, %%CACR\n\t"
		"nop\n\t"
		: : "r" (CACHE_INVALIDATEI) );
#endif
}

static inline void __flush_dcache_all(void)
{
#ifdef CACHE_PUSH
	mcf_cache_push();
#endif
#ifdef CACHE_INVALIDATED
	__asm__ __volatile__ (
		"movec	%0, %%CACR\n\t"
		"nop\n\t"
		: : "r" (CACHE_INVALIDATED) );
#else
	/* Flush the write buffer */
	__asm__ __volatile__ ( "nop" );
#endif
}

/*
 * Push cache entries at supplied address. We want to write back any dirty
 * data and then invalidate the cache lines associated with this address.
 */
static inline void cache_push(unsigned long paddr, int len)
{
	__flush_cache_all();
}

/*
 * Clear cache entries at supplied address (that is don't write back any
 * dirty data).
 */
static inline void cache_clear(unsigned long paddr, int len)
{
	__clear_cache_all();
}

#include <asm-generic/cacheflush.h>

#endif /* _M68KNOMMU_CACHEFLUSH_H */
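In the v5.9 copy above, the per-operation no-op hooks (flush_cache_mm(), flush_dcache_page() and friends, still spelled out explicitly in the v4.6 copy below) are picked up from <asm-generic/cacheflush.h> instead. As a rough sketch only, assuming the generic header follows the usual #ifndef-guarded-stub pattern (its real contents are not reproduced here), each hook is defaulted to an empty inline only when the architecture has not already defined it:

/* Illustrative sketch of the assumed fallback pattern, not the actual
 * contents of asm-generic/cacheflush.h. */
struct mm_struct;

#ifndef flush_cache_mm
static inline void flush_cache_mm(struct mm_struct *mm)
{
}
#endif
/* ...and similarly for the other hooks this architecture leaves undefined. */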
v4.6
 
#ifndef _M68KNOMMU_CACHEFLUSH_H
#define _M68KNOMMU_CACHEFLUSH_H

/*
 * (C) Copyright 2000-2010, Greg Ungerer <gerg@snapgear.com>
 */
#include <linux/mm.h>
#include <asm/mcfsim.h>

#define flush_cache_all()			__flush_cache_all()
#define flush_cache_mm(mm)			do { } while (0)
#define flush_cache_dup_mm(mm)			do { } while (0)
#define flush_cache_range(vma, start, end)	do { } while (0)
#define flush_cache_page(vma, vmaddr)		do { } while (0)
#define flush_dcache_range(start, len)		__flush_dcache_all()
#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE 0
#define flush_dcache_page(page)			do { } while (0)
#define flush_dcache_mmap_lock(mapping)		do { } while (0)
#define flush_dcache_mmap_unlock(mapping)	do { } while (0)
#define flush_icache_range(start, len)		__flush_icache_all()
#define flush_icache_page(vma,pg)		do { } while (0)
#define flush_icache_user_range(vma,pg,adr,len)	do { } while (0)
#define flush_cache_vmap(start, end)		do { } while (0)
#define flush_cache_vunmap(start, end)		do { } while (0)

#define copy_to_user_page(vma, page, vaddr, dst, src, len) \
	memcpy(dst, src, len)
#define copy_from_user_page(vma, page, vaddr, dst, src, len) \
	memcpy(dst, src, len)

void mcf_cache_push(void);

static inline void __clear_cache_all(void)
{
#ifdef CACHE_INVALIDATE
	__asm__ __volatile__ (
		"movec	%0, %%CACR\n\t"
		"nop\n\t"
		: : "r" (CACHE_INVALIDATE) );
#endif
}

static inline void __flush_cache_all(void)
{
#ifdef CACHE_PUSH
	mcf_cache_push();
#endif
	__clear_cache_all();
}

/*
 * Some ColdFire parts implement separate instruction and data caches,
 * on those we should just flush the appropriate cache. If we don't need
 * to do any specific flushing then this will be optimized away.
 */
static inline void __flush_icache_all(void)
{
#ifdef CACHE_INVALIDATEI
	__asm__ __volatile__ (
		"movec	%0, %%CACR\n\t"
		"nop\n\t"
		: : "r" (CACHE_INVALIDATEI) );
#endif
}

static inline void __flush_dcache_all(void)
{
#ifdef CACHE_PUSH
	mcf_cache_push();
#endif
#ifdef CACHE_INVALIDATED
	__asm__ __volatile__ (
		"movec	%0, %%CACR\n\t"
		"nop\n\t"
		: : "r" (CACHE_INVALIDATED) );
#else
	/* Flush the write buffer */
	__asm__ __volatile__ ( "nop" );
#endif
}

/*
 * Push cache entries at supplied address. We want to write back any dirty
 * data and then invalidate the cache lines associated with this address.
 */
static inline void cache_push(unsigned long paddr, int len)
{
	__flush_cache_all();
}

/*
 * Clear cache entries at supplied address (that is don't write back any
 * dirty data).
 */
static inline void cache_clear(unsigned long paddr, int len)
{
	__clear_cache_all();
}

#endif /* _M68KNOMMU_CACHEFLUSH_H */
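In both versions the range arguments to flush_icache_range() and flush_dcache_range() are ignored and the whole cache is flushed or invalidated. A minimal, hedged usage sketch follows; copy_code_and_sync() and its parameters are invented for illustration and are not part of either header:

#include <linux/string.h>
#include <asm/cacheflush.h>	/* assumed to pull in the nommu header listed above */

/*
 * Hypothetical caller: copy fresh instructions into RAM, then invalidate the
 * instruction cache so the CPU cannot execute stale lines. On this port the
 * range is ignored and the entire icache is invalidated.
 */
static void copy_code_and_sync(void *dst, const void *src, unsigned long len)
{
	memcpy(dst, src, len);
	flush_icache_range((unsigned long)dst, (unsigned long)dst + len);
}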