Linux Audio

Check our new training course

Loading...
v4.6
 
  1/*
  2 * Cache maintenance
  3 *
  4 * Copyright (C) 2001 Deep Blue Solutions Ltd.
  5 * Copyright (C) 2012 ARM Ltd.
  6 *
  7 * This program is free software; you can redistribute it and/or modify
  8 * it under the terms of the GNU General Public License version 2 as
  9 * published by the Free Software Foundation.
 10 *
 11 * This program is distributed in the hope that it will be useful,
 12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 14 * GNU General Public License for more details.
 15 *
 16 * You should have received a copy of the GNU General Public License
 17 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 18 */
 19
 20#include <linux/errno.h>
 21#include <linux/linkage.h>
 22#include <linux/init.h>
 23#include <asm/assembler.h>
 24#include <asm/cpufeature.h>
 25#include <asm/alternative.h>
 
 26
 27#include "proc-macros.S"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 28
 29/*
 30 *	flush_icache_range(start,end)
 31 *
 32 *	Ensure that the I and D caches are coherent within specified region.
 33 *	This is typically used when code has been written to a memory region,
 34 *	and will be executed.
 35 *
 36 *	- start   - virtual start address of region
 37 *	- end     - virtual end address of region
 38 */
 39ENTRY(flush_icache_range)
 40	/* FALLTHROUGH */			// kernel entry shares the user-range body below



 41
 42/*
 43 *	__flush_cache_user_range(start,end)
 44 *
 45 *	Ensure that the I and D caches are coherent within specified region.
 46 *	This is typically used when code has been written to a memory region,
 47 *	and will be executed.
 48 *
 49 *	- start   - virtual start address of region
 50 *	- end     - virtual end address of region
 51 */
 52ENTRY(__flush_cache_user_range)
 53	dcache_line_size x2, x3			// x2 = D-cache line size (x3 scratch)
 54	sub	x3, x2, #1			// x3 = line-size mask
 55	bic	x4, x0, x3			// x4 = start rounded down to line boundary
 561:
 57USER(9f, dc	cvau, x4	)		// clean D line to PoU
 58	add	x4, x4, x2
 59	cmp	x4, x1
 60	b.lo	1b				// loop until x4 >= end
 61	dsb	ish				// complete D-side cleans before I-side work
 62
 63	icache_line_size x2, x3			// x2 = I-cache line size (may differ from D)
 64	sub	x3, x2, #1
 65	bic	x4, x0, x3			// restart from aligned start for I lines
 661:
 67USER(9f, ic	ivau, x4	)		// invalidate I line PoU
 68	add	x4, x4, x2
 69	cmp	x4, x1
 70	b.lo	1b
 71	dsb	ish				// complete I-invalidates
 72	isb					// discard prefetched stale instructions
 73	mov	x0, #0				// success
 74	ret
 759:						// fault fixup target from USER() annotations
 76	mov	x0, #-EFAULT


















 77	ret
 78ENDPROC(flush_icache_range)
 79ENDPROC(__flush_cache_user_range)
 80
 81/*
 82 *	__flush_dcache_area(kaddr, size)
 83 *
 84 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 85 *	are cleaned and invalidated to the PoC.
 86 *
 87 *	- kaddr   - kernel address
 88 *	- size    - size in question
 89 */
 90ENTRY(__flush_dcache_area)
 91	dcache_by_line_op civac, sy, x0, x1, x2, x3	// DC CIVAC each line in [x0, x0+x1), then DSB SY
 92	ret
 93ENDPIPROC(__flush_dcache_area)
 
 94
 95/*
 96 *	__clean_dcache_area_pou(kaddr, size)
 97 *
 98 * 	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 99 * 	are cleaned to the PoU.
100 *
101 *	- kaddr   - kernel address
102 *	- size    - size in question
103 */
104ENTRY(__clean_dcache_area_pou)




105	dcache_by_line_op cvau, ish, x0, x1, x2, x3	// DC CVAU each line in [x0, x0+x1), then DSB ISH
106	ret
107ENDPROC(__clean_dcache_area_pou)
108
109/*
110 *	__inval_cache_range(start, end)
111 *	- start   - start address of region
112 *	- end     - end address of region
113 */
114ENTRY(__inval_cache_range)
115	/* FALLTHROUGH */			// same body as __dma_inv_range below
116
117/*
118 *	__dma_inv_range(start, end)
119 *	- start   - virtual start address of region
120 *	- end     - virtual end address of region





121 */
122__dma_inv_range:
123	dcache_line_size x2, x3			// x2 = line size, x3 scratch
124	sub	x3, x2, #1			// x3 = line-size mask
125	tst	x1, x3				// end cache line aligned?
126	bic	x1, x1, x3
127	b.eq	1f
128	dc	civac, x1			// clean & invalidate D / U line
129	// partial lines at either edge are cleaned first so neighbouring data is not lost
1291:	tst	x0, x3				// start cache line aligned?
130	bic	x0, x0, x3
131	b.eq	2f
132	dc	civac, x0			// clean & invalidate D / U line
133	b	3f
1342:	dc	ivac, x0			// invalidate D / U line
1353:	add	x0, x0, x2
136	cmp	x0, x1
137	b.lo	2b				// interior lines: invalidate only
138	dsb	sy
139	ret
140ENDPIPROC(__inval_cache_range)
141ENDPROC(__dma_inv_range)
142
143/*
144 *	__dma_clean_range(start, end)
 
 
 
 
145 *	- start   - virtual start address of region
146 *	- end     - virtual end address of region
147 */
148__dma_clean_range:
149	dcache_line_size x2, x3			// x2 = line size, x3 scratch
150	sub	x3, x2, #1
151	bic	x0, x0, x3			// align start down to line boundary
1521:
153alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
154	dc	cvac, x0			// normal case: clean to PoC
155alternative_else
156	dc	civac, x0			// errata workaround: clean+invalidate instead
157alternative_endif
158	add	x0, x0, x2
159	cmp	x0, x1
160	b.lo	1b
161	dsb	sy
162	ret
163ENDPROC(__dma_clean_range)
 
164
165/*
166 *	__dma_flush_range(start, end)
 
 
 
 
167 *	- start   - virtual start address of region
168 *	- end     - virtual end address of region
169 */
170ENTRY(__dma_flush_range)
171	dcache_line_size x2, x3			// x2 = line size, x3 scratch
172	sub	x3, x2, #1
173	bic	x0, x0, x3			// align start down to line boundary
1741:	dc	civac, x0			// clean & invalidate D / U line
175	add	x0, x0, x2
176	cmp	x0, x1
177	b.lo	1b
178	dsb	sy
179	ret
180ENDPIPROC(__dma_flush_range)
181
182/*
183 *	__dma_map_area(start, size, dir)
184 *	- start	- kernel virtual start address
185 *	- size	- size of region
186 *	- dir	- DMA direction
187 */
188ENTRY(__dma_map_area)
189	add	x1, x1, x0			// convert (start, size) to (start, end)
190	cmp	w2, #DMA_FROM_DEVICE
191	b.eq	__dma_inv_range			// device writes: invalidate only
192	b	__dma_clean_range		// otherwise: clean so device sees our data
193ENDPIPROC(__dma_map_area)
194
195/*
196 *	__dma_unmap_area(start, size, dir)
197 *	- start	- kernel virtual start address
198 *	- size	- size of region
199 *	- dir	- DMA direction
200 */
201ENTRY(__dma_unmap_area)
202	add	x1, x1, x0			// convert (start, size) to (start, end)
203	cmp	w2, #DMA_TO_DEVICE
204	b.ne	__dma_inv_range			// device may have written: invalidate stale lines
205	ret					// TO_DEVICE: nothing to do after transfer
206ENDPIPROC(__dma_unmap_area)
 
v6.13.7
  1/* SPDX-License-Identifier: GPL-2.0-only */
  2/*
  3 * Cache maintenance
  4 *
  5 * Copyright (C) 2001 Deep Blue Solutions Ltd.
  6 * Copyright (C) 2012 ARM Ltd.
 
 
 
 
 
 
 
 
 
 
 
 
  7 */
  8
  9#include <linux/errno.h>
 10#include <linux/linkage.h>
 11#include <linux/init.h>
 12#include <asm/assembler.h>
 13#include <asm/cpufeature.h>
 14#include <asm/alternative.h>
 15#include <asm/asm-uaccess.h>
 16
 17/*
 18 *	caches_clean_inval_pou_macro(start,end) [fixup]
 19 *
 20 *	Ensure that the I and D caches are coherent within specified region.
 21 *	This is typically used when code has been written to a memory region,
 22 *	and will be executed.
 23 *
 24 *	- start   - virtual start address of region
 25 *	- end     - virtual end address of region
 26 *	- fixup   - optional label to branch to on user fault
 27 */
 28.macro	caches_clean_inval_pou_macro, fixup
 29alternative_if ARM64_HAS_CACHE_IDC		// IDC: D-clean to PoU not required for I/D coherence
 30	dsb     ishst
 31	b       .Ldc_skip_\@
 32alternative_else_nop_endif
 33	mov     x2, x0				// copy args so x0/x1 survive for the I-side pass
 34	mov     x3, x1
 35	dcache_by_line_op cvau, ish, x2, x3, x4, x5, \fixup
 36.Ldc_skip_\@:
 37alternative_if ARM64_HAS_CACHE_DIC		// DIC: I-invalidate not required; ISB suffices
 38	isb
 39	b	.Lic_skip_\@
 40alternative_else_nop_endif
 41	invalidate_icache_by_line x0, x1, x2, x3, \fixup
 42.Lic_skip_\@:
 43.endm
 44
 45/*
 46 *	caches_clean_inval_pou(start,end)
 47 *
 48 *	Ensure that the I and D caches are coherent within specified region.
 49 *	This is typically used when code has been written to a memory region,
 50 *	and will be executed.
 51 *
 52 *	- start   - virtual start address of region
 53 *	- end     - virtual end address of region
 54 */
 55SYM_FUNC_START(caches_clean_inval_pou)
 56	caches_clean_inval_pou_macro		// kernel addresses: no fault fixup label needed
 57	ret
 58SYM_FUNC_END(caches_clean_inval_pou)
 59SYM_FUNC_ALIAS(__pi_caches_clean_inval_pou, caches_clean_inval_pou)
 60
 61/*
 62 *	caches_clean_inval_user_pou(start,end)
 63 *
 64 *	Ensure that the I and D caches are coherent within specified region.
 65 *	This is typically used when code has been written to a memory region,
 66 *	and will be executed.
 67 *
 68 *	- start   - virtual start address of region
 69 *	- end     - virtual end address of region
 70 */
 71SYM_FUNC_START(caches_clean_inval_user_pou)
 72	uaccess_ttbr0_enable x2, x3, x4		// allow access to user mappings for the duration








 73
 74	caches_clean_inval_pou_macro 2f		// on user fault, branch to 2f below
 75	mov	x0, xzr				// success path: return 0

 761:
 77	uaccess_ttbr0_disable x1, x2		// common exit: always restore uaccess state






 78	ret
 792:						// fault fixup target
 80	mov	x0, #-EFAULT
 81	b	1b				// rejoin common exit so ttbr0 is disabled
 82SYM_FUNC_END(caches_clean_inval_user_pou)
 83
 84/*
 85 *	icache_inval_pou(start,end)
 86 *
 87 *	Ensure that the I cache is invalid within specified region.
 88 *
 89 *	- start   - virtual start address of region
 90 *	- end     - virtual end address of region
 91 */
 92SYM_FUNC_START(icache_inval_pou)
 93alternative_if ARM64_HAS_CACHE_DIC		// DIC: I-cache invalidation unnecessary, ISB suffices
 94	isb
 95	ret
 96alternative_else_nop_endif
 97
 98	invalidate_icache_by_line x0, x1, x2, x3
 99	ret
100SYM_FUNC_END(icache_inval_pou)
 
101
102/*
103 *	dcache_clean_inval_poc(start, end)
104 *
105 *	Ensure that any D-cache lines for the interval [start, end)
106 *	are cleaned and invalidated to the PoC.
107 *
108 *	- start   - virtual start address of region
109 *	- end     - virtual end address of region
110 */
111SYM_FUNC_START(__pi_dcache_clean_inval_poc)
112	dcache_by_line_op civac, sy, x0, x1, x2, x3	// DC CIVAC each line in [x0, x1), then DSB SY
113	ret
114SYM_FUNC_END(__pi_dcache_clean_inval_poc)
115SYM_FUNC_ALIAS(dcache_clean_inval_poc, __pi_dcache_clean_inval_poc)
116
117/*
118 *	dcache_clean_pou(start, end)
119 *
120 * 	Ensure that any D-cache lines for the interval [start, end)
121 * 	are cleaned to the PoU.
122 *
123 *	- start   - virtual start address of region
124 *	- end     - virtual end address of region
125 */
126SYM_FUNC_START(dcache_clean_pou)
127alternative_if ARM64_HAS_CACHE_IDC		// IDC: clean-to-PoU is a no-op, a barrier suffices
128	dsb	ishst
129	ret
130alternative_else_nop_endif
131	dcache_by_line_op cvau, ish, x0, x1, x2, x3
132	ret
133SYM_FUNC_END(dcache_clean_pou)
 
 
 
 
 
 
 
 
134
135/*
136 *	dcache_inval_poc(start, end)
137 *
138 * 	Ensure that any D-cache lines for the interval [start, end)
139 * 	are invalidated. Any partial lines at the ends of the interval are
140 *	also cleaned to PoC to prevent data loss.
141 *
142 *	- start   - kernel start address of region
143 *	- end     - kernel end address of region
144 */
145SYM_FUNC_START(__pi_dcache_inval_poc)
146	dcache_line_size x2, x3			// x2 = line size, x3 scratch
147	sub	x3, x2, #1			// x3 = line-size mask
148	tst	x1, x3				// end cache line aligned?
149	bic	x1, x1, x3
150	b.eq	1f
151	dc	civac, x1			// clean & invalidate D / U line
152	// partial edge lines are cleaned first so adjacent data outside the range is preserved
1521:	tst	x0, x3				// start cache line aligned?
153	bic	x0, x0, x3
154	b.eq	2f
155	dc	civac, x0			// clean & invalidate D / U line
156	b	3f
1572:	dc	ivac, x0			// invalidate D / U line
1583:	add	x0, x0, x2
159	cmp	x0, x1
160	b.lo	2b				// interior lines: invalidate only
161	dsb	sy
162	ret
163SYM_FUNC_END(__pi_dcache_inval_poc)
164SYM_FUNC_ALIAS(dcache_inval_poc, __pi_dcache_inval_poc)
165
166/*
167 *	dcache_clean_poc(start, end)
168 *
169 * 	Ensure that any D-cache lines for the interval [start, end)
170 * 	are cleaned to the PoC.
171 *
172 *	- start   - virtual start address of region
173 *	- end     - virtual end address of region
174 */
175SYM_FUNC_START(__pi_dcache_clean_poc)
176	dcache_by_line_op cvac, sy, x0, x1, x2, x3	// DC CVAC each line in [x0, x1), then DSB SY












177	ret
178SYM_FUNC_END(__pi_dcache_clean_poc)
179SYM_FUNC_ALIAS(dcache_clean_poc, __pi_dcache_clean_poc)
180
181/*
182 *	dcache_clean_pop(start, end)
183 *
184 * 	Ensure that any D-cache lines for the interval [start, end)
185 * 	are cleaned to the PoP.
186 *
187 *	- start   - virtual start address of region
188 *	- end     - virtual end address of region
189 */
190SYM_FUNC_START(__pi_dcache_clean_pop)
191	alternative_if_not ARM64_HAS_DCPOP	// without DC CVAP support, fall back to PoC clean
192	b	dcache_clean_poc
193	alternative_else_nop_endif
194	dcache_by_line_op cvap, sy, x0, x1, x2, x3	// DC CVAP each line in [x0, x1), then DSB SY






























195	ret
196SYM_FUNC_END(__pi_dcache_clean_pop)
197SYM_FUNC_ALIAS(dcache_clean_pop, __pi_dcache_clean_pop)