#include <linux/irqchip/arm-gic.h>

#define VCPU_USR_REG(_reg_nr)	(VCPU_USR_REGS + (_reg_nr * 4))
#define VCPU_USR_SP		(VCPU_USR_REG(13))
#define VCPU_USR_LR		(VCPU_USR_REG(14))
#define CP15_OFFSET(_cp15_reg_idx) (VCPU_CP15 + (_cp15_reg_idx * 4))
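
/*
 * For illustration (a sketch; the actual offsets such as VCPU_USR_REGS and
 * VCPU_CP15 are generated into asm-offsets from the vcpu struct layout),
 * these macros expand to plain byte offsets, e.g.:
 *	VCPU_USR_SP == VCPU_USR_REGS + (13 * 4)		@ usr-mode SP slot
 *	CP15_OFFSET(c0_CSSELR) == VCPU_CP15 + (c0_CSSELR * 4)
 */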

/*
 * Many of these macros need to access the VCPU structure, which is always
 * held in r0. These macros should never clobber r1, as it is used to hold the
 * exception code on the return path (except of course the macro that switches
 * all the registers before the final jump to the VM).
 */
vcpu	.req	r0		@ vcpu pointer always in r0

/* Clobbers {r2-r6} */
.macro store_vfp_state vfp_base
	@ The VFPFMRX and VFPFMXR macros are the VMRS and VMSR instructions
	VFPFMRX	r2, FPEXC
	@ Make sure VFP is enabled so we can touch the registers.
	orr	r6, r2, #FPEXC_EN
	VFPFMXR	FPEXC, r6

	VFPFMRX	r3, FPSCR
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	@ If FPEXC_EX is 0, then FPINST/FPINST2 reads are unpredictable, so
	@ we only need to save them if FPEXC_EX is set.
	VFPFMRX r4, FPINST
	tst	r2, #FPEXC_FP2V
	VFPFMRX r5, FPINST2, ne		@ vmrsne
	bic	r6, r2, #FPEXC_EX	@ FPEXC_EX disable
	VFPFMXR	FPEXC, r6
1:
	VFPFSTMIA \vfp_base, r6		@ Save VFP registers
	stm	\vfp_base, {r2-r5}	@ Save FPEXC, FPSCR, FPINST, FPINST2
.endm

/* Assume FPEXC_EN is on and FPEXC_EX is off, clobbers {r2-r6} */
.macro restore_vfp_state vfp_base
	VFPFLDMIA \vfp_base, r6		@ Load VFP registers
	ldm	\vfp_base, {r2-r5}	@ Load FPEXC, FPSCR, FPINST, FPINST2

	VFPFMXR FPSCR, r3
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	VFPFMXR FPINST, r4
	tst	r2, #FPEXC_FP2V
	VFPFMXR FPINST2, r5, ne
1:
	VFPFMXR FPEXC, r2	@ FPEXC	(last, in case !EN)
.endm
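
/*
 * Illustrative pairing of the two VFP macros (a sketch, not the real call
 * site; VCPU_VFP_GUEST and VCPU_VFP_HOST are assumed offsets into the
 * vcpu struct):
 *	add	r7, vcpu, #VCPU_VFP_GUEST
 *	store_vfp_state r7		@ guest VFP state -> vcpu struct
 *	add	r7, vcpu, #VCPU_VFP_HOST
 *	ldr	r7, [r7]		@ host state sits behind a pointer
 *	restore_vfp_state r7
 */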

/* These are simply for the macros to work - the values don't have meaning */
.equ usr, 0
.equ svc, 1
.equ abt, 2
.equ und, 3
.equ irq, 4
.equ fiq, 5

.macro push_host_regs_mode mode
	mrs	r2, SP_\mode
	mrs	r3, LR_\mode
	mrs	r4, SPSR_\mode
	push	{r2, r3, r4}
.endm
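
/*
 * For reference, "push_host_regs_mode svc" assembles to the banked-register
 * forms of mrs added by the Virtualization Extensions:
 *	mrs	r2, SP_svc
 *	mrs	r3, LR_svc
 *	mrs	r4, SPSR_svc
 *	push	{r2, r3, r4}
 */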

/*
 * Store all host persistent registers on the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro save_host_regs
	/* Hyp regs. Only ELR_hyp (SPSR_hyp already saved) */
	mrs	r2, ELR_hyp
	push	{r2}

	/* usr regs */
	push	{r4-r12}	@ r0-r3 are always clobbered
	mrs	r2, SP_usr
	mov	r3, lr
	push	{r2, r3}

	push_host_regs_mode svc
	push_host_regs_mode abt
	push_host_regs_mode und
	push_host_regs_mode irq

	/* fiq regs */
	mrs	r2, r8_fiq
	mrs	r3, r9_fiq
	mrs	r4, r10_fiq
	mrs	r5, r11_fiq
	mrs	r6, r12_fiq
	mrs	r7, SP_fiq
	mrs	r8, LR_fiq
	mrs	r9, SPSR_fiq
	push	{r2-r9}
.endm
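
/*
 * Resulting stack layout (most recently pushed last); restore_host_regs
 * below unwinds it in exactly the reverse order:
 *	ELR_hyp
 *	r4-r12
 *	SP_usr, lr
 *	{SP, LR, SPSR} for svc, abt, und, irq
 *	r8_fiq-r12_fiq, SP_fiq, LR_fiq, SPSR_fiq	<- top of stack
 */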

.macro pop_host_regs_mode mode
	pop	{r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm

/*
 * Restore all host registers from the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro restore_host_regs
	pop	{r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	pop_host_regs_mode irq
	pop_host_regs_mode und
	pop_host_regs_mode abt
	pop_host_regs_mode svc

	pop	{r2, r3}
	msr	SP_usr, r2
	mov	lr, r3
	pop	{r4-r12}

	pop	{r2}
	msr	ELR_hyp, r2
.endm

/*
 * Restore SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r1, r2, r3, r4.
 */
.macro restore_guest_regs_mode mode, offset
	add	r1, vcpu, \offset
	ldm	r1, {r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm

/*
 * Restore all guest registers from the vcpu struct.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers *all* registers.
 */
.macro restore_guest_regs
	restore_guest_regs_mode svc, #VCPU_SVC_REGS
	restore_guest_regs_mode abt, #VCPU_ABT_REGS
	restore_guest_regs_mode und, #VCPU_UND_REGS
	restore_guest_regs_mode irq, #VCPU_IRQ_REGS

	add	r1, vcpu, #VCPU_FIQ_REGS
	ldm	r1, {r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	@ Load return state
	ldr	r2, [vcpu, #VCPU_PC]
	ldr	r3, [vcpu, #VCPU_CPSR]
	msr	ELR_hyp, r2
	msr	SPSR_cxsf, r3

	@ Load user registers
	ldr	r2, [vcpu, #VCPU_USR_SP]
	ldr	r3, [vcpu, #VCPU_USR_LR]
	msr	SP_usr, r2
	mov	lr, r3
	add	vcpu, vcpu, #(VCPU_USR_REGS)
	ldm	vcpu, {r0-r12}
.endm
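
@ Note the ordering at the end of restore_guest_regs: the final ldm reloads
@ r0-r12, and since the vcpu pointer lives in r0 it is destroyed by that
@ load, so it must be the very last step before the return to the guest.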

/*
 * Save SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs_mode mode, offset
	add	r2, vcpu, \offset
	mrs	r3, SP_\mode
	mrs	r4, LR_\mode
	mrs	r5, SPSR_\mode
	stm	r2, {r3, r4, r5}
.endm

/*
 * Save all guest registers to the vcpu struct
 * Expects guest's r0, r1, r2 on the stack.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs
	@ Store usr registers
	add	r2, vcpu, #VCPU_USR_REG(3)
	stm	r2, {r3-r12}
	add	r2, vcpu, #VCPU_USR_REG(0)
	pop	{r3, r4, r5}		@ r0, r1, r2
	stm	r2, {r3, r4, r5}
	mrs	r2, SP_usr
	mov	r3, lr
	str	r2, [vcpu, #VCPU_USR_SP]
	str	r3, [vcpu, #VCPU_USR_LR]

	@ Store return state
	mrs	r2, ELR_hyp
	mrs	r3, spsr
	str	r2, [vcpu, #VCPU_PC]
	str	r3, [vcpu, #VCPU_CPSR]

	@ Store other guest registers
	save_guest_regs_mode svc, #VCPU_SVC_REGS
	save_guest_regs_mode abt, #VCPU_ABT_REGS
	save_guest_regs_mode und, #VCPU_UND_REGS
	save_guest_regs_mode irq, #VCPU_IRQ_REGS
.endm
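
/*
 * Illustrative exit path (a sketch only; the real HYP vectors also decode
 * the exception syndrome): the vector stub pushes the guest's r0-r2 to
 * satisfy the precondition above, then recovers the vcpu pointer:
 *	push	{r0, r1, r2}
 *	load_vcpu			@ defined at the end of this file
 *	save_guest_regs
 */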

/* Reads cp15 registers from hardware and stores them in memory
 * @store_to_vcpu: If 0, registers are written in-order to the stack,
 * 		   otherwise to the VCPU struct pointed to by the vcpu reg
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2 - r12
 */
.macro read_cp15_state store_to_vcpu
	mrc	p15, 0, r2, c1, c0, 0	@ SCTLR
	mrc	p15, 0, r3, c1, c0, 2	@ CPACR
	mrc	p15, 0, r4, c2, c0, 2	@ TTBCR
	mrc	p15, 0, r5, c3, c0, 0	@ DACR
	mrrc	p15, 0, r6, r7, c2	@ TTBR 0
	mrrc	p15, 1, r8, r9, c2	@ TTBR 1
	mrc	p15, 0, r10, c10, c2, 0	@ PRRR
	mrc	p15, 0, r11, c10, c2, 1	@ NMRR
	mrc	p15, 2, r12, c0, c0, 0	@ CSSELR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	str	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	str	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	str	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR0)
	strd	r6, r7, [r2]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR1)
	strd	r8, r9, [r2]
	str	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	str	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	str	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mrc	p15, 0, r2, c13, c0, 1	@ CID
	mrc	p15, 0, r3, c13, c0, 2	@ TID_URW
	mrc	p15, 0, r4, c13, c0, 3	@ TID_URO
	mrc	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mrc	p15, 0, r6, c5, c0, 0	@ DFSR
	mrc	p15, 0, r7, c5, c0, 1	@ IFSR
	mrc	p15, 0, r8, c5, c1, 0	@ ADFSR
	mrc	p15, 0, r9, c5, c1, 1	@ AIFSR
	mrc	p15, 0, r10, c6, c0, 0	@ DFAR
	mrc	p15, 0, r11, c6, c0, 2	@ IFAR
	mrc	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	str	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	str	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	str	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	str	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	str	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	str	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	str	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	str	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	str	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	str	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mrc	p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mrrc	p15, 0, r4, r5, c7	@ PAR
	mrc	p15, 0, r6, c10, c3, 0	@ AMAIR0
	mrc	p15, 0, r7, c10, c3, 1	@ AMAIR1

	.if \store_to_vcpu == 0
	push	{r2,r4-r7}
	.else
	str	r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add	r12, vcpu, #CP15_OFFSET(c7_PAR)
	strd	r4, r5, [r12]
	str	r6, [vcpu, #CP15_OFFSET(c10_AMAIR0)]
	str	r7, [vcpu, #CP15_OFFSET(c10_AMAIR1)]
	.endif
.endm
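
@ The three push blocks above stack the registers in groups; when the state
@ was staged on the stack, write_cp15_state below pops the groups in the
@ opposite (LIFO) order, so the two macros must be kept in sync.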

/*
 * Reads cp15 registers from memory and writes them to hardware
 * @read_from_vcpu: If 0, registers are read in-order from the stack,
 *		    otherwise from the VCPU struct pointed to by the vcpu reg
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro write_cp15_state read_from_vcpu
	.if \read_from_vcpu == 0
	pop	{r2,r4-r7}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add	r12, vcpu, #CP15_OFFSET(c7_PAR)
	ldrd	r4, r5, [r12]
	ldr	r6, [vcpu, #CP15_OFFSET(c10_AMAIR0)]
	ldr	r7, [vcpu, #CP15_OFFSET(c10_AMAIR1)]
	.endif

	mcr	p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mcrr	p15, 0, r4, r5, c7	@ PAR
	mcr	p15, 0, r6, c10, c3, 0	@ AMAIR0
	mcr	p15, 0, r7, c10, c3, 1	@ AMAIR1

	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	ldr	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	ldr	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	ldr	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	ldr	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	ldr	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	ldr	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	ldr	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	ldr	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mcr	p15, 0, r2, c13, c0, 1	@ CID
	mcr	p15, 0, r3, c13, c0, 2	@ TID_URW
	mcr	p15, 0, r4, c13, c0, 3	@ TID_URO
	mcr	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mcr	p15, 0, r6, c5, c0, 0	@ DFSR
	mcr	p15, 0, r7, c5, c0, 1	@ IFSR
	mcr	p15, 0, r8, c5, c1, 0	@ ADFSR
	mcr	p15, 0, r9, c5, c1, 1	@ AIFSR
	mcr	p15, 0, r10, c6, c0, 0	@ DFAR
	mcr	p15, 0, r11, c6, c0, 2	@ IFAR
	mcr	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	ldr	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	ldr	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	ldr	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR0)
	ldrd	r6, r7, [r12]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR1)
	ldrd	r8, r9, [r12]
	ldr	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mcr	p15, 0, r2, c1, c0, 0	@ SCTLR
	mcr	p15, 0, r3, c1, c0, 2	@ CPACR
	mcr	p15, 0, r4, c2, c0, 2	@ TTBCR
	mcr	p15, 0, r5, c3, c0, 0	@ DACR
	mcrr	p15, 0, r6, r7, c2	@ TTBR 0
	mcrr	p15, 1, r8, r9, c2	@ TTBR 1
	mcr	p15, 0, r10, c10, c2, 0	@ PRRR
	mcr	p15, 0, r11, c10, c2, 1	@ NMRR
	mcr	p15, 2, r12, c0, c0, 0	@ CSSELR
.endm
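
/*
 * Typical pairing on world switch (a sketch): the host copy is staged on
 * the stack while the guest copy lives in the vcpu struct, so entry does
 *	read_cp15_state store_to_vcpu = 0	@ host cp15 -> stack
 *	write_cp15_state read_from_vcpu = 1	@ vcpu struct -> cp15
 * and the mirror-image pair runs on the way back out.
 */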

/*
 * Save the VGIC CPU state into memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro save_vgic_state
#ifdef CONFIG_KVM_ARM_VGIC
	/* Get VGIC VCTRL base into r2 */
	ldr	r2, [vcpu, #VCPU_KVM]
	ldr	r2, [r2, #KVM_VGIC_VCTRL]
	cmp	r2, #0
	beq	2f

	/* Compute the address of struct vgic_cpu */
	add	r11, vcpu, #VCPU_VGIC_CPU

	/* Save all interesting registers */
	ldr	r3, [r2, #GICH_HCR]
	ldr	r4, [r2, #GICH_VMCR]
	ldr	r5, [r2, #GICH_MISR]
	ldr	r6, [r2, #GICH_EISR0]
	ldr	r7, [r2, #GICH_EISR1]
	ldr	r8, [r2, #GICH_ELRSR0]
	ldr	r9, [r2, #GICH_ELRSR1]
	ldr	r10, [r2, #GICH_APR]

	str	r3, [r11, #VGIC_CPU_HCR]
	str	r4, [r11, #VGIC_CPU_VMCR]
	str	r5, [r11, #VGIC_CPU_MISR]
	str	r6, [r11, #VGIC_CPU_EISR]
	str	r7, [r11, #(VGIC_CPU_EISR + 4)]
	str	r8, [r11, #VGIC_CPU_ELRSR]
	str	r9, [r11, #(VGIC_CPU_ELRSR + 4)]
	str	r10, [r11, #VGIC_CPU_APR]

	/* Clear GICH_HCR */
	mov	r5, #0
	str	r5, [r2, #GICH_HCR]

	/* Save list registers */
	add	r2, r2, #GICH_LR0
	add	r3, r11, #VGIC_CPU_LR
	ldr	r4, [r11, #VGIC_CPU_NR_LR]
1:	ldr	r6, [r2], #4
	str	r6, [r3], #4
	subs	r4, r4, #1
	bne	1b
2:
#endif
.endm
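
@ The list-register copy loop above is a do-while and so assumes at least
@ one LR is implemented; the GIC architecture guarantees this (GICH_VTR
@ encodes the number of list registers minus one).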

/*
 * Restore the VGIC CPU state from memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro restore_vgic_state
#ifdef CONFIG_KVM_ARM_VGIC
	/* Get VGIC VCTRL base into r2 */
	ldr	r2, [vcpu, #VCPU_KVM]
	ldr	r2, [r2, #KVM_VGIC_VCTRL]
	cmp	r2, #0
	beq	2f

	/* Compute the address of struct vgic_cpu */
	add	r11, vcpu, #VCPU_VGIC_CPU

	/* We only restore a minimal set of registers */
	ldr	r3, [r11, #VGIC_CPU_HCR]
	ldr	r4, [r11, #VGIC_CPU_VMCR]
	ldr	r8, [r11, #VGIC_CPU_APR]

	str	r3, [r2, #GICH_HCR]
	str	r4, [r2, #GICH_VMCR]
	str	r8, [r2, #GICH_APR]

	/* Restore list registers */
	add	r2, r2, #GICH_LR0
	add	r3, r11, #VGIC_CPU_LR
	ldr	r4, [r11, #VGIC_CPU_NR_LR]
1:	ldr	r6, [r3], #4
	str	r6, [r2], #4
	subs	r4, r4, #1
	bne	1b
2:
#endif
.endm

#define CNTHCTL_PL1PCTEN	(1 << 0)
#define CNTHCTL_PL1PCEN		(1 << 1)
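
/*
 * CNTHCTL gates non-secure PL1 access to the physical timer and counter:
 * PL1PCTEN covers the physical counter (CNTPCT) and PL1PCEN the physical
 * timer registers; a cleared bit makes the corresponding access trap to Hyp.
 */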

/*
 * Save the timer state onto the VCPU and allow physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro save_timer_state
#ifdef CONFIG_KVM_ARM_TIMER
	ldr	r4, [vcpu, #VCPU_KVM]
	ldr	r2, [r4, #KVM_TIMER_ENABLED]
	cmp	r2, #0
	beq	1f

	mrc	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
	str	r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	bic	r2, #1			@ Clear ENABLE
	mcr	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
	isb

	mrrc	p15, 3, r2, r3, c14	@ CNTV_CVAL
	ldr	r4, =VCPU_TIMER_CNTV_CVAL
	add	r5, vcpu, r4
	strd	r2, r3, [r5]

	@ Ensure host CNTVCT == CNTPCT
	mov	r2, #0
	mcrr	p15, 4, r2, r2, c14	@ CNTVOFF

1:
#endif
	@ Allow physical timer/counter access for the host
	mrc	p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr	r2, r2, #(CNTHCTL_PL1PCEN | CNTHCTL_PL1PCTEN)
	mcr	p15, 4, r2, c14, c1, 0	@ CNTHCTL
.endm

/*
 * Load the timer state from the VCPU and deny physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro restore_timer_state
	@ Disallow physical timer access for the guest
	@ Physical counter access is allowed
	mrc	p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr	r2, r2, #CNTHCTL_PL1PCTEN
	bic	r2, r2, #CNTHCTL_PL1PCEN
	mcr	p15, 4, r2, c14, c1, 0	@ CNTHCTL

#ifdef CONFIG_KVM_ARM_TIMER
	ldr	r4, [vcpu, #VCPU_KVM]
	ldr	r2, [r4, #KVM_TIMER_ENABLED]
	cmp	r2, #0
	beq	1f

	ldr	r2, [r4, #KVM_TIMER_CNTVOFF]
	ldr	r3, [r4, #(KVM_TIMER_CNTVOFF + 4)]
	mcrr	p15, 4, r2, r3, c14	@ CNTVOFF

	ldr	r4, =VCPU_TIMER_CNTV_CVAL
	add	r5, vcpu, r4
	ldrd	r2, r3, [r5]
	mcrr	p15, 3, r2, r3, c14	@ CNTV_CVAL
	isb

	ldr	r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	and	r2, r2, #3
	mcr	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
1:
#endif
.endm
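
@ Only ENABLE and IMASK (bits 1:0) are written back to CNTV_CTL above; the
@ "and r2, r2, #3" strips ISTATUS, which is a read-only status bit.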

.equ vmentry,	0
.equ vmexit,	1

/* Configures the HSTR (Hyp System Trap Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hstr operation
	mrc	p15, 4, r2, c1, c1, 3
	ldr	r3, =HSTR_T(15)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap CR{15}
	.else
	bic	r2, r2, r3		@ Don't trap any CRx accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 3
.endm
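
/*
 * HSTR_T(x) sets the per-CRn trap bit for cp15 accesses with CRn == x
 * (one bit per primary register number), so the usage is simply:
 *	set_hstr vmentry	@ guest accesses to cp15 c15 now trap to Hyp
 *	set_hstr vmexit		@ host runs with no CRn traps
 */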

/* Configures the HCPTR (Hyp Coprocessor Trap Register) on entry/return
 * (hardware reset value is 0). Keep previous value in r2. */
.macro set_hcptr operation, mask
	mrc	p15, 4, r2, c1, c1, 2
	ldr	r3, =\mask
	.if \operation == vmentry
	orr	r3, r2, r3		@ Trap coproc-accesses defined in mask
	.else
	bic	r3, r2, r3		@ Don't trap defined coproc-accesses
	.endif
	mcr	p15, 4, r3, c1, c1, 2
.endm
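
/*
 * Illustrative invocation (a sketch; the exact mask is the caller's
 * choice, with HCPTR_TCP(x) trapping accesses to coprocessor x):
 *	set_hcptr vmentry, (HCPTR_TCP(10) | HCPTR_TCP(11))
 * Trapping cp10/cp11 makes guest VFP/NEON accesses trap, which is what
 * allows the VFP state above to be switched lazily.
 */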

/* Configures the HDCR (Hyp Debug Configuration Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hdcr operation
	mrc	p15, 4, r2, c1, c1, 1
	ldr	r3, =(HDCR_TPM|HDCR_TPMCR)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap some perfmon accesses
	.else
	bic	r2, r2, r3		@ Don't trap any perfmon accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 1
.endm

/* Enable/Disable: stage-2 trans., trap interrupts, trap wfi, trap smc */
.macro configure_hyp_role operation
	.if \operation == vmentry
	ldr	r2, [vcpu, #VCPU_HCR]
	ldr	r3, [vcpu, #VCPU_IRQ_LINES]
	orr	r2, r2, r3
	.else
	mov	r2, #0
	.endif
	mcr	p15, 4, r2, c1, c1, 0	@ HCR
.endm
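
@ ORing VCPU_IRQ_LINES into the guest's HCR lets the host raise the virtual
@ IRQ/FIQ lines (HCR.VI/VF) for the guest; writing 0 on exit returns the
@ CPU to the plain host configuration, with stage-2 translation disabled.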

.macro load_vcpu
	mrc	p15, 4, vcpu, c13, c0, 2	@ HTPIDR
.endm
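
@ HTPIDR is the Hyp software thread ID register (cp15 c13); it has no
@ architectural function of its own, so KVM uses it to stash the current
@ vcpu pointer across the world switch.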