Linux Audio

Check our new training course

Loading...
v5.9
  1/* SPDX-License-Identifier: GPL-2.0-or-later */
  2/*
  3 *  FPU support code, moved here from head.S so that it can be used
  4 *  by chips which use other head-whatever.S files.
  5 *
  6 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
  7 *    Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
  8 *    Copyright (C) 1996 Paul Mackerras.
  9 *    Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
 
 
 
 
 
 
 10 */
 11
 12#include <asm/reg.h>
 13#include <asm/page.h>
 14#include <asm/mmu.h>
 
 15#include <asm/cputable.h>
 16#include <asm/cache.h>
 17#include <asm/thread_info.h>
 18#include <asm/ppc_asm.h>
 19#include <asm/asm-offsets.h>
 20#include <asm/ptrace.h>
 21#include <asm/export.h>
 22#include <asm/asm-compat.h>
 23#include <asm/feature-fixups.h>
 24
  25#ifdef CONFIG_VSX
/*
 * Restore/save all 32 FP registers starting at 'n' from/to 'base', or
 * all 32 VSRs on CPUs with VSX.  The runtime feature section keeps the
 * "b 2f" only when CPU_FTR_VSX is set, so VSX parts branch to the
 * *_32VSRS form while all others fall through to the plain *_32FPRS
 * form and skip over it.  'c' is an extra register argument passed
 * only to the VSR variant.
 */
  26#define __REST_32FPVSRS(n,c,base)					\
  27BEGIN_FTR_SECTION							\
  28	b	2f;							\
  29END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
  30	REST_32FPRS(n,base);						\
  31	b	3f;							\
  322:	REST_32VSRS(n,c,base);						\
  333:
  34
  35#define __SAVE_32FPVSRS(n,c,base)					\
  36BEGIN_FTR_SECTION							\
  37	b	2f;							\
  38END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
  39	SAVE_32FPRS(n,base);						\
  40	b	3f;							\
  412:	SAVE_32VSRS(n,c,base);						\
  423:
  43#else
/* No VSX configured: only the classic FPRs; the middle argument is unused. */
  44#define __REST_32FPVSRS(n,b,base)	REST_32FPRS(n, base)
  45#define __SAVE_32FPVSRS(n,b,base)	SAVE_32FPRS(n, base)
  46#endif
/*
 * Callers pass symbolic register names (R4, R10, ...); token-paste
 * __REG_ onto them to get the numeric register defines.
 */
  47#define REST_32FPVSRS(n,c,base) __REST_32FPVSRS(n,__REG_##c,__REG_##base)
  48#define SAVE_32FPVSRS(n,c,base) __SAVE_32FPVSRS(n,__REG_##c,__REG_##base)
 49
 50/*
 51 * Load state from memory into FP registers including FPSCR.
 52 * Assumes the caller has enabled FP in the MSR.
 53 */
  54_GLOBAL(load_fp_state)
	/* r3 = pointer to an FP state block; the saved FPSCR image lives
	 * at offset FPSTATE_FPSCR within it. */
  55	lfd	fr0,FPSTATE_FPSCR(r3)	/* fetch saved FPSCR image via fr0 */
  56	MTFSF_L(fr0)			/* write it into the FPSCR */
  57	REST_32FPVSRS(0, R4, R3)	/* reload fr0-fr31 (or the VSRs) */
  58	blr
  59EXPORT_SYMBOL(load_fp_state)
  60_ASM_NOKPROBE_SYMBOL(load_fp_state); /* used by restore_math */
 61
 62/*
 63 * Store FP state into memory, including FPSCR
 64 * Assumes the caller has enabled FP in the MSR.
 65 */
  66_GLOBAL(store_fp_state)
	/* r3 = pointer to the FP state block to fill. */
  67	SAVE_32FPVSRS(0, R4, R3)	/* dump fr0-fr31 (or the VSRs) */
  68	mffs	fr0			/* read current FPSCR into fr0 */
  69	stfd	fr0,FPSTATE_FPSCR(r3)	/* ...and store it after the regs */
  70	blr
  71EXPORT_SYMBOL(store_fp_state)
 72
 73/*
 74 * This task wants to use the FPU now.
 75 * On UP, disable FP for the task which had the FPU previously,
 76 * and save its floating-point registers in its thread_struct.
 77 * Load up this task's FP registers from its thread_struct,
 78 * enable the FPU for the current task and return to the task.
 79 * Note that on 32-bit this can only use registers that will be
 80 * restored by fast_exception_return, i.e. r3 - r6, r10 and r11.
 81 */
  82_GLOBAL(load_up_fpu)
  83	mfmsr	r5
  84	ori	r5,r5,MSR_FP		/* turn FP on in the kernel's MSR */
  85#ifdef CONFIG_VSX
  86BEGIN_FTR_SECTION
  87	oris	r5,r5,MSR_VSX@h		/* ...plus VSX on VSX-capable CPUs */
  88END_FTR_SECTION_IFSET(CPU_FTR_VSX)
  89#endif
  90	SYNC
  91	MTMSRD(r5)			/* enable use of fpu now */
  92	isync
  93	/* enable use of FP after return */
  94#ifdef CONFIG_PPC32
  95	mfspr	r5,SPRN_SPRG_THREAD	/* current task's THREAD (phys) */
  96#ifdef CONFIG_VMAP_STACK
  97	tovirt(r5, r5)			/* SPRG_THREAD is physical; need virt here */
  98#endif
  99	lwz	r4,THREAD_FPEXC_MODE(r5)
 100	ori	r9,r9,MSR_FP		/* enable FP for current */
 101	or	r9,r9,r4		/* merge thread's FP exception mode bits */
 102#else
 103	ld	r4,PACACURRENT(r13)	/* r13 = PACA; get current task */
 104	addi	r5,r4,THREAD		/* Get THREAD */
 105	lwz	r4,THREAD_FPEXC_MODE(r5)
 106	ori	r12,r12,MSR_FP		/* r12: MSR image for the return path */
 107	or	r12,r12,r4		/* merge thread's FP exception mode bits */
 108	std	r12,_MSR(r1)		/* write it back into the exception frame */
 109#endif
	/* NOTE(review): on 32-bit, r9 appears to carry the return MSR image
	 * consumed by the exception-return path — confirm against caller. */
 110	li	r4,1
 111	stb	r4,THREAD_LOAD_FP(r5)	/* flag thread's FP state as loaded */
 112	addi	r10,r5,THREAD_FPSTATE	/* r10 -> thread's saved FP state */
 113	lfd	fr0,FPSTATE_FPSCR(r10)
 114	MTFSF_L(fr0)			/* restore FPSCR first */
 115	REST_32FPVSRS(0, R4, R10)	/* then fr0-fr31 (or the VSRs) */
 116	/* restore registers and return */
 117	/* we haven't used ctr or xer or lr */
 118	blr
 119_ASM_NOKPROBE_SYMBOL(load_up_fpu)
120
121/*
122 * save_fpu(tsk)
123 * Save the floating-point registers in its thread_struct.
 
124 * Enables the FPU for use in the kernel on return.
125 */
 126_GLOBAL(save_fpu)
	/*
	 * save_fpu(tsk): r3 = task_struct of the task whose FP state is
	 * to be saved.  Saves into the thread's THREAD_FPSAVEAREA when
	 * one is set, otherwise into the thread's own THREAD_FPSTATE.
	 */
 127	addi	r3,r3,THREAD	        /* want THREAD of task */
 128	PPC_LL	r6,THREAD_FPSAVEAREA(r3)	/* preferred save target */
 129	PPC_LL	r5,PT_REGS(r3)	/* NOTE(review): r5 unused below — confirm */
 130	PPC_LCMPI	0,r6,0
 131	bne	2f			/* have a save area -> use it */
 132	addi	r6,r3,THREAD_FPSTATE	/* else fall back to thread.fp_state */
 1332:	SAVE_32FPVSRS(0, R4, R6)	/* dump fr0-fr31 (or the VSRs) */
 134	mffs	fr0
 135	stfd	fr0,FPSTATE_FPSCR(r6)	/* store FPSCR alongside the regs */
 136	blr
137
138/*
139 * These are used in the alignment trap handler when emulating
140 * single-precision loads and stores.
141 */
142
 143_GLOBAL(cvt_fd)
	/* Widen: load the single at 0(r3), store it as a double at 0(r4). */
 144	lfs	0,0(r3)
 145	stfd	0,0(r4)
 146	blr
147
 148_GLOBAL(cvt_df)
	/* Narrow: load the double at 0(r3), store it as a single at 0(r4). */
 149	lfd	0,0(r3)
 150	stfs	0,0(r4)
 151	blr
v3.5.6
 
  1/*
  2 *  FPU support code, moved here from head.S so that it can be used
  3 *  by chips which use other head-whatever.S files.
  4 *
  5 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
  6 *    Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
  7 *    Copyright (C) 1996 Paul Mackerras.
  8 *    Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
  9 *
 10 *  This program is free software; you can redistribute it and/or
 11 *  modify it under the terms of the GNU General Public License
 12 *  as published by the Free Software Foundation; either version
 13 *  2 of the License, or (at your option) any later version.
 14 *
 15 */
 16
 17#include <asm/reg.h>
 18#include <asm/page.h>
 19#include <asm/mmu.h>
 20#include <asm/pgtable.h>
 21#include <asm/cputable.h>
 22#include <asm/cache.h>
 23#include <asm/thread_info.h>
 24#include <asm/ppc_asm.h>
 25#include <asm/asm-offsets.h>
 26#include <asm/ptrace.h>
 
 
 
 27
  28#ifdef CONFIG_VSX
/*
 * Restore/save all 32 FP registers starting at 'n' from/to 'base', or
 * all 32 VSRs on CPUs with VSX.  The runtime feature section keeps the
 * "b 2f" only when CPU_FTR_VSX is set, so VSX parts branch to the
 * *_32VSRS form while others fall through to the plain *_32FPRS form
 * and skip over it.  'c' is only passed to the VSR variant.
 */
  29#define REST_32FPVSRS(n,c,base)						\
  30BEGIN_FTR_SECTION							\
  31	b	2f;							\
  32END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
  33	REST_32FPRS(n,base);						\
  34	b	3f;							\
  352:	REST_32VSRS(n,c,base);						\
  363:
  37
  38#define SAVE_32FPVSRS(n,c,base)						\
  39BEGIN_FTR_SECTION							\
  40	b	2f;							\
  41END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
  42	SAVE_32FPRS(n,base);						\
  43	b	3f;							\
  442:	SAVE_32VSRS(n,c,base);						\
  453:
  46#else
/* No VSX configured: only the classic FPRs; the middle argument is unused. */
  47#define REST_32FPVSRS(n,b,base)	REST_32FPRS(n, base)
  48#define SAVE_32FPVSRS(n,b,base)	SAVE_32FPRS(n, base)
  49#endif
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 50
 51/*
 52 * This task wants to use the FPU now.
 53 * On UP, disable FP for the task which had the FPU previously,
 54 * and save its floating-point registers in its thread_struct.
 55 * Load up this task's FP registers from its thread_struct,
 56 * enable the FPU for the current task and return to the task.
 
 
 57 */
  58_GLOBAL(load_up_fpu)
  59	mfmsr	r5
  60	ori	r5,r5,MSR_FP		/* turn FP on in the kernel's MSR */
  61#ifdef CONFIG_VSX
  62BEGIN_FTR_SECTION
  63	oris	r5,r5,MSR_VSX@h		/* ...plus VSX on VSX-capable CPUs */
  64END_FTR_SECTION_IFSET(CPU_FTR_VSX)
  65#endif
  66	SYNC
  67	MTMSRD(r5)			/* enable use of fpu now */
  68	isync
  69/*
  70 * For SMP, we don't do lazy FPU switching because it just gets too
  71 * horrendously complex, especially when a task switches from one CPU
  72 * to another.  Instead we call giveup_fpu in switch_to.
  73 */
  74#ifndef CONFIG_SMP
	/* UP lazy switch: if another task still owns the FPU, save its
	 * state and strip FP from its saved MSR before taking over. */
  75	LOAD_REG_ADDRBASE(r3, last_task_used_math)
  76	toreal(r3)			/* address translation for current mode */
  77	PPC_LL	r4,ADDROFF(last_task_used_math)(r3)
  78	PPC_LCMPI	0,r4,0
  79	beq	1f			/* nobody owns the FPU -> just load */
  80	toreal(r4)
  81	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
  82	SAVE_32FPVSRS(0, r5, r4)	/* save previous owner's FP regs */
  83	mffs	fr0
  84	stfd	fr0,THREAD_FPSCR(r4)	/* ...and its FPSCR */
  85	PPC_LL	r5,PT_REGS(r4)
  86	toreal(r5)
  87	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
  88	li	r10,MSR_FP|MSR_FE0|MSR_FE1
  89	andc	r4,r4,r10		/* disable FP for previous task */
  90	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
  911:
  92#endif /* CONFIG_SMP */
  93	/* enable use of FP after return */
  94#ifdef CONFIG_PPC32
  95	mfspr	r5,SPRN_SPRG_THREAD		/* current task's THREAD (phys) */
  96	lwz	r4,THREAD_FPEXC_MODE(r5)
  97	ori	r9,r9,MSR_FP		/* enable FP for current */
  98	or	r9,r9,r4		/* merge thread's FP exception mode bits */
  99#else
 100	ld	r4,PACACURRENT(r13)	/* r13 = PACA; get current task */
 101	addi	r5,r4,THREAD		/* Get THREAD */
 102	lwz	r4,THREAD_FPEXC_MODE(r5)
 103	ori	r12,r12,MSR_FP		/* r12: MSR image for the return path */
 104	or	r12,r12,r4
 105	std	r12,_MSR(r1)		/* write it back into the exception frame */
 106#endif
 107	lfd	fr0,THREAD_FPSCR(r5)
 108	MTFSF_L(fr0)			/* restore this thread's FPSCR first */
 109	REST_32FPVSRS(0, r4, r5)	/* then its fr0-fr31 (or VSRs) */
 110#ifndef CONFIG_SMP
	/* record current task as the new lazy-FPU owner */
 111	subi	r4,r5,THREAD		/* back from THREAD to task_struct */
 112	fromreal(r4)
 113	PPC_STL	r4,ADDROFF(last_task_used_math)(r3)
 114#endif /* CONFIG_SMP */
 115	/* restore registers and return */
 116	/* we haven't used ctr or xer or lr */
 117	blr
 
118
119/*
120 * giveup_fpu(tsk)
121 * Disable FP for the task given as the argument,
122 * and save the floating-point registers in its thread_struct.
123 * Enables the FPU for use in the kernel on return.
124 */
 125_GLOBAL(giveup_fpu)
	/* r3 = task to give up the FPU (may be NULL: nothing to do). */
 126	mfmsr	r5
 127	ori	r5,r5,MSR_FP		/* turn FP on so we can touch the regs */
 128#ifdef CONFIG_VSX
 129BEGIN_FTR_SECTION
 130	oris	r5,r5,MSR_VSX@h		/* ...plus VSX on VSX-capable CPUs */
 131END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 132#endif
 133	SYNC_601
 134	ISYNC_601
 135	MTMSRD(r5)			/* enable use of fpu now */
 136	SYNC_601
 137	isync
 138	PPC_LCMPI	0,r3,0
 139	beqlr-				/* if no previous owner, done */
 140	addi	r3,r3,THREAD	        /* want THREAD of task */
 141	PPC_LL	r5,PT_REGS(r3)
 142	PPC_LCMPI	0,r5,0		/* CR0 here is consumed at "beq 1f" below */
 143	SAVE_32FPVSRS(0, r4 ,r3)	/* save the task's fr0-fr31 (or VSRs) */
 144	mffs	fr0
 145	stfd	fr0,THREAD_FPSCR(r3)	/* ...and its FPSCR */
 146	beq	1f			/* no pt_regs -> skip the MSR fixup */
	/* clear FP (and VSX) enables in the task's saved MSR so the next
	 * FP use re-faults into load_up_fpu */
 147	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 148	li	r3,MSR_FP|MSR_FE0|MSR_FE1
 149#ifdef CONFIG_VSX
 150BEGIN_FTR_SECTION
 151	oris	r3,r3,MSR_VSX@h
 152END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 153#endif
 154	andc	r4,r4,r3		/* disable FP for previous task */
 155	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 1561:
 157#ifndef CONFIG_SMP
	/* UP lazy switch: nobody owns the FPU any more */
 158	li	r5,0
 159	LOAD_REG_ADDRBASE(r4,last_task_used_math)
 160	PPC_STL	r5,ADDROFF(last_task_used_math)(r4)
 161#endif /* CONFIG_SMP */
 162	blr
163
164/*
165 * These are used in the alignment trap handler when emulating
166 * single-precision loads and stores.
167 */
168
 169_GLOBAL(cvt_fd)
	/* Widen: load the single at 0(r3), store it as a double at 0(r4). */
 170	lfs	0,0(r3)
 171	stfd	0,0(r4)
 172	blr
173
 174_GLOBAL(cvt_df)
	/* Narrow: load the double at 0(r3), store it as a single at 0(r4). */
 175	lfd	0,0(r3)
 176	stfs	0,0(r4)
 177	blr