/*
 *  FPU support code, moved here from head.S so that it can be used
 *  by chips which use other head-whatever.S files.
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *    Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
 *    Copyright (C) 1996 Paul Mackerras.
 *    Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
 *
 *  This program is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU General Public License
 *  as published by the Free Software Foundation; either version
 *  2 of the License, or (at your option) any later version.
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/pgtable.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>

#ifdef CONFIG_VSX
#define REST_32FPVSRS(n,c,base)						\
BEGIN_FTR_SECTION							\
	b	2f;							\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
	REST_32FPRS(n,base);						\
	b	3f;							\
2:	REST_32VSRS(n,c,base);						\
3:

#define SAVE_32FPVSRS(n,c,base)						\
BEGIN_FTR_SECTION							\
	b	2f;							\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
	SAVE_32FPRS(n,base);						\
	b	3f;							\
2:	SAVE_32VSRS(n,c,base);						\
3:
#else
#define REST_32FPVSRS(n,b,base)	REST_32FPRS(n, base)
#define SAVE_32FPVSRS(n,b,base)	SAVE_32FPRS(n, base)
#endif
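
/*
 * The feature sections above are patched at boot: on CPUs with
 * CPU_FTR_VSX the "b 2f" becomes active, so VSRs 0-31 (whose first
 * doubleword holds the corresponding FPR) are saved/restored with
 * SAVE_32VSRS/REST_32VSRS instead of the bare FPRs.
 */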

/*
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 */
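/*
 * (Reached from the FP-unavailable exception handlers; judging by the
 * register usage below, r9 on 32-bit and r12 on 64-bit carry the MSR
 * image that the caller's exception-return path will restore, which is
 * how MSR_FP gets turned on for the interrupted task.)
 */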
_GLOBAL(load_up_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC
	MTMSRD(r5)			/* enable use of fpu now */
	isync
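/*
 * From this point MSR_FP (and MSR_VSX, where present) is set in the
 * kernel's own MSR, so the save/restore macros below can touch the
 * FP/VSX registers without faulting.
 */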
/*
 * For SMP, we don't do lazy FPU switching because it just gets too
 * horrendously complex, especially when a task switches from one CPU
 * to another.  Instead we call giveup_fpu in switch_to.
 */
#ifndef CONFIG_SMP
	LOAD_REG_ADDRBASE(r3, last_task_used_math)
	toreal(r3)
	PPC_LL	r4,ADDROFF(last_task_used_math)(r3)
	PPC_LCMPI	0,r4,0
	beq	1f
	toreal(r4)
	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
	SAVE_32FPVSRS(0, r5, r4)
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r4)
	PPC_LL	r5,PT_REGS(r4)
	toreal(r5)
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r10,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r10		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#endif /* CONFIG_SMP */
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG3		/* current task's THREAD (phys) */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r9,r9,MSR_FP		/* enable FP for current */
	or	r9,r9,r4
#else
	ld	r4,PACACURRENT(r13)
	addi	r5,r4,THREAD		/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r12,r12,MSR_FP
	or	r12,r12,r4
	std	r12,_MSR(r1)
#endif
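	/*
	 * THREAD_FPEXC_MODE holds the task's MSR_FE0/MSR_FE1 exception
	 * mode bits (settable via prctl(PR_SET_FPEXC)); OR-ing them into
	 * the saved MSR restores the task's FP exception mode along with
	 * MSR_FP itself.
	 */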
	lfd	fr0,THREAD_FPSCR(r5)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, r4, r5)
#ifndef CONFIG_SMP
	subi	r4,r5,THREAD		/* back to the task_struct */
	fromreal(r4)
	PPC_STL	r4,ADDROFF(last_task_used_math)(r3)
#endif /* CONFIG_SMP */
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	blr

/*
 * giveup_fpu(tsk)
 * Disable FP for the task given as the argument,
 * and save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 */
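/*
 * (A sketch of the C-level view, assumed from the register usage: the
 * prototype is effectively "void giveup_fpu(struct task_struct *tsk)"
 * with tsk arriving in r3; on SMP it is called from the switch_to path
 * rather than lazily, per the comment above load_up_fpu.)
 */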
_GLOBAL(giveup_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC_601
	ISYNC_601
	MTMSRD(r5)			/* enable use of fpu now */
	SYNC_601
	isync
	PPC_LCMPI	0,r3,0
	beqlr-				/* if no previous owner, done */
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r5,PT_REGS(r3)
	PPC_LCMPI	0,r5,0
	SAVE_32FPVSRS(0, r4, r3)
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r3)
	beq	1f
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r3,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r3		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
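	/*
	 * (The beq above skips the MSR update when the task has no
	 * pt_regs, i.e. a kernel thread that has never entered user mode.)
	 */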
#ifndef CONFIG_SMP
	li	r5,0
	LOAD_REG_ADDRBASE(r4,last_task_used_math)
	PPC_STL	r5,ADDROFF(last_task_used_math)(r4)	/* clear lazy owner */
#endif /* CONFIG_SMP */
	blr

/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 * We restore and save the fpscr so the task gets the same result
 * and exceptions as if the cpu had performed the load or store.
 */
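/*
 * A sketch of the C-level signatures, assumed from the register usage
 * below (r3 = source, r4 = destination, r5 = &task->thread):
 *	void cvt_fd(float *from, double *to, struct thread_struct *thread);
 *	void cvt_df(double *from, float *to, struct thread_struct *thread);
 */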
_GLOBAL(cvt_fd)
	lfd	0,THREAD_FPSCR(r5)	/* load up fpscr value */
	MTFSF_L(0)
	lfs	0,0(r3)			/* load single, converting to double */
	stfd	0,0(r4)			/* store it as a double */
	mffs	0
	stfd	0,THREAD_FPSCR(r5)	/* save new fpscr value */
	blr

_GLOBAL(cvt_df)
	lfd	0,THREAD_FPSCR(r5)	/* load up fpscr value */
	MTFSF_L(0)
	lfd	0,0(r3)			/* load the double */
	stfs	0,0(r4)			/* store it rounded to single */
	mffs	0
	stfd	0,THREAD_FPSCR(r5)	/* save new fpscr value */
	blr