1 /* sun4v_tlb_miss.S: Sun4v TLB miss handlers.
 *
3 * Copyright (C) 2006 <davem@davemloft.net>
 */
9 /* Load ITLB fault information into VADDR and CTX, using BASE.
 * BASE points at the hypervisor I-MMU fault status area; the faulting
 * virtual address and MMU context are read with 64-bit loads.
 */
10 #define LOAD_ITLB_INFO(BASE, VADDR, CTX) \
11 ldx [BASE + HV_FAULT_I_ADDR_OFFSET], VADDR; \
12 ldx [BASE + HV_FAULT_I_CTX_OFFSET], CTX;
14 /* Load DTLB fault information into VADDR and CTX, using BASE.
 * BASE points at the hypervisor D-MMU fault status area; the faulting
 * virtual address and MMU context are read with 64-bit loads.
 */
15 #define LOAD_DTLB_INFO(BASE, VADDR, CTX) \
16 ldx [BASE + HV_FAULT_D_ADDR_OFFSET], VADDR; \
17 ldx [BASE + HV_FAULT_D_CTX_OFFSET], CTX;
19 /* DEST = (CTX << 48) | (VADDR >> 22)
 *
21 * Branch to ZERO_CTX_LABEL if context is zero.
 *
 * NOTE(review): only the VADDR shift and the context-zero test are
 * visible in this chunk; the instructions that fold "(CTX << 48)"
 * into DEST appear to be missing here — confirm against the
 * original file before relying on this macro body.
 */
23 #define COMPUTE_TAG_TARGET(DEST, VADDR, CTX, TMP, ZERO_CTX_LABEL) \
24 srlx VADDR, 22, TMP; \
26 brz,pn CTX, ZERO_CTX_LABEL; \
29 /* Create TSB pointer. This is something like:
 *
31 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
32 * tsb_base = tsb_reg & ~0x7UL;
33 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
34 * tsb_ptr = tsb_base + (tsb_index * 16);
 *
 * On entry TSB_PTR holds the raw TSB register value: base address in
 * the upper bits, size code in the low 3 bits.
 * NOTE(review): the instructions that materialize the 512 constant,
 * subtract 1 to form the index mask, and scale the index by 16 are
 * not visible in this chunk — confirm against the original file.
 */
36 #define COMPUTE_TSB_PTR(TSB_PTR, VADDR, TMP1, TMP2) \
37 and TSB_PTR, 0x7, TMP1; \
39 andn TSB_PTR, 0x7, TSB_PTR; \
40 sllx TMP2, TMP1, TMP2; \
41 srlx VADDR, PAGE_SHIFT, TMP1; \
43 and TMP1, TMP2, TMP1; \
45 add TSB_PTR, TMP1, TSB_PTR;
/* sun4v I-TLB miss fast path.  NOTE(review): the handler label
 * (presumably sun4v_itlb_miss, per the patch table below) is not
 * visible in this chunk.
 */
48 /* Load MMU Miss base into %g2. */
49 ldxa [%g0] ASI_SCRATCHPAD, %g2
51 /* Load UTSB reg into %g1. */
52 mov SCRATCHPAD_UTSBREG1, %g1
53 ldxa [%g1] ASI_SCRATCHPAD, %g1
/* %g4 = fault vaddr, %g5 = fault ctx; context-zero (kernel) faults
 * branch to kvmap_itlb_4v.  Then %g6 = TSB tag target and %g1 is
 * turned into the TSB entry pointer. */
55 LOAD_ITLB_INFO(%g2, %g4, %g5)
56 COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_itlb_4v)
57 COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)
59 /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
60 ldda [%g1] ASI_QUAD_LDD_PHYS, %g2
62 sethi %hi(_PAGE_EXEC), %g7
/* Tag mismatch -> page table walk; PTE without exec permission ->
 * fault.  Both delay slots are annulled (",a") so FAULT_CODE_ITLB is
 * only loaded when the branch is taken.
 * NOTE(review): the tag compare (cmp %g2, %g6) and the _PAGE_EXEC
 * test (andcc) feeding these branches are not visible in this chunk
 * — confirm against the original file. */
63 bne,a,pn %xcc, tsb_miss_page_table_walk
64 mov FAULT_CODE_ITLB, %g3
66 be,a,pn %xcc, tsb_do_fault
67 mov FAULT_CODE_ITLB, %g3
69 /* We have a valid entry, make hypervisor call to load
70 * I-TLB and return from trap.
 *
74 * %g6: TAG TARGET (only "CTX << 48" part matters)
 */
77 mov %o0, %g1 ! save %o0
78 mov %o1, %g2 ! save %o1
79 mov %o2, %g5 ! save %o2
80 mov %o3, %g7 ! save %o3
/* Hypervisor fast map-addr trap: %o1 = ctx (from tag target),
 * %o3 = I-MMU flag.  NOTE(review): the instructions loading %o0
 * (vaddr) and %o2 (PTE) are not visible in this chunk. */
82 srlx %g6, 48, %o1 ! ctx
84 mov HV_MMU_IMMU, %o3 ! flags
85 ta HV_MMU_MAP_ADDR_TRAP
86 mov %g1, %o0 ! restore %o0
87 mov %g2, %o1 ! restore %o1
88 mov %g5, %o2 ! restore %o2
89 mov %g7, %o3 ! restore %o3
/* sun4v D-TLB miss fast path.  NOTE(review): the handler label
 * (presumably sun4v_dtlb_miss, per the patch table below) is not
 * visible in this chunk.
 */
94 /* Load MMU Miss base into %g2. */
95 ldxa [%g0] ASI_SCRATCHPAD, %g2
97 /* Load UTSB reg into %g1. */
98 mov SCRATCHPAD_UTSBREG1, %g1
99 ldxa [%g1] ASI_SCRATCHPAD, %g1
/* %g4 = fault vaddr, %g5 = fault ctx; context-zero (kernel) faults
 * branch to kvmap_dtlb_4v.  Then %g6 = TSB tag target and %g1 is
 * turned into the TSB entry pointer. */
101 LOAD_DTLB_INFO(%g2, %g4, %g5)
102 COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v)
103 COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)
105 /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
106 ldda [%g1] ASI_QUAD_LDD_PHYS, %g2
/* Tag mismatch -> page table walk.  The delay slot is annulled
 * (",a") so the fault code is only loaded when the branch is taken.
 * BUGFIX: this is the D-TLB miss path, so the fault code must be
 * FAULT_CODE_DTLB — the previous FAULT_CODE_ITLB was a copy/paste
 * from the I-TLB handler above and mislabeled D-TLB faults.
 * NOTE(review): the tag compare (cmp %g2, %g6) feeding this branch
 * is not visible in this chunk — confirm against the original file. */
108 bne,a,pn %xcc, tsb_miss_page_table_walk
109 mov FAULT_CODE_DTLB, %g3
111 /* We have a valid entry, make hypervisor call to load
112 * D-TLB and return from trap.
 *
116 * %g6: TAG TARGET (only "CTX << 48" part matters)
 */
119 mov %o0, %g1 ! save %o0
120 mov %o1, %g2 ! save %o1
121 mov %o2, %g5 ! save %o2
122 mov %o3, %g7 ! save %o3
/* Hypervisor fast map-addr trap: %o1 = ctx (from tag target),
 * %o3 = D-MMU flag.  NOTE(review): the instructions loading %o0
 * (vaddr) and %o2 (PTE) are not visible in this chunk. */
124 srlx %g6, 48, %o1 ! ctx
126 mov HV_MMU_DMMU, %o3 ! flags
127 ta HV_MMU_MAP_ADDR_TRAP
128 mov %g1, %o0 ! restore %o0
129 mov %g2, %o1 ! restore %o1
130 mov %g5, %o2 ! restore %o2
131 mov %g7, %o3 ! restore %o3
/* sun4v D-TLB protection fault: report a write fault to the generic
 * fault path, or bounce to the window fixup trampoline.
 * NOTE(review): the handler label, the compare feeding the bgu
 * branch, and its delay-slot instruction are not visible in this
 * chunk — confirm against the original file.
 */
136 /* Load MMU Miss base into %g2. */
137 ldxa [%g0] ASI_SCRATCHPAD, %g2
139 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g5 ! %g5 = faulting vaddr
142 bgu,pn %xcc, winfix_trampoline
144 ba,pt %xcc, sparc64_realfault_common
145 mov FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4 ! delay slot: fault code
147 /* Called from trap table with TAG TARGET placed into
148 * %g6, SCRATCHPAD_UTSBREG1 contents in %g1, and
149 * SCRATCHPAD_MMU_MISS contents in %g2.
 *
 * I-TSB miss: re-load the user TSB register into %g1, then take the
 * kernel I-TLB path when the context (%g5) is zero; the fault code
 * is pre-loaded in the brz delay slot either way.
 * NOTE(review): the stub label and the fall-through branch to the
 * common TSB-miss path are not visible in this chunk.
 */
152 mov SCRATCHPAD_UTSBREG1, %g1
153 ldxa [%g1] ASI_SCRATCHPAD, %g1
154 brz,pn %g5, kvmap_itlb_4v
155 mov FAULT_CODE_ITLB, %g3
157 /* Called from trap table with TAG TARGET placed into
158 * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
 *
 * D-TSB miss: re-load the user TSB register into %g1, then take the
 * kernel D-TLB path when the context (%g5) is zero; the fault code
 * is pre-loaded in the brz delay slot either way.
 * NOTE(review): the stub label and the fall-through branch to the
 * common TSB-miss path are not visible in this chunk.
 */
161 mov SCRATCHPAD_UTSBREG1, %g1
162 ldxa [%g1] ASI_SCRATCHPAD, %g1
163 brz,pn %g5, kvmap_dtlb_4v
164 mov FAULT_CODE_DTLB, %g3
166 /* Create TSB pointer into %g1. This is something like:
 *
168 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
169 * tsb_base = tsb_reg & ~0x7UL;
170 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
171 * tsb_ptr = tsb_base + (tsb_index * 16);
 */
173 sun4v_tsb_miss_common:
174 COMPUTE_TSB_PTR(%g1, %g4, %g5, %g7)
176 /* Branch directly to page table lookup. We have SCRATCHPAD_MMU_MISS
177 * still in %g2, so it's quite trivial to get at the PGD PHYS value
178 * so we can preload it into %g7.
 */
180 sub %g2, TRAP_PER_CPU_FAULT_INFO, %g2 ! %g2 = trap_block base
181 ba,pt %xcc, tsb_miss_page_table_walk_sun4v_fastpath
182 ldx [%g2 + TRAP_PER_CPU_PGD_PADDR], %g7 ! delay slot: %g7 = PGD paddr
184 /* Instruction Access Exception, tl0. */
/* Read I-MMU fault type/addr/ctx from the HV fault status area into
 * %g3/%g4/%g5, then call the C handler with pt_regs and return via
 * rtrap.  NOTE(review): the handler label and the trap-entry /
 * argument-marshalling instructions between the loads and the call
 * are not visible in this chunk.
 */
186 ldxa [%g0] ASI_SCRATCHPAD, %g2
187 ldx [%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
188 ldx [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
189 ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5
196 call sun4v_insn_access_exception
197 add %sp, PTREGS_OFF, %o0 ! delay slot: arg0 = pt_regs
198 ba,a,pt %xcc, rtrap_clr_l6
200 /* Instruction Access Exception, tl1. */
/* Same as the tl0 variant above but dispatches to the TL1 C handler.
 * NOTE(review): the handler label and the trap-entry instructions
 * between the loads and the call are not visible in this chunk.
 */
202 ldxa [%g0] ASI_SCRATCHPAD, %g2
203 ldx [%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
204 ldx [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
205 ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5
212 call sun4v_insn_access_exception_tl1
213 add %sp, PTREGS_OFF, %o0 ! delay slot: arg0 = pt_regs
214 ba,a,pt %xcc, rtrap_clr_l6
216 /* Data Access Exception, tl0. */
/* Read D-MMU fault type/addr/ctx from the HV fault status area into
 * %g3/%g4/%g5, then call the C handler with pt_regs and return via
 * rtrap.  NOTE(review): the handler label and the trap-entry
 * instructions between the loads and the call are not visible in
 * this chunk.
 */
218 ldxa [%g0] ASI_SCRATCHPAD, %g2
219 ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
220 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
221 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
228 call sun4v_data_access_exception
229 add %sp, PTREGS_OFF, %o0 ! delay slot: arg0 = pt_regs
230 ba,a,pt %xcc, rtrap_clr_l6
232 /* Data Access Exception, tl1. */
/* Same as the tl0 variant above but dispatches to the TL1 C handler.
 * NOTE(review): the handler label and the trap-entry instructions
 * between the loads and the call are not visible in this chunk.
 */
234 ldxa [%g0] ASI_SCRATCHPAD, %g2
235 ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
236 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
237 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
244 call sun4v_data_access_exception_tl1
245 add %sp, PTREGS_OFF, %o0 ! delay slot: arg0 = pt_regs
246 ba,a,pt %xcc, rtrap_clr_l6
248 /* Memory Address Unaligned. */
/* Gather the unaligned-access fault info (%g3 = synthesized fault
 * type, %g4/%g5 = addr/ctx), with a bailout to the window fixup path.
 * NOTE(review): the handler label, the compare feeding the bgu
 * branch, its delay slot, and the call between the branch and the
 * pt_regs setup are not visible in this chunk — confirm against the
 * original file.
 */
250 ldxa [%g0] ASI_SCRATCHPAD, %g2
251 mov HV_FAULT_TYPE_UNALIGNED, %g3
252 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
253 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
260 bgu,pn %icc, winfix_mna
268 add %sp, PTREGS_OFF, %o0 ! arg0 = pt_regs
269 ba,a,pt %xcc, rtrap_clr_l6
271 /* Privileged Action. */
/* NOTE(review): the handler label, trap-entry sequence, and the call
 * to the C handler ahead of this pt_regs setup are not visible in
 * this chunk.
 */
276 add %sp, PTREGS_OFF, %o0 ! arg0 = pt_regs
277 ba,a,pt %xcc, rtrap_clr_l6
279 /* Unaligned ldd float, tl0. */
/* Read D-MMU fault type/addr/ctx into %g3/%g4/%g5, then call the C
 * handler with pt_regs and return via rtrap.
 * NOTE(review): the handler label, trap-entry instructions, and the
 * call itself are not visible in this chunk.
 */
281 ldxa [%g0] ASI_SCRATCHPAD, %g2
282 ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
283 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
284 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
292 add %sp, PTREGS_OFF, %o0 ! arg0 = pt_regs
293 ba,a,pt %xcc, rtrap_clr_l6
295 /* Unaligned std float, tl0. */
/* Read D-MMU fault type/addr/ctx into %g3/%g4/%g5, then call the C
 * handler with pt_regs and return via rtrap.
 * NOTE(review): the handler label, trap-entry instructions, and the
 * call itself are not visible in this chunk.
 */
297 ldxa [%g0] ASI_SCRATCHPAD, %g2
298 ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
299 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
300 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
308 add %sp, PTREGS_OFF, %o0 ! arg0 = pt_regs
309 ba,a,pt %xcc, rtrap_clr_l6
/* Instruction-word templates: an always-taken branch opcode and a nop. */
311 #define BRANCH_ALWAYS 0x10680000
312 #define NOP 0x01000000
/* Patch the two instructions at OLD so they branch unconditionally to
 * NEW (branch word, then a nop at OLD+4).  The branch displacement is
 * derived from the NEW/OLD addresses and folded into BRANCH_ALWAYS.
 * NOTE(review): several instructions (the OLD-NEW subtraction, part
 * of the displacement masking, the store of the branch word at OLD,
 * and the I-cache flush) are not visible in this chunk — confirm
 * against the original file before reusing this macro.
 */
313 #define SUN4V_DO_PATCH(OLD, NEW) \
314 sethi %hi(NEW), %g1; \
315 or %g1, %lo(NEW), %g1; \
316 sethi %hi(OLD), %g2; \
317 or %g2, %lo(OLD), %g2; \
319 sethi %hi(BRANCH_ALWAYS), %g3; \
321 srl %g1, 11 + 2, %g1; \
322 or %g3, %lo(BRANCH_ALWAYS), %g3; \
325 sethi %hi(NOP), %g3; \
326 or %g3, %lo(NOP), %g3; \
327 stw %g3, [%g2 + 0x4]; \
330 .globl sun4v_patch_tlb_handlers
331 .type sun4v_patch_tlb_handlers,#function
/* Redirect each listed trap-table entry to its sun4v-specific handler
 * by patching a branch over the original entry (see SUN4V_DO_PATCH).
 * NOTE(review): the return sequence (retl + delay slot) before the
 * .size directive is not visible in this chunk.
 */
332 sun4v_patch_tlb_handlers:
333 SUN4V_DO_PATCH(tl0_iamiss, sun4v_itlb_miss)
334 SUN4V_DO_PATCH(tl1_iamiss, sun4v_itlb_miss)
335 SUN4V_DO_PATCH(tl0_damiss, sun4v_dtlb_miss)
336 SUN4V_DO_PATCH(tl1_damiss, sun4v_dtlb_miss)
337 SUN4V_DO_PATCH(tl0_daprot, sun4v_dtlb_prot)
338 SUN4V_DO_PATCH(tl1_daprot, sun4v_dtlb_prot)
339 SUN4V_DO_PATCH(tl0_iax, sun4v_iacc)
340 SUN4V_DO_PATCH(tl1_iax, sun4v_iacc_tl1)
341 SUN4V_DO_PATCH(tl0_dax, sun4v_dacc)
342 SUN4V_DO_PATCH(tl1_dax, sun4v_dacc_tl1)
343 SUN4V_DO_PATCH(tl0_mna, sun4v_mna)
344 SUN4V_DO_PATCH(tl1_mna, sun4v_mna)
345 SUN4V_DO_PATCH(tl0_lddfmna, sun4v_lddfmna)
346 SUN4V_DO_PATCH(tl0_stdfmna, sun4v_stdfmna)
347 SUN4V_DO_PATCH(tl0_privact, sun4v_privact)
350 .size sun4v_patch_tlb_handlers,.-sun4v_patch_tlb_handlers