1 /* sun4v_tlb_miss.S: Sun4v TLB miss handlers.
3 * Copyright (C) 2006 <davem@davemloft.net>
9 /* Load ITLB fault information into VADDR and CTX, using BASE.
 *
 * BASE is the MMU Miss area pointer that callers load from
 * ASI_SCRATCHPAD (presumably the per-cpu hypervisor fault status
 * block -- confirm).  Fetches the faulting instruction address and
 * its MMU context; pure loads, no condition codes touched.
 */
10 #define LOAD_ITLB_INFO(BASE, VADDR, CTX) \
11 ldx [BASE + HV_FAULT_I_ADDR_OFFSET], VADDR; /* VADDR = I-fault address */ \
12 ldx [BASE + HV_FAULT_I_CTX_OFFSET], CTX; /* CTX = I-fault context */
14 /* Load DTLB fault information into VADDR and CTX, using BASE.
 *
 * D-side twin of LOAD_ITLB_INFO: BASE is the MMU Miss area pointer
 * callers load from ASI_SCRATCHPAD.  Pure loads, no condition codes
 * touched.
 */
15 #define LOAD_DTLB_INFO(BASE, VADDR, CTX) \
16 ldx [BASE + HV_FAULT_D_ADDR_OFFSET], VADDR; /* VADDR = D-fault address */ \
17 ldx [BASE + HV_FAULT_D_CTX_OFFSET], CTX; /* CTX = D-fault context */
19 /* DEST = (CTX << 48) | (VADDR >> 22)
21 * Branch to ZERO_CTX_LABEL if context is zero.
 *
 * NOTE(review): this definition ends in a line-continuation
 * backslash with no visible tail; the instructions implied by the
 * pseudo-code above that shift CTX into the high bits and OR it
 * into DEST appear to be missing from this copy -- confirm against
 * the full source before relying on this text.
 */
23 #define COMPUTE_TAG_TARGET(DEST, VADDR, CTX, TMP, ZERO_CTX_LABEL) \
24 srlx VADDR, 22, TMP; /* TMP = VADDR >> 22 (TSB tag bits) */ \
26 brz,pn CTX, ZERO_CTX_LABEL; /* context 0 -> take the zero-ctx path */ \
29 /* Create TSB pointer. This is something like:
31 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
32 * tsb_base = tsb_reg & ~0x7UL;
33 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
34 * tsb_ptr = tsb_base + (tsb_index * 16);
 *
 * On entry TSB_PTR holds the raw TSB register value (base address
 * with a size code in the low 3 bits); on exit it holds the address
 * of the candidate TSB entry for VADDR.  TMP1/TMP2 are scratch.
 *
 * NOTE(review): several continuation lines appear to be missing
 * from this copy (the "mov 512, TMP2" load, the "- 1" mask step and
 * the "* 16" entry-size shift implied by the pseudo-code above) --
 * confirm against the full source; as shown, TMP2 is used before it
 * is initialized.
 */
36 #define COMPUTE_TSB_PTR(TSB_PTR, VADDR, TMP1, TMP2) \
37 and TSB_PTR, 0x7, TMP1; /* TMP1 = size code (low 3 bits) */ \
39 andn TSB_PTR, 0x7, TSB_PTR; /* TSB_PTR = tsb_base */ \
40 sllx TMP2, TMP1, TMP2; /* TMP2 <<= size code (index mask) */ \
41 srlx VADDR, PAGE_SHIFT, TMP1; /* TMP1 = virtual page number */ \
43 and TMP1, TMP2, TMP1; /* TMP1 = tsb_index */ \
45 add TSB_PTR, TMP1, TSB_PTR; /* TSB_PTR = entry address */
/* sun4v I-TLB miss handler body (the label, presumably
 * sun4v_itlb_miss:, is not visible in this copy -- confirm).
 * Looks up the faulting VA in the user TSB; on a tag hit with
 * execute permission, loads the translation into the I-TLB via a
 * hypervisor call, otherwise branches off to the page-table walk or
 * fault path with FAULT_CODE_ITLB in %g3.
 */
48 /* Load MMU Miss base into %g2. */
49 ldxa [%g0] ASI_SCRATCHPAD, %g2
51 /* Load UTSB reg into %g1. */
52 mov SCRATCHPAD_UTSBREG1, %g1
53 ldxa [%g1] ASI_SCRATCHPAD, %g1
55 LOAD_ITLB_INFO(%g2, %g4, %g5) ! %g4 = vaddr, %g5 = ctx
56 COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_itlb_4v) ! ctx 0 -> kernel path
57 COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7) ! %g1 = TSB entry address
59 /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
60 ldda [%g1] ASI_QUAD_LDD_PHYS_4V, %g2
62 sethi %hi(PAGE_EXEC), %g7
63 ldx [%g7 + %lo(PAGE_EXEC)], %g7 ! %g7 = execute-permission PTE bit
/* NOTE(review): the "cmp %g2, %g6" tag compare and the exec-bit
 * "andcc" test that should set the condition codes consumed by the
 * two branches below appear to be missing from this copy -- confirm
 * against the full source.
 */
64 bne,a,pn %xcc, tsb_miss_page_table_walk ! tag mismatch: walk page tables
65 mov FAULT_CODE_ITLB, %g3 ! (annulled delay slot) fault code
67 be,a,pn %xcc, tsb_do_fault ! no exec permission: report fault
68 mov FAULT_CODE_ITLB, %g3 ! (annulled delay slot) fault code
70 /* We have a valid entry, make hypervisor call to load
71 * I-TLB and return from trap.
75 * %g6: TAG TARGET (only "CTX << 48" part matters)
 */
78 mov %o0, %g1 ! save %o0
79 mov %o1, %g2 ! save %o1
80 mov %o2, %g5 ! save %o2
81 mov %o3, %g7 ! save %o3
/* NOTE(review): the argument-setup lines "mov %g4, %o0" (vaddr) and
 * "mov %g3, %o2" (pte), and the trailing "retry", appear to be
 * missing from this copy -- confirm against the full source.
 */
83 srlx %g6, 48, %o1 ! ctx
85 mov HV_MMU_IMMU, %o3 ! flags
86 ta HV_MMU_MAP_ADDR_TRAP
87 mov %g1, %o0 ! restore %o0
88 mov %g2, %o1 ! restore %o1
89 mov %g5, %o2 ! restore %o2
90 mov %g7, %o3 ! restore %o3
/* sun4v D-TLB miss handler body (the label, presumably
 * sun4v_dtlb_miss:, is not visible in this copy -- confirm).
 * Looks up the faulting VA in the user TSB; on a tag hit, loads the
 * translation into the D-TLB via a hypervisor call, otherwise
 * branches to the page-table walk with FAULT_CODE_DTLB in %g3.
 */
95 /* Load MMU Miss base into %g2. */
96 ldxa [%g0] ASI_SCRATCHPAD, %g2
98 /* Load UTSB reg into %g1. */
99 mov SCRATCHPAD_UTSBREG1, %g1
100 ldxa [%g1] ASI_SCRATCHPAD, %g1
102 LOAD_DTLB_INFO(%g2, %g4, %g5) ! %g4 = vaddr, %g5 = ctx
103 COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v) ! ctx 0 -> kernel path
104 COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7) ! %g1 = TSB entry address
106 /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
107 ldda [%g1] ASI_QUAD_LDD_PHYS_4V, %g2
/* NOTE(review): the "cmp %g2, %g6" tag compare that should set the
 * condition codes consumed by the branch below appears to be
 * missing from this copy -- confirm against the full source.
 */
109 bne,a,pn %xcc, tsb_miss_page_table_walk ! tag mismatch: walk page tables
110 mov FAULT_CODE_DTLB, %g3 ! (annulled delay slot) BUGFIX: was
				! FAULT_CODE_ITLB -- this is the D-TLB
				! miss path (LOAD_DTLB_INFO, kvmap_dtlb_4v,
				! HV_MMU_DMMU below), so the miss must be
				! reported with the D-TLB fault code, as
				! sun4v_dtsb_miss already does.
112 /* We have a valid entry, make hypervisor call to load
113 * D-TLB and return from trap.
117 * %g6: TAG TARGET (only "CTX << 48" part matters)
 */
120 mov %o0, %g1 ! save %o0
121 mov %o1, %g2 ! save %o1
122 mov %o2, %g5 ! save %o2
123 mov %o3, %g7 ! save %o3
/* NOTE(review): the argument-setup lines "mov %g4, %o0" (vaddr) and
 * "mov %g3, %o2" (pte), and the trailing "retry", appear to be
 * missing from this copy -- confirm against the full source.
 */
125 srlx %g6, 48, %o1 ! ctx
127 mov HV_MMU_DMMU, %o3 ! flags
128 ta HV_MMU_MAP_ADDR_TRAP
129 mov %g1, %o0 ! restore %o0
130 mov %g2, %o1 ! restore %o1
131 mov %g5, %o2 ! restore %o2
132 mov %g7, %o3 ! restore %o3
/* sun4v D-TLB protection fault body (the label, presumably
 * sun4v_dtlb_prot:, is not visible in this copy -- confirm).
 * Reports the fault as FAULT_CODE_DTLB | FAULT_CODE_WRITE, with a
 * window-fixup detour when the branch below is taken.
 */
137 /* Load MMU Miss base into %g2. */
138 ldxa [%g0] ASI_SCRATCHPAD, %g2
140 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g5 ! %g5 = faulting address
/* NOTE(review): the compare that sets the condition codes for "bgu"
 * below (presumably a trap-level check) and bgu's delay slot appear
 * to be missing from this copy -- confirm against the full source.
 */
143 bgu,pn %xcc, winfix_trampoline ! window-handler fixup path
145 ba,pt %xcc, sparc64_realfault_common
146 mov FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4 ! (delay slot) fault code
148 /* Called from trap table with TAG TARGET placed into
149 * %g6, SCRATCHPAD_UTSBREG1 contents in %g1, and
150 * SCRATCHPAD_MMU_MISS contents in %g2.
 *
 * I-TSB miss entry: reloads the user TSB register into %g1, then if
 * %g5 (presumably the fault context -- confirm against the trap
 * table) is zero takes the kernel I-mapping path, else continues
 * with FAULT_CODE_ITLB in %g3.
 */
153 mov SCRATCHPAD_UTSBREG1, %g1
154 ldxa [%g1] ASI_SCRATCHPAD, %g1
155 brz,pn %g5, kvmap_itlb_4v ! context 0: kernel mapping path
156 mov FAULT_CODE_ITLB, %g3 ! (delay slot) fault code
158 /* Called from trap table with TAG TARGET placed into
159 * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
 *
 * D-TSB miss entry, twin of the I-side block above: reloads the
 * user TSB register into %g1, then if %g5 (presumably the fault
 * context -- confirm) is zero takes the kernel D-mapping path, else
 * continues with FAULT_CODE_DTLB in %g3.
 */
162 mov SCRATCHPAD_UTSBREG1, %g1
163 ldxa [%g1] ASI_SCRATCHPAD, %g1
164 brz,pn %g5, kvmap_dtlb_4v ! context 0: kernel mapping path
165 mov FAULT_CODE_DTLB, %g3 ! (delay slot) fault code
167 /* Create TSB pointer into %g1. This is something like:
169 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
170 * tsb_base = tsb_reg & ~0x7UL;
171 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
172 * tsb_ptr = tsb_base + (tsb_index * 16);
 */
174 sun4v_tsb_miss_common:
175 COMPUTE_TSB_PTR(%g1, %g4, %g5, %g7) ! %g1 = TSB entry address for vaddr %g4
177 /* Branch directly to page table lookup. We have SCRATCHPAD_MMU_MISS
178 * still in %g2, so it's quite trivial to get at the PGD PHYS value
179 * so we can preload it into %g7.
 */
181 sub %g2, TRAP_PER_CPU_FAULT_INFO, %g2 ! back up to trap_per_cpu base
182 ba,pt %xcc, tsb_miss_page_table_walk_sun4v_fastpath
183 ldx [%g2 + TRAP_PER_CPU_PGD_PADDR], %g7 ! (delay slot) %g7 = PGD phys addr
185 /* Instruction Access Exception, tl0. */
/* (label presumably sun4v_iacc: -- not visible in this copy)
 * Gathers I-side fault type/address/context from the HV fault
 * status area, then calls the C handler with a pt_regs pointer.
 *
 * NOTE(review): the lines between the loads and the call (the trap
 * entry / register shuffling) appear to be missing from this copy
 * -- confirm against the full source.
 */
187 ldxa [%g0] ASI_SCRATCHPAD, %g2
188 ldx [%g2 + HV_FAULT_I_TYPE_OFFSET], %g3 ! fault type
189 ldx [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4 ! fault address
190 ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5 ! fault context
197 call sun4v_insn_access_exception
198 add %sp, PTREGS_OFF, %o0 ! (delay slot) %o0 = pt_regs
199 ba,a,pt %xcc, rtrap_clr_l6 ! annulled: return from trap
201 /* Instruction Access Exception, tl1. */
/* (label presumably sun4v_iacc_tl1: -- not visible) TL>0 variant of
 * the block above; same fault-info gathering, different C handler.
 *
 * NOTE(review): lines between the loads and the call appear to be
 * missing from this copy -- confirm against the full source.
 */
203 ldxa [%g0] ASI_SCRATCHPAD, %g2
204 ldx [%g2 + HV_FAULT_I_TYPE_OFFSET], %g3 ! fault type
205 ldx [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4 ! fault address
206 ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5 ! fault context
213 call sun4v_insn_access_exception_tl1
214 add %sp, PTREGS_OFF, %o0 ! (delay slot) %o0 = pt_regs
215 ba,a,pt %xcc, rtrap_clr_l6 ! annulled: return from trap
217 /* Data Access Exception, tl0. */
/* (label presumably sun4v_dacc: -- not visible) D-side twin of
 * sun4v_iacc: gathers fault type/address/context, calls the C
 * handler with a pt_regs pointer.
 *
 * NOTE(review): lines between the loads and the call appear to be
 * missing from this copy -- confirm against the full source.
 */
219 ldxa [%g0] ASI_SCRATCHPAD, %g2
220 ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3 ! fault type
221 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4 ! fault address
222 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5 ! fault context
229 call sun4v_data_access_exception
230 add %sp, PTREGS_OFF, %o0 ! (delay slot) %o0 = pt_regs
231 ba,a,pt %xcc, rtrap_clr_l6 ! annulled: return from trap
233 /* Data Access Exception, tl1. */
/* (label presumably sun4v_dacc_tl1: -- not visible) TL>0 variant of
 * the block above; same fault-info gathering, different C handler.
 *
 * NOTE(review): lines between the loads and the call appear to be
 * missing from this copy -- confirm against the full source.
 */
235 ldxa [%g0] ASI_SCRATCHPAD, %g2
236 ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3 ! fault type
237 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4 ! fault address
238 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5 ! fault context
245 call sun4v_data_access_exception_tl1
246 add %sp, PTREGS_OFF, %o0 ! (delay slot) %o0 = pt_regs
247 ba,a,pt %xcc, rtrap_clr_l6 ! annulled: return from trap
249 /* Memory Address Unaligned. */
/* (label presumably sun4v_mna: -- not visible)  The fault type is
 * synthesized as HV_FAULT_TYPE_UNALIGNED rather than read from the
 * fault area; address/context come from the D-side fault area.
 */
251 ldxa [%g0] ASI_SCRATCHPAD, %g2
252 mov HV_FAULT_TYPE_UNALIGNED, %g3 ! synthesized fault type
253 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4 ! fault address
254 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5 ! fault context
/* NOTE(review): the compare feeding "bgu" below, its delay slot,
 * and the lines between it and the handler call appear to be
 * missing from this copy -- confirm against the full source.
 */
261 bgu,pn %icc, winfix_mna ! window-handler fixup path
269 add %sp, PTREGS_OFF, %o0 ! %o0 = pt_regs for the C handler
270 ba,a,pt %xcc, rtrap_clr_l6 ! annulled: return from trap
272 /* Privileged Action. */
/* (label presumably sun4v_privact: -- not visible)  Only the tail
 * is visible here: the trap-entry sequence and the call to the C
 * handler appear to be missing from this copy -- confirm against
 * the full source.  %o0 = pt_regs is set up for that call.
 */
277 add %sp, PTREGS_OFF, %o0
278 ba,a,pt %xcc, rtrap_clr_l6 ! annulled: return from trap
280 /* Unaligned ldd float, tl0. */
/* (label presumably sun4v_lddfmna: -- not visible)  Gathers D-side
 * fault type/address/context for the handler call.
 *
 * NOTE(review): the lines between the loads and the handler call
 * (trap entry and the call itself) appear to be missing from this
 * copy -- confirm against the full source.
 */
282 ldxa [%g0] ASI_SCRATCHPAD, %g2
283 ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3 ! fault type
284 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4 ! fault address
285 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5 ! fault context
293 add %sp, PTREGS_OFF, %o0 ! %o0 = pt_regs
294 ba,a,pt %xcc, rtrap_clr_l6 ! annulled: return from trap
296 /* Unaligned std float, tl0. */
/* (label presumably sun4v_stdfmna: -- not visible)  Store-side twin
 * of the block above: same fault-info gathering.
 *
 * NOTE(review): the lines between the loads and the handler call
 * appear to be missing from this copy -- confirm against the full
 * source.
 */
298 ldxa [%g0] ASI_SCRATCHPAD, %g2
299 ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3 ! fault type
300 ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4 ! fault address
301 ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5 ! fault context
309 add %sp, PTREGS_OFF, %o0 ! %o0 = pt_regs
310 ba,a,pt %xcc, rtrap_clr_l6 ! annulled: return from trap
312 #define BRANCH_ALWAYS 0x10680000 /* BPcc "branch always, pt" opcode template */
313 #define NOP 0x01000000 /* "nop" encoding */
/* Patch the two instruction words at OLD so execution branches to
 * NEW: a PC-relative branch built from BRANCH_ALWAYS goes into the
 * first word and a NOP into the second (the delay slot).  Clobbers
 * %g1-%g3.
 *
 * NOTE(review): several continuation lines appear to be missing
 * from this copy (the OLD->NEW displacement subtraction, the
 * pre-shift of %g1, the OR of displacement into the branch opcode,
 * the first "stw ... [%g2]" and the final "flush") -- confirm
 * against the full source.
 */
314 #define SUN4V_DO_PATCH(OLD, NEW) \
315 sethi %hi(NEW), %g1; \
316 or %g1, %lo(NEW), %g1; /* %g1 = address of NEW */ \
317 sethi %hi(OLD), %g2; \
318 or %g2, %lo(OLD), %g2; /* %g2 = address of OLD */ \
320 sethi %hi(BRANCH_ALWAYS), %g3; \
322 srl %g1, 11 + 2, %g1; /* scale displacement to instruction words */ \
323 or %g3, %lo(BRANCH_ALWAYS), %g3; \
326 sethi %hi(NOP), %g3; \
327 or %g3, %lo(NOP), %g3; \
328 stw %g3, [%g2 + 0x4]; /* NOP into OLD's delay slot */ \
/* Rewrite the trap-table entries for the sun4v TLB/fault traps so
 * each one branches to its hypervisor-aware handler above
 * (presumably run once during boot on sun4v -- confirm with the
 * caller).
 *
 * NOTE(review): the function prologue/epilogue lines around the
 * patch list appear to be missing from this copy -- confirm against
 * the full source.
 */
331 .globl sun4v_patch_tlb_handlers
332 .type sun4v_patch_tlb_handlers,#function
333 sun4v_patch_tlb_handlers:
334 SUN4V_DO_PATCH(tl0_iamiss, sun4v_itlb_miss)
335 SUN4V_DO_PATCH(tl1_iamiss, sun4v_itlb_miss) ! one handler for both TLs
336 SUN4V_DO_PATCH(tl0_damiss, sun4v_dtlb_miss)
337 SUN4V_DO_PATCH(tl1_damiss, sun4v_dtlb_miss) ! one handler for both TLs
338 SUN4V_DO_PATCH(tl0_daprot, sun4v_dtlb_prot)
339 SUN4V_DO_PATCH(tl1_daprot, sun4v_dtlb_prot) ! one handler for both TLs
340 SUN4V_DO_PATCH(tl0_iax, sun4v_iacc)
341 SUN4V_DO_PATCH(tl1_iax, sun4v_iacc_tl1) ! separate TL1 handler
342 SUN4V_DO_PATCH(tl0_dax, sun4v_dacc)
343 SUN4V_DO_PATCH(tl1_dax, sun4v_dacc_tl1) ! separate TL1 handler
344 SUN4V_DO_PATCH(tl0_mna, sun4v_mna)
345 SUN4V_DO_PATCH(tl1_mna, sun4v_mna) ! one handler for both TLs
346 SUN4V_DO_PATCH(tl0_lddfmna, sun4v_lddfmna)
347 SUN4V_DO_PATCH(tl0_stdfmna, sun4v_stdfmna)
348 SUN4V_DO_PATCH(tl0_privact, sun4v_privact)
351 .size sun4v_patch_tlb_handlers,.-sun4v_patch_tlb_handlers