nop
/* Get &trap_block[smp_processor_id()] into %g3. */
- __GET_CPUID(%g1)
- sethi %hi(trap_block), %g3
- sllx %g1, TRAP_BLOCK_SZ_SHIFT, %g7
- or %g3, %lo(trap_block), %g3
- add %g3, %g7, %g3
+ ldxa [%g0] ASI_SCRATCHPAD, %g3
+ sub %g3, TRAP_PER_CPU_FAULT_INFO, %g3
/* Get CPU mondo queue base phys address into %g7. */
ldx [%g3 + TRAP_PER_CPU_CPU_MONDO_PA], %g7
nop
/* Get &trap_block[smp_processor_id()] into %g3. */
- __GET_CPUID(%g1)
- sethi %hi(trap_block), %g3
- sllx %g1, TRAP_BLOCK_SZ_SHIFT, %g7
- or %g3, %lo(trap_block), %g3
- add %g3, %g7, %g3
+ ldxa [%g0] ASI_SCRATCHPAD, %g3
+ sub %g3, TRAP_PER_CPU_FAULT_INFO, %g3
/* Get DEV mondo queue base phys address into %g5. */
ldx [%g3 + TRAP_PER_CPU_DEV_MONDO_PA], %g5
nop
/* Get &trap_block[smp_processor_id()] into %g3. */
- __GET_CPUID(%g1)
- sethi %hi(trap_block), %g3
- sllx %g1, TRAP_BLOCK_SZ_SHIFT, %g7
- or %g3, %lo(trap_block), %g3
- add %g3, %g7, %g3
+ ldxa [%g0] ASI_SCRATCHPAD, %g3
+ sub %g3, TRAP_PER_CPU_FAULT_INFO, %g3
/* Get RES mondo queue base phys address into %g5. */
ldx [%g3 + TRAP_PER_CPU_RESUM_MONDO_PA], %g5
nop
/* Get &trap_block[smp_processor_id()] into %g3. */
- __GET_CPUID(%g1)
- sethi %hi(trap_block), %g3
- sllx %g1, TRAP_BLOCK_SZ_SHIFT, %g7
- or %g3, %lo(trap_block), %g3
- add %g3, %g7, %g3
+ ldxa [%g0] ASI_SCRATCHPAD, %g3
+ sub %g3, TRAP_PER_CPU_FAULT_INFO, %g3
- /* Get RES mondo queue base phys address into %g5. */
+ /* Get NONRES mondo queue base phys address into %g5. */
ldx [%g3 + TRAP_PER_CPU_NONRESUM_MONDO_PA], %g5
.align 32
sun4v_itlb_miss:
- /* Load CPU ID into %g3. */
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
+ /* Load MMU Miss base into %g2. */
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
/* Load UTSB reg into %g1. */
- ldxa [%g1 + %g1] ASI_SCRATCHPAD, %g1
-
- /* Load &trap_block[smp_processor_id()] into %g2. */
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
+ mov SCRATCHPAD_UTSBREG1, %g1
+ ldxa [%g1] ASI_SCRATCHPAD, %g1
/* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6.
* Branch if kernel TLB miss. The kernel TSB and user TSB miss
* code wants the missing virtual address in %g4, so that value
* cannot be modified through the entirety of this handler.
*/
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
+ ldx [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5
srlx %g4, 22, %g3
sllx %g5, 48, %g6
or %g6, %g3, %g6
retry
sun4v_dtlb_miss:
- /* Load CPU ID into %g3. */
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
+ /* Load MMU Miss base into %g2. */
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
/* Load UTSB reg into %g1. */
+ mov SCRATCHPAD_UTSBREG1, %g1
- ldxa [%g1 + %g1] ASI_SCRATCHPAD, %g1
+ ldxa [%g1] ASI_SCRATCHPAD, %g1
- /* Load &trap_block[smp_processor_id()] into %g2. */
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
-
/* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6.
* Branch if kernel TLB miss. The kernel TSB and user TSB miss
* code wants the missing virtual address in %g4, so that value
* cannot be modified through the entirety of this handler.
*/
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
+ ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
srlx %g4, 22, %g3
sllx %g5, 48, %g6
or %g6, %g3, %g6
retry
sun4v_dtlb_prot:
- /* Load CPU ID into %g3. */
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
+ /* Load MMU Miss base into %g2. */
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
- /* Load &trap_block[smp_processor_id()] into %g2. */
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
-
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g5
+ ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g5
rdpr %tl, %g1
cmp %g1, 1
bgu,pn %xcc, winfix_trampoline
ba,pt %xcc, sparc64_realfault_common
mov FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4
- /* Called from trap table with &trap_block[smp_processor_id()] in
- * %g5 and SCRATCHPAD_UTSBREG1 contents in %g1.
+ /* Called from trap table with TAG TARGET placed into
+ * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
*/
sun4v_itsb_miss:
- ldx [%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
- ldx [%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
-
- srlx %g4, 22, %g7
- sllx %g5, 48, %g6
- or %g6, %g7, %g6
- brz,pn %g5, kvmap_itlb_4v
- nop
-
ba,pt %xcc, sun4v_tsb_miss_common
mov FAULT_CODE_ITLB, %g3
- /* Called from trap table with &trap_block[smp_processor_id()] in
- * %g5 and SCRATCHPAD_UTSBREG1 contents in %g1.
+ /* Called from trap table with TAG TARGET placed into
+ * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
*/
sun4v_dtsb_miss:
- ldx [%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
- ldx [%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
-
- srlx %g4, 22, %g7
- sllx %g5, 48, %g6
- or %g6, %g7, %g6
- brz,pn %g5, kvmap_dtlb_4v
- nop
-
mov FAULT_CODE_DTLB, %g3
/* Create TSB pointer into %g1. This is something like:
/* Instruction Access Exception, tl0. */
sun4v_iacc:
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_TYPE_OFFSET], %g3
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
+ ldx [%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
+ ldx [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5
sllx %g3, 16, %g3
or %g5, %g3, %g5
ba,pt %xcc, etrap
/* Instruction Access Exception, tl1. */
sun4v_iacc_tl1:
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_TYPE_OFFSET], %g3
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
+ ldx [%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
+ ldx [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5
sllx %g3, 16, %g3
or %g5, %g3, %g5
ba,pt %xcc, etraptl1
/* Data Access Exception, tl0. */
sun4v_dacc:
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
+ ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
+ ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
sllx %g3, 16, %g3
or %g5, %g3, %g5
ba,pt %xcc, etrap
/* Data Access Exception, tl1. */
sun4v_dacc_tl1:
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
+ ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
+ ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
sllx %g3, 16, %g3
or %g5, %g3, %g5
ba,pt %xcc, etraptl1
/* Memory Address Unaligned. */
sun4v_mna:
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
mov HV_FAULT_TYPE_UNALIGNED, %g3
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
+ ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
sllx %g3, 16, %g3
or %g5, %g3, %g5
/* Unaligned ldd float, tl0. */
sun4v_lddfmna:
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
+ ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
+ ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
sllx %g3, 16, %g3
or %g5, %g3, %g5
ba,pt %xcc, etrap
/* Unaligned std float, tl0. */
sun4v_stdfmna:
- mov SCRATCHPAD_CPUID, %g1
- ldxa [%g1] ASI_SCRATCHPAD, %g3
- sethi %hi(trap_block), %g2
- or %g2, %lo(trap_block), %g2
- sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
- add %g2, %g3, %g2
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
- ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
+ ldxa [%g0] ASI_SCRATCHPAD, %g2
+ ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
+ ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
+ ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
sllx %g3, 16, %g3
or %g5, %g3, %g5
ba,pt %xcc, etrap
nop; \
.previous;
-/* Clobbers TMP, current address space PGD phys address into DEST. */
-#define TRAP_LOAD_PGD_PHYS(DEST, TMP) \
+#define TRAP_LOAD_TRAP_BLOCK(DEST, TMP) \
__GET_CPUID(TMP) \
sethi %hi(trap_block), DEST; \
sllx TMP, TRAP_BLOCK_SZ_SHIFT, TMP; \
or DEST, %lo(trap_block), DEST; \
add DEST, TMP, DEST; \
+
+/* Clobbers TMP, current address space PGD phys address into DEST. */
+#define TRAP_LOAD_PGD_PHYS(DEST, TMP) \
+ TRAP_LOAD_TRAP_BLOCK(DEST, TMP) \
ldx [DEST + TRAP_PER_CPU_PGD_PADDR], DEST;
/* Clobbers TMP, loads local processor's IRQ work area into DEST. */
/* Clobbers TMP, loads DEST with current thread info pointer. */
#define TRAP_LOAD_THREAD_REG(DEST, TMP) \
- __GET_CPUID(TMP) \
- sethi %hi(trap_block), DEST; \
- sllx TMP, TRAP_BLOCK_SZ_SHIFT, TMP; \
- or DEST, %lo(trap_block), DEST; \
- ldx [DEST + TMP], DEST;
+ TRAP_LOAD_TRAP_BLOCK(DEST, TMP) \
+ ldx [DEST + TRAP_PER_CPU_THREAD], DEST;
/* Given the current thread info pointer in THR, load the per-cpu
* area base of the current processor into DEST. REG1, REG2, and REG3 are
#else
-#define __GET_CPUID(REG) \
- mov 0, REG;
+#define TRAP_LOAD_TRAP_BLOCK(DEST, TMP) \
+ sethi %hi(trap_block), DEST; \
+ or DEST, %lo(trap_block), DEST; \
/* Uniprocessor versions, we know the cpuid is zero. */
#define TRAP_LOAD_PGD_PHYS(DEST, TMP) \
- sethi %hi(trap_block), DEST; \
- or DEST, %lo(trap_block), DEST; \
+ TRAP_LOAD_TRAP_BLOCK(DEST, TMP) \
ldx [DEST + TRAP_PER_CPU_PGD_PADDR], DEST;
#define TRAP_LOAD_IRQ_WORK(DEST, TMP) \
or DEST, %lo(__irq_work), DEST;
#define TRAP_LOAD_THREAD_REG(DEST, TMP) \
- sethi %hi(trap_block), DEST; \
- ldx [DEST + %lo(trap_block)], DEST;
+ TRAP_LOAD_TRAP_BLOCK(DEST, TMP) \
+ ldx [DEST + TRAP_PER_CPU_THREAD], DEST;
/* No per-cpu areas on uniprocessor, so no need to load DEST. */
#define LOAD_PER_CPU_BASE(DEST, THR, REG1, REG2, REG3)