GCC_ASM_EXPORT (ArmInvalidateInstructionCache)\r
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)\r
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)\r
+GCC_ASM_EXPORT (ArmCleanDataCacheEntryToPoUByMVA)\r
+GCC_ASM_EXPORT (ArmInvalidateInstructionCacheEntryToPoUByMVA)\r
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)\r
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)\r
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)\r
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)\r
-GCC_ASM_EXPORT (ArmDrainWriteBuffer)\r
GCC_ASM_EXPORT (ArmEnableMmu)\r
GCC_ASM_EXPORT (ArmDisableMmu)\r
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)\r
GCC_ASM_EXPORT (ArmEnableBranchPrediction)\r
GCC_ASM_EXPORT (ArmDisableBranchPrediction)\r
GCC_ASM_EXPORT (AArch64AllDataCachesOperation)\r
-GCC_ASM_EXPORT (AArch64PerformPoUDataCacheOperation)\r
GCC_ASM_EXPORT (ArmDataMemoryBarrier)\r
-GCC_ASM_EXPORT (ArmDataSyncronizationBarrier)\r
+GCC_ASM_EXPORT (ArmDataSynchronizationBarrier)\r
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)\r
GCC_ASM_EXPORT (ArmWriteVBar)\r
GCC_ASM_EXPORT (ArmReadVBar)\r
GCC_ASM_EXPORT (ArmReadIdPfr0)\r
GCC_ASM_EXPORT (ArmReadIdPfr1)\r
GCC_ASM_EXPORT (ArmWriteHcr)\r
+GCC_ASM_EXPORT (ArmReadHcr)\r
GCC_ASM_EXPORT (ArmReadCurrentEL)\r
+GCC_ASM_EXPORT (ArmReplaceLiveTranslationEntry)\r
+GCC_ASM_EXPORT (ArmReplaceLiveTranslationEntrySize)\r
\r
.set CTRL_M_BIT, (1 << 0)\r
.set CTRL_A_BIT, (1 << 1)\r
\r
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):\r
dc ivac, x0 // Invalidate single data cache line\r
- dsb sy\r
- isb\r
ret\r
\r
\r
ASM_PFX(ArmCleanDataCacheEntryByMVA):\r
dc cvac, x0 // Clean single data cache line\r
- dsb sy\r
- isb\r
+ ret\r
+\r
+\r
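+// Presumed prototype: VOID ArmCleanDataCacheEntryToPoUByMVA (IN UINTN Address)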
+ASM_PFX(ArmCleanDataCacheEntryToPoUByMVA):\r
+ dc cvau, x0 // Clean single data cache line to PoU\r
+ ret\r
+\r
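+// Presumed prototype: VOID ArmInvalidateInstructionCacheEntryToPoUByMVA (IN UINTN Address)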
+ASM_PFX(ArmInvalidateInstructionCacheEntryToPoUByMVA):\r
+ ic ivau, x0 // Invalidate single instruction cache line to PoU\r
ret\r
\r
\r
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):\r
dc civac, x0 // Clean and invalidate single data cache line\r
- dsb sy\r
- isb\r
ret\r
\r
\r
ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):\r
dc isw, x0 // Invalidate this line\r
- dsb sy\r
- isb\r
ret\r
\r
\r
ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):\r
dc cisw, x0 // Clean and Invalidate this line\r
- dsb sy\r
- isb\r
ret\r
\r
\r
ASM_PFX(ArmCleanDataCacheEntryBySetWay):\r
dc csw, x0 // Clean this line\r
- dsb sy\r
- isb\r
ret\r
\r
\r
// right to ease the access to CSSELR and the Set/Way operation.\r
cbz x3, L_Finished // No need to clean if LoC is 0\r
mov x10, #0 // Start clean at cache level 0\r
- b Loop1\r
-\r
-ASM_PFX(AArch64PerformPoUDataCacheOperation):\r
-// We can use regs 0-7 and 9-15 without having to save/restore.\r
-// Save our link register on the stack. - The stack must always be quad-word aligned\r
- str x30, [sp, #-16]!\r
- mov x1, x0 // Save Function call in x1\r
- mrs x6, clidr_el1 // Read EL1 CLIDR\r
- and x3, x6, #0x38000000 // Mask out all but Point of Unification (PoU)\r
- lsr x3, x3, #26 // Left align cache level value - the level is shifted by 1 to the\r
- // right to ease the access to CSSELR and the Set/Way operation.\r
- cbz x3, L_Finished // No need to clean if LoC is 0\r
- mov x10, #0 // Start clean at cache level 0\r
\r
Loop1:\r
add x2, x10, x10, lsr #1 // Work out 3x cachelevel for cache info\r
ret\r
\r
\r
-ASM_PFX(ArmDataSyncronizationBarrier):\r
-ASM_PFX(ArmDrainWriteBuffer):\r
+ASM_PFX(ArmDataSynchronizationBarrier):\r
dsb sy\r
ret\r
\r
msr hcr_el2, x0 // Write the passed HCR value\r
ret\r
\r
+// UINTN ArmReadHcr(VOID)\r
+ASM_PFX(ArmReadHcr):\r
+ mrs x0, hcr_el2\r
+ ret\r
+\r
// UINTN ArmReadCurrentEL(VOID)\r
ASM_PFX(ArmReadCurrentEL):\r
mrs x0, CurrentEL\r
ret\r
\r
+\r
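+ // __replace_entry: with the MMU at the given EL briefly disabled, store the
+ // new translation table entry (x1) at the address in x0, invalidate its
+ // cache line again and flush the TLBs. Clobbers x8/x9; assumes interrupts
+ // are masked and the old entry was already cleaned and invalidated by MVA.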
+ .macro __replace_entry, el\r
+\r
+ // disable the MMU\r
+ mrs x8, sctlr_el\el\r
+ bic x9, x8, #CTRL_M_BIT\r
+ msr sctlr_el\el, x9\r
+ isb\r
+\r
+ // write updated entry\r
+ str x1, [x0]\r
+\r
+ // invalidate again to get rid of stale clean cachelines that may\r
+ // have been filled speculatively since the last invalidate\r
+ dmb sy\r
+ dc ivac, x0\r
+\r
+ // flush the TLBs\r
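+ // VMALLE1 is the all-entries invalidate at EL1; ALLE2/ALLE3 at the higher ELs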
+ .if \el == 1\r
+ tlbi vmalle1\r
+ .else\r
+ tlbi alle\el\r
+ .endif\r
+ dsb sy\r
+\r
+ // re-enable the MMU\r
+ msr sctlr_el\el, x8\r
+ isb\r
+ .endm\r
+\r
+//VOID\r
+//ArmReplaceLiveTranslationEntry (\r
+// IN UINT64 *Entry,\r
+// IN UINT64 Value\r
+// )\r
+ASM_PFX(ArmReplaceLiveTranslationEntry):\r
+\r
+ // mask interrupts, preserving the current DAIF flags in x2
+ mrs x2, daif\r
+ msr daifset, #0xf\r
+ isb\r
+\r
+ // clean and invalidate first so that we don't clobber\r
+ // adjacent entries that are dirty in the caches\r
+ dc civac, x0\r
+ dsb ish\r
+\r
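+ // EL1_OR_EL2_OR_EL3 is expected to branch to local label 1, 2 or 3 below
+ // according to the current exception level, using x3 as a scratch register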
+ EL1_OR_EL2_OR_EL3(x3)\r
+1:__replace_entry 1\r
+ b 4f\r
+2:__replace_entry 2\r
+ b 4f\r
+3:__replace_entry 3\r
+\r
+4:msr daif, x2\r
+ ret\r
+\r
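+// Byte size of the ArmReplaceLiveTranslationEntry routine above, i.e. the
+// distance from its entry point to this point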
+ASM_PFX(ArmReplaceLiveTranslationEntrySize):\r
+ .long . - ArmReplaceLiveTranslationEntry\r
+\r
ASM_FUNCTION_REMOVE_IF_UNREFERENCED\r