\r
@ Exported-symbol list for this translation unit. The '+'/'-' prefixes
@ below are unified-diff markers (this chunk appears to be a patch
@ fragment, not raw assembly): the *ToPoUByMVA cache helpers are being
@ added, while ArmDrainWriteBuffer and ArmReadIdMmfr0 are being removed.
GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
+GCC_ASM_EXPORT (ArmInvalidateInstructionCacheEntryToPoUByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
+GCC_ASM_EXPORT (ArmCleanDataCacheEntryToPoUByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
-GCC_ASM_EXPORT (ArmDrainWriteBuffer)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr1)
-GCC_ASM_EXPORT (ArmReadIdMmfr0)

.set DC_ON, (0x1<<2)  @ SCTLR bit 2  — data/unified cache enable
.set IC_ON, (0x1<<12) @ SCTLR bit 12 — instruction cache enable
bx lr\r
\r
\r
+@ ArmCleanDataCacheEntryToPoUByMVA
+@   In:  r0 = virtual address (MVA) within the cache line
+@   Cleans one data cache line to the Point of Unification
+@   (DCCMVAU operation, CP15 c7/c11/1). Returns nothing; r0 preserved.
+ASM_PFX(ArmCleanDataCacheEntryToPoUByMVA):
+ mcr p15, 0, r0, c7, c11, 1 @clean single data cache line to PoU
+ bx lr
+\r
+@ ArmInvalidateInstructionCacheEntryToPoUByMVA
+@   In:  r0 = virtual address (MVA) within the line
+@   Invalidates one instruction cache line to the Point of Unification
+@   (ICIMVAU, c7/c5/1) and the branch-predictor entry for the same
+@   address (BPIMVA, c7/c5/7), keeping I-cache and BP coherent after
+@   code modification.
+ASM_PFX(ArmInvalidateInstructionCacheEntryToPoUByMVA):
+ mcr p15, 0, r0, c7, c5, 1 @Invalidate single instruction cache line to PoU
+ mcr p15, 0, r0, c7, c5, 7 @Invalidate branch predictor
+ bx lr
+\r
@ ArmCleanInvalidateDataCacheEntryByMVA
@   In:  r0 = virtual address (MVA) within the cache line
@   Cleans and invalidates one data cache line by MVA
@   (DCCIMVAC operation, CP15 c7/c14/1).
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  mcr p15, 0, r0, c7, c14, 1 @clean and invalidate single data cache line
  bx lr
bx LR\r
\r
@ ArmDataSynchronizationBarrier — issues a full DSB.
@ NOTE(review): the '-' diff marker shows the legacy ArmDrainWriteBuffer
@ alias label being removed by this patch (its export entry is removed
@ above as well); callers must use ArmDataSynchronizationBarrier.
ASM_PFX(ArmDataSynchronizationBarrier):
-ASM_PFX(ArmDrainWriteBuffer):
  dsb
  bx LR
\r
@ NOTE(review): orphaned body — the owning label is not visible in this
@ fragment (presumably ArmEnableVFP; confirm against the full file).
@ Writes 0x40000000 (the EN bit) to FPEXC to enable Advanced SIMD/VFP.
@ The '+' diff lines add a Clang-compatible VMSR encoding of the same
@ FPEXC write; the raw 'mcr p10' form is kept for non-Clang assemblers.
  isb
# Set EN bit in FPEXC. The Advanced SIMD and VFP extensions are enabled and operate normally.
  mov r0, #0x40000000
+#ifndef __clang__
  mcr p10,#0x7,r0,c8,c0,#0
+#else
+ vmsr fpexc, r0
+#endif
  bx lr
\r
ASM_PFX(ArmCallWFI):
@ NOTE(review): the instruction below reads ID_PFR1 (MRC p15,0,c0,c1,1),
@ which does not match the ArmCallWFI name — lines appear to be elided
@ between this label and this body in the fragment (the mrc likely
@ belongs to ArmReadIdPfr1). Verify against the complete source file.
  mrc p15, 0, r0, c0, c1, 1 @ Read ID_PFR1 Register
  bx lr
\r
@ NOTE(review): '-' diff markers — ArmReadIdMmfr0 (read of the ID_MMFR0
@ register via CP15 c0/c1/4) is deleted by this patch, together with its
@ GCC_ASM_EXPORT entry removed near the top of the file.
-ASM_PFX(ArmReadIdMmfr0):
- mrc p15, 0, r0, c0, c1, 4 @ Read ID_MMFR0 Register
- bx lr
-
ASM_FUNCTION_REMOVE_IF_UNREFERENCED\r