Portions copyright (c) 2008 - 2009, Apple Inc. All rights reserved.<BR>\r
Copyright (c) Microsoft Corporation.<BR>\r
Portions Copyright (c) 2020, Hewlett Packard Enterprise Development LP. All rights reserved.<BR>\r
+Portions Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
\r
SPDX-License-Identifier: BSD-2-Clause-Patent\r
\r
\r
#endif // defined (MDE_CPU_RISCV64)\r
\r
+#if defined (MDE_CPU_LOONGARCH64)\r
+///\r
+/// The LoongArch architecture context buffer used by SetJump() and LongJump()\r
+///\r
+/// Holds the LoongArch callee-saved state: static registers s0-s8, the\r
+/// stack pointer, the frame pointer, and the return address.\r
+/// NOTE(review): SetJumpLongJump.S and SwitchStack.S address these fields\r
+/// by fixed 8-byte offsets (S0 at offset 0 ... RA at offset 88); keep the\r
+/// field order below in sync with those assembly files.\r
+///\r
+typedef struct {\r
+ UINT64 S0;\r
+ UINT64 S1;\r
+ UINT64 S2;\r
+ UINT64 S3;\r
+ UINT64 S4;\r
+ UINT64 S5;\r
+ UINT64 S6;\r
+ UINT64 S7;\r
+ UINT64 S8;\r
+ UINT64 SP;\r
+ UINT64 FP;\r
+ UINT64 RA;\r
+} BASE_LIBRARY_JUMP_BUFFER;\r
+\r
+/// All members are 64-bit, so natural 8-byte alignment is sufficient.\r
+#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT 8\r
+\r
+#endif // defined (MDE_CPU_LOONGARCH64)\r
+\r
//\r
// String Services\r
//\r
LIBRARY_CLASS = BaseLib\r
\r
#\r
-# VALID_ARCHITECTURES = IA32 X64 EBC ARM AARCH64 RISCV64\r
+# VALID_ARCHITECTURES = IA32 X64 EBC ARM AARCH64 RISCV64 LOONGARCH64\r
#\r
\r
[Sources]\r
RiscV64/RiscVInterrupt.S | GCC\r
RiscV64/FlushCache.S | GCC\r
\r
+[Sources.LOONGARCH64]\r
+ Math64.c\r
+ Unaligned.c\r
+ LoongArch64/InternalSwitchStack.c\r
+ LoongArch64/GetInterruptState.S | GCC\r
+ LoongArch64/EnableInterrupts.S | GCC\r
+ LoongArch64/DisableInterrupts.S | GCC\r
+ LoongArch64/Barrier.S | GCC\r
+ LoongArch64/MemoryFence.S | GCC\r
+ LoongArch64/CpuBreakpoint.S | GCC\r
+ LoongArch64/CpuPause.S | GCC\r
+ LoongArch64/SetJumpLongJump.S | GCC\r
+ LoongArch64/SwitchStack.S | GCC\r
+\r
[Packages]\r
MdePkg/MdePkg.dec\r
\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# LoongArch Barrier Operations\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+ASM_GLOBAL ASM_PFX(AsmDataBarrierLoongArch)\r
+ASM_GLOBAL ASM_PFX(AsmInstructionBarrierLoongArch)\r
+\r
+#\r
+# Data barrier operation for LoongArch.\r
+#\r
+# VOID AsmDataBarrierLoongArch (VOID);\r
+# In: none  Out: none  Clobbers: none\r
+# dbar with hint 0 is the full-barrier form — see the LoongArch\r
+# reference manual for the hint encoding.\r
+#\r
+ASM_PFX(AsmDataBarrierLoongArch):\r
+ dbar 0\r
+ jirl $zero, $ra, 0 # return ($rd = $zero discards the link)\r
+\r
+#\r
+# Instruction barrier operation for LoongArch.\r
+#\r
+# VOID AsmInstructionBarrierLoongArch (VOID);\r
+# In: none  Out: none  Clobbers: none\r
+#\r
+ASM_PFX(AsmInstructionBarrierLoongArch):\r
+ ibar 0\r
+ jirl $zero, $ra, 0 # return\r
+\r
+ .end\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# CpuBreakpoint for LoongArch\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+ASM_GLOBAL ASM_PFX(CpuBreakpoint)\r
+\r
+#/**\r
+# Generates a breakpoint on the CPU.\r
+#\r
+# Generates a breakpoint on the CPU. The breakpoint must be implemented such\r
+# that code can resume normal execution after the breakpoint.\r
+#\r
+# VOID CpuBreakpoint (VOID);\r
+# In: none  Out: none  Clobbers: none\r
+#\r
+#**/\r
+\r
+ASM_PFX(CpuBreakpoint):\r
+ break 3 # trap with break-code 3; the handler is expected to resume here\r
+ jirl $zero, $ra, 0 # return\r
+ .end\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# CpuPause for LoongArch\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+ASM_GLOBAL ASM_PFX(CpuPause)\r
+\r
+#/**\r
+# Requests CPU to pause for a short period of time.\r
+#\r
+# Requests CPU to pause for a short period of time. Typically used in MP\r
+# systems to prevent memory starvation while waiting for a spin lock.\r
+#\r
+# VOID CpuPause (VOID);\r
+# In: none  Out: none  Clobbers: none\r
+#\r
+# Implemented as a fixed run of nops — a short, architecture-neutral\r
+# delay rather than a dedicated pause/yield instruction.\r
+#\r
+#**/\r
+\r
+ASM_PFX(CpuPause):\r
+ nop\r
+ nop\r
+ nop\r
+ nop\r
+ nop\r
+ nop\r
+ nop\r
+ nop\r
+ jirl $zero, $ra, 0 # return\r
+ .end\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# LoongArch interrupt disable\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+ASM_GLOBAL ASM_PFX(DisableInterrupts)\r
+\r
+#/**\r
+# Disables CPU interrupts.\r
+#\r
+# VOID DisableInterrupts (VOID);\r
+# In: none  Out: none  Clobbers: $t0\r
+#\r
+# Clears the global interrupt-enable bit (mask 0x4, i.e. bit 2) in\r
+# CSR 0 — the CRMD.IE bit per the LoongArch reference manual.\r
+#**/\r
+\r
+ASM_PFX(DisableInterrupts):\r
+ li.w $t0, 0x4 # mask selecting the interrupt-enable bit\r
+ csrxchg $zero, $t0, 0x0 # write 0 under mask: clear IE in CSR 0\r
+ jirl $zero, $ra, 0 # return\r
+ .end\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# LoongArch interrupt enable\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+ASM_GLOBAL ASM_PFX(EnableInterrupts)\r
+\r
+#/**\r
+# Enables CPU interrupts.\r
+#\r
+# VOID EnableInterrupts (VOID);\r
+# In: none  Out: none  Clobbers: $t0\r
+#\r
+# Sets the global interrupt-enable bit (mask 0x4, i.e. bit 2) in\r
+# CSR 0 — the CRMD.IE bit per the LoongArch reference manual.\r
+#**/\r
+\r
+ASM_PFX(EnableInterrupts):\r
+ li.w $t0, 0x4 # mask selecting the interrupt-enable bit\r
+ csrxchg $t0, $t0, 0x0 # write 1 under mask: set IE in CSR 0\r
+ jirl $zero, $ra, 0 # return\r
+ .end\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# Get LoongArch interrupt status\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+ASM_GLOBAL ASM_PFX(GetInterruptState)\r
+\r
+#/**\r
+# Retrieves the current CPU interrupt state.\r
+#\r
+# Returns TRUE means interrupts are currently enabled. Otherwise,\r
+# returns FALSE.\r
+#\r
+# BOOLEAN GetInterruptState (VOID);\r
+# In: none  Out: $a0 = 1 (TRUE) or 0 (FALSE)  Clobbers: $t0, $t1\r
+#\r
+# Tests the interrupt-enable bit (mask 0x4) of CSR 0 (CRMD.IE per the\r
+# LoongArch reference manual).\r
+#\r
+# @retval TRUE CPU interrupts are enabled.\r
+# @retval FALSE CPU interrupts are disabled.\r
+#\r
+#**/\r
+\r
+ASM_PFX(GetInterruptState):\r
+ li.w $t1, 0x4 # mask selecting the interrupt-enable bit\r
+ csrrd $t0, 0x0 # read CSR 0\r
+ and $t0, $t0, $t1 # isolate the IE bit\r
+ beqz $t0, 1f # bit clear -> interrupts disabled\r
+ li.w $a0, 0x1 # return TRUE\r
+ b 2f\r
+1:\r
+ li.w $a0, 0x0 # return FALSE\r
+2:\r
+ jirl $zero, $ra, 0 # return\r
+ .end\r
--- /dev/null
+/** @file\r
+ SwitchStack() function for LoongArch.\r
+\r
+ Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+\r
+ SPDX-License-Identifier: BSD-2-Clause-Patent\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+//\r
+// Assembly worker (SwitchStack.S): restores the context in JumpBuffer —\r
+// switching to JumpBuffer->SP, loading $a0/$a1 from the two pointer slots\r
+// at the new stack top — and jumps to JumpBuffer->RA. It does not return.\r
+//\r
+UINTN\r
+EFIAPI\r
+InternalSwitchStackAsm (\r
+ IN BASE_LIBRARY_JUMP_BUFFER *JumpBuffer\r
+ );\r
+\r
+/**\r
+ Transfers control to a function starting with a new stack.\r
+\r
+ Transfers control to the function specified by EntryPoint using the\r
+ new stack specified by NewStack and passing in the parameters specified\r
+ by Context1 and Context2. Context1 and Context2 are optional and may\r
+ be NULL. The function EntryPoint must never return.\r
+\r
+ If EntryPoint is NULL, then ASSERT().\r
+ If NewStack is NULL, then ASSERT().\r
+\r
+ @param[in] EntryPoint A pointer to function to call with the new stack.\r
+ @param[in] Context1 A pointer to the context to pass into the EntryPoint\r
+ function.\r
+ @param[in] Context2 A pointer to the context to pass into the EntryPoint\r
+ function.\r
+ @param[in] NewStack A pointer to the new stack to use for the EntryPoint\r
+ function.\r
+ @param[in] Marker VA_LIST marker for the variable argument list.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+InternalSwitchStack (\r
+ IN SWITCH_STACK_ENTRY_POINT EntryPoint,\r
+ IN VOID *Context1 OPTIONAL,\r
+ IN VOID *Context2 OPTIONAL,\r
+ IN VOID *NewStack,\r
+ IN VA_LIST Marker\r
+ )\r
+\r
+{\r
+ BASE_LIBRARY_JUMP_BUFFER JumpBuffer;\r
+\r
+ // "Return" into EntryPoint: the asm worker jumps to the RA slot.\r
+ JumpBuffer.RA = (UINTN)EntryPoint;\r
+ // Reserve one pointer slot below the stack top, then two more for the\r
+ // contexts; SwitchStack.S reloads them into $a0/$a1 from SP[0]/SP[1].\r
+ // NOTE(review): the resulting SP is NewStack - 3*sizeof(VOID*), which\r
+ // is not 16-byte aligned when NewStack is — confirm against the\r
+ // LoongArch psABI stack-alignment requirement.\r
+ JumpBuffer.SP = (UINTN)NewStack - sizeof (VOID *);\r
+ JumpBuffer.SP -= sizeof (Context1) + sizeof (Context2);\r
+ ((VOID **)(UINTN)JumpBuffer.SP)[0] = Context1;\r
+ ((VOID **)(UINTN)JumpBuffer.SP)[1] = Context2;\r
+\r
+ // Does not return: control continues in EntryPoint on the new stack.\r
+ InternalSwitchStackAsm (&JumpBuffer);\r
+}\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# MemoryFence() for LoongArch\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+ASM_GLOBAL ASM_PFX(MemoryFence)\r
+\r
+#\r
+# Memory fence for LoongArch\r
+#\r
+# VOID MemoryFence (VOID);\r
+# Tail-calls the full data barrier (dbar 0) in Barrier.S; that routine\r
+# returns directly to our caller via $ra.\r
+# NOTE(review): the branch target omits ASM_PFX() — fine where the prefix\r
+# expands to nothing, but ASM_PFX(AsmDataBarrierLoongArch) would be more\r
+# consistent with the rest of the file; confirm against the toolchain.\r
+#\r
+ASM_PFX(MemoryFence):\r
+ b AsmDataBarrierLoongArch\r
+ .end\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# Set/Long jump for LoongArch\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+#define STORE st.d /* 64 bit mode regsave instruction */\r
+#define LOAD ld.d /* 64 bit mode regload instruction */\r
+#define RSIZE 8 /* 64 bit mode register size */\r
+\r
+ASM_GLOBAL ASM_PFX(SetJump)\r
+ASM_GLOBAL ASM_PFX(InternalLongJump)\r
+\r
+#/**\r
+# UINTN SetJump (BASE_LIBRARY_JUMP_BUFFER *JumpBuffer);\r
+# In:  $a0 = JumpBuffer\r
+# Out: $a0 = 0 on the direct return; a later InternalLongJump() resumes\r
+#      execution at our caller with $a0 = its Value argument.\r
+# Saves the callee-saved set (s0-s8, sp, fp, ra) at the fixed RSIZE*n\r
+# offsets of BASE_LIBRARY_JUMP_BUFFER — keep in sync with that struct.\r
+#**/\r
+ASM_PFX(SetJump):\r
+ STORE $s0, $a0, RSIZE * 0\r
+ STORE $s1, $a0, RSIZE * 1\r
+ STORE $s2, $a0, RSIZE * 2\r
+ STORE $s3, $a0, RSIZE * 3\r
+ STORE $s4, $a0, RSIZE * 4\r
+ STORE $s5, $a0, RSIZE * 5\r
+ STORE $s6, $a0, RSIZE * 6\r
+ STORE $s7, $a0, RSIZE * 7\r
+ STORE $s8, $a0, RSIZE * 8\r
+ STORE $sp, $a0, RSIZE * 9\r
+ STORE $fp, $a0, RSIZE * 10\r
+ STORE $ra, $a0, RSIZE * 11\r
+ li.w $a0, 0 # direct SetJump() return value is 0\r
+ jirl $zero, $ra, 0\r
+\r
+#/**\r
+# VOID InternalLongJump (BASE_LIBRARY_JUMP_BUFFER *JumpBuffer, UINTN Value);\r
+# In: $a0 = JumpBuffer, $a1 = Value\r
+# Restores the context saved by SetJump() and "returns" to SetJump()'s\r
+# caller with $a0 = Value. Never returns to its own caller.\r
+# RA is restored first; the remaining loads mirror SetJump()'s stores.\r
+#**/\r
+ASM_PFX(InternalLongJump):\r
+ LOAD $ra, $a0, RSIZE * 11\r
+ LOAD $s0, $a0, RSIZE * 0\r
+ LOAD $s1, $a0, RSIZE * 1\r
+ LOAD $s2, $a0, RSIZE * 2\r
+ LOAD $s3, $a0, RSIZE * 3\r
+ LOAD $s4, $a0, RSIZE * 4\r
+ LOAD $s5, $a0, RSIZE * 5\r
+ LOAD $s6, $a0, RSIZE * 6\r
+ LOAD $s7, $a0, RSIZE * 7\r
+ LOAD $s8, $a0, RSIZE * 8\r
+ LOAD $sp, $a0, RSIZE * 9\r
+ LOAD $fp, $a0, RSIZE * 10\r
+ move $a0, $a1 # hand Value back as SetJump()'s second return\r
+ jirl $zero, $ra, 0 # jump to the saved return address\r
+ .end\r
--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# InternalSwitchStackAsm for LoongArch\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+#define STORE st.d /* 64 bit mode regsave instruction */\r
+#define LOAD ld.d /* 64 bit mode regload instruction */\r
+#define RSIZE 8 /* 64 bit mode register size */\r
+\r
+ASM_GLOBAL ASM_PFX(InternalSwitchStackAsm)\r
+\r
+/**\r
+ This allows the caller to switch the stack and goes to the new entry point\r
+\r
+ @param JumpBuffer A pointer to CPU context buffer.\r
+\r
+ In: $a0 = JumpBuffer, prepared by InternalSwitchStack():\r
+   RA = entry point, SP = new stack top holding Context1 at SP[0] and\r
+   Context2 at SP[1]. Offsets mirror BASE_LIBRARY_JUMP_BUFFER.\r
+ Jumps to RA with $a0 = Context1, $a1 = Context2; never returns.\r
+**/\r
+\r
+ASM_PFX(InternalSwitchStackAsm):\r
+ LOAD $ra, $a0, RSIZE * 11\r
+ LOAD $s0, $a0, RSIZE * 0\r
+ LOAD $s1, $a0, RSIZE * 1\r
+ LOAD $s2, $a0, RSIZE * 2\r
+ LOAD $s3, $a0, RSIZE * 3\r
+ LOAD $s4, $a0, RSIZE * 4\r
+ LOAD $s5, $a0, RSIZE * 5\r
+ LOAD $s6, $a0, RSIZE * 6\r
+ LOAD $s7, $a0, RSIZE * 7\r
+ LOAD $s8, $a0, RSIZE * 8\r
+ LOAD $sp, $a0, RSIZE * 9 # switch to the new stack\r
+ LOAD $fp, $a0, RSIZE * 10\r
+ LOAD $a0, $sp, 0 # $a0 = Context1 (stored at new SP[0])\r
+ LOAD $a1, $sp, 8 # $a1 = Context2 (stored at new SP[1])\r
+ jirl $zero, $ra, 0 # jump to the entry point; no return\r
+ .end\r