--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# LoongArch synchronization ASM functions.\r
+#\r
+# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+#\r
+# SPDX-License-Identifier: BSD-2-Clause-Patent\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+ASM_GLOBAL ASM_PFX(AsmInternalSyncCompareExchange16)\r
+ASM_GLOBAL ASM_PFX(AsmInternalSyncCompareExchange32)\r
+ASM_GLOBAL ASM_PFX(AsmInternalSyncCompareExchange64)\r
+ASM_GLOBAL ASM_PFX(AsmInternalSyncIncrement)\r
+ASM_GLOBAL ASM_PFX(AsmInternalSyncDecrement)\r
+\r
+/**
+UINT32
+EFIAPI
+AsmInternalSyncCompareExchange16 (
+ IN volatile UINT32 *Ptr32,
+ IN UINT64 Mask,
+ IN UINT64 LocalCompareValue,
+ IN UINT64 LocalExchangeValue
+ )
+**/
+# LL/SC loop that compare-exchanges a 16-bit field embedded in the naturally
+# aligned 32-bit word at $a0.  The C caller (InternalSyncCompareExchange16)
+# pre-shifts Mask ($a1), LocalCompareValue ($a2) and LocalExchangeValue ($a3)
+# to the field's bit position inside that word.  Returns in $a0 the 32-bit
+# word as observed by ll.w; the caller extracts the 16-bit field from it.
+ASM_PFX(AsmInternalSyncCompareExchange16):
+1:
+# Load-linked the containing 32-bit word.
+ ll.w $t0, $a0, 0x0
+# Isolate the 16-bit field and compare it with the expected (shifted) value.
+ and $t1, $t0, $a1
+ bne $t1, $a2, 2f
+# Field matched: merge the new field value into the untouched bits.
+ andn $t1, $t0, $a1
+ or $t1, $t1, $a3
+# Store-conditional: $t1 becomes 1 on success, 0 on failure.
+ sc.w $t1, $a0, 0x0
+# Retry the whole ll/sc sequence if the conditional store failed.
+ beqz $t1, 1b
+ b 3f
+2:
+# Compare failed, no store was performed; dbar 0 acts as a memory barrier.
+ dbar 0
+3:
+# Return the originally loaded 32-bit word.
+ move $a0, $t0
+ jirl $zero, $ra, 0
+\r
+/**
+UINT32
+EFIAPI
+AsmInternalSyncCompareExchange32 (
+ IN volatile UINT32 *Value,
+ IN UINT64 CompareValue,
+ IN UINT64 ExchangeValue
+ )
+**/
+# LL/SC compare-exchange of a 32-bit value.  Returns in $a0 the value
+# observed by ll.w (equal to CompareValue when the exchange succeeded).
+#
+# BUG FIX: the previous sequence reused $t0 both for the loaded value and
+# as the sc.w scratch/status register, so a successful exchange returned
+# the sc.w status (1) instead of the original value.  Use $t1 as the store
+# scratch, exactly as the 16-bit variant does.
+ASM_PFX(AsmInternalSyncCompareExchange32):
+1:
+# Load-linked the current value; $t0 is preserved for the return value.
+ ll.w $t0, $a0, 0x0
+ bne $t0, $a1, 2f
+ move $t1, $a2
+# Store-conditional: $t1 becomes 1 on success, 0 on failure.
+ sc.w $t1, $a0, 0x0
+# Retry the whole ll/sc sequence if the conditional store failed.
+ beqz $t1, 1b
+ b 3f
+2:
+# Compare failed, no store was performed; dbar 0 acts as a memory barrier.
+ dbar 0
+3:
+ move $a0, $t0
+ jirl $zero, $ra, 0
+\r
+/**
+UINT64
+EFIAPI
+AsmInternalSyncCompareExchange64 (
+ IN volatile UINT64 *Value,
+ IN UINT64 CompareValue,
+ IN UINT64 ExchangeValue
+ )
+**/
+# LL/SC compare-exchange of a 64-bit value.  Returns in $a0 the value
+# observed by ll.d (equal to CompareValue when the exchange succeeded).
+#
+# BUG FIX: the previous sequence reused $t0 both for the loaded value and
+# as the sc.d scratch/status register, so a successful exchange returned
+# the sc.d status (1) instead of the original value.  Use $t1 as the store
+# scratch, exactly as the 16-bit variant does.
+ASM_PFX(AsmInternalSyncCompareExchange64):
+1:
+# Load-linked the current value; $t0 is preserved for the return value.
+ ll.d $t0, $a0, 0x0
+ bne $t0, $a1, 2f
+ move $t1, $a2
+# Store-conditional: $t1 becomes 1 on success, 0 on failure.
+ sc.d $t1, $a0, 0x0
+# Retry the whole ll/sc sequence if the conditional store failed.
+ beqz $t1, 1b
+ b 3f
+2:
+# Compare failed, no store was performed; dbar 0 acts as a memory barrier.
+ dbar 0
+3:
+ move $a0, $t0
+ jirl $zero, $ra, 0
+\r
+/**
+UINT32
+EFIAPI
+AsmInternalSyncIncrement (
+ IN volatile UINT32 *Value
+ )
+**/
+# Atomically adds 1 to *Value with amadd.w, then returns a plain re-load of
+# the location.  Because the return value comes from a separate ld.w after
+# the atomic add, it is not guaranteed to be the incremented value under
+# contention (the C wrapper documents the return value as not MP safe).
+ASM_PFX(AsmInternalSyncIncrement):
+ move $t0, $a0
+ dbar 0
+# NOTE(review): this loaded value is immediately overwritten by amadd.w
+# (which deposits the old memory value into $t1) — the load looks redundant.
+ ld.w $t1, $t0, 0x0
+ li.w $t2, 1
+# amadd.w: atomically $t1 <- *($t0); *($t0) <- old value + $t2.
+ amadd.w $t1, $t2, $t0
+
+# Re-load and return the (possibly since-modified) stored value.
+ ld.w $a0, $t0, 0x0
+ jirl $zero, $ra, 0
+\r
+/**
+UINT32
+EFIAPI
+AsmInternalSyncDecrement (
+ IN volatile UINT32 *Value
+ )
+**/
+# Atomically subtracts 1 from *Value with amadd.w (addend -1), then returns
+# a plain re-load of the location.  As with AsmInternalSyncIncrement, the
+# returned value comes from a separate ld.w after the atomic add and is not
+# guaranteed to be the decremented value under contention.
+ASM_PFX(AsmInternalSyncDecrement):
+ move $t0, $a0
+ dbar 0
+# NOTE(review): this loaded value is immediately overwritten by amadd.w
+# (which deposits the old memory value into $t1) — the load looks redundant.
+ ld.w $t1, $t0, 0x0
+ li.w $t2, -1
+# amadd.w: atomically $t1 <- *($t0); *($t0) <- old value + $t2.
+ amadd.w $t1, $t2, $t0
+
+# Re-load and return the (possibly since-modified) stored value.
+ ld.w $a0, $t0, 0x0
+ jirl $zero, $ra, 0
+.end
--- /dev/null
+/** @file\r
+ LoongArch synchronization functions.\r
+\r
+ Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>\r
+\r
+ SPDX-License-Identifier: BSD-2-Clause-Patent\r
+\r
+**/\r
+\r
+#include <Library/DebugLib.h>\r
+\r
+/**
+  Assembly worker: compare-exchange a 16-bit field embedded in the naturally
+  aligned 32-bit word at the given pointer.  Mask, compare value and exchange
+  value must already be shifted to the field's bit position within the word.
+
+  @return The original 32-bit word before the exchange attempt.
+**/
+UINT32
+EFIAPI
+AsmInternalSyncCompareExchange16 (
+ IN volatile UINT32 *,
+ IN UINT64,
+ IN UINT64,
+ IN UINT64
+ );
+
+/**
+  Assembly worker: atomic compare-exchange of a 32-bit value.
+
+  @return The original 32-bit value before the exchange attempt.
+**/
+UINT32
+EFIAPI
+AsmInternalSyncCompareExchange32 (
+ IN volatile UINT32 *,
+ IN UINT64,
+ IN UINT64
+ );
+
+/**
+  Assembly worker: atomic compare-exchange of a 64-bit value.
+
+  @return The original 64-bit value before the exchange attempt.
+**/
+UINT64
+EFIAPI
+AsmInternalSyncCompareExchange64 (
+ IN volatile UINT64 *,
+ IN UINT64,
+ IN UINT64
+ );
+
+/**
+  Assembly worker: atomically increment a 32-bit value.
+
+  @return A re-load of the value after the increment (not MP safe).
+**/
+UINT32
+EFIAPI
+AsmInternalSyncIncrement (
+ IN volatile UINT32 *
+ );
+
+/**
+  Assembly worker: atomically decrement a 32-bit value.
+
+  @return A re-load of the value after the decrement (not MP safe).
+**/
+UINT32
+EFIAPI
+AsmInternalSyncDecrement (
+ IN volatile UINT32 *
+ );
+\r
+/**
+  Performs an atomic compare exchange operation on a 16-bit
+  unsigned integer.
+
+  Performs an atomic compare exchange operation on the 16-bit
+  unsigned integer specified by Value. If Value is equal to
+  CompareValue, then Value is set to ExchangeValue and
+  CompareValue is returned. If Value is not equal to
+  CompareValue, then Value is returned. The compare exchange
+  operation must be performed using MP safe mechanisms.
+
+  @param[in] Value         A pointer to the 16-bit value for the
+                           compare exchange operation.
+  @param[in] CompareValue  16-bit value used in compare operation.
+  @param[in] ExchangeValue 16-bit value used in exchange operation.
+
+  @return The original *Value before exchange.
+
+**/
+UINT16
+EFIAPI
+InternalSyncCompareExchange16 (
+  IN volatile UINT16 *Value,
+  IN UINT16 CompareValue,
+  IN UINT16 ExchangeValue
+  )
+{
+  UINT32 RetValue;
+  UINT32 Shift;
+  UINT64 Mask;
+  UINT64 LocalCompareValue;
+  UINT64 LocalExchangeValue;
+  volatile UINT32 *Ptr32;
+
+  /*
+   * Check that the 16-bit operand is naturally aligned.  BUG FIX: the
+   * previous code used sizeof (Value) — the pointer size (8) — which
+   * rejected valid 2-byte-aligned pointers; the operand size is intended.
+   */
+  ASSERT (!((UINT64)Value & (sizeof (*Value) - 1)));
+
+  /* Mask inputs to the operand width (the low 16 bits). */
+  Mask = 0xFFFF;
+  LocalCompareValue = ((UINT64)CompareValue) & Mask;
+  LocalExchangeValue = ((UINT64)ExchangeValue) & Mask;
+
+  /*
+   * Calculate a shift & mask that correspond to the value we wish to
+   * compare & exchange within the naturally aligned 4 byte integer
+   * that includes it.
+   */
+  Shift = (UINT64)Value & 0x3;
+  Shift *= 8; /* BITS_PER_BYTE */
+  LocalCompareValue <<= Shift;
+  LocalExchangeValue <<= Shift;
+  Mask <<= Shift;
+
+  /*
+   * Calculate a pointer to the naturally aligned 4 byte integer that
+   * includes our byte of interest; the assembly worker performs the
+   * ll.w/sc.w loop on that whole word.
+   */
+  Ptr32 = (volatile UINT32 *)((UINT64)Value & ~0x3ULL);
+
+  RetValue = AsmInternalSyncCompareExchange16 (
+               Ptr32,
+               Mask,
+               LocalCompareValue,
+               LocalExchangeValue
+               );
+
+  /* Extract the 16-bit field from the returned 32-bit word. */
+  return (UINT16)((RetValue & Mask) >> Shift);
+}
+\r
+/**
+  Performs an atomic compare exchange operation on a 32-bit
+  unsigned integer.
+
+  Atomically compares the 32-bit unsigned integer at Value with
+  CompareValue and, when they are equal, stores ExchangeValue there.
+  The value originally read from *Value is returned (equal to
+  CompareValue when the exchange took place).  The operation is
+  delegated to an MP-safe assembly worker.
+
+  @param[in] Value         A pointer to the 32-bit value for the
+                           compare exchange operation.
+  @param[in] CompareValue  32-bit value used in compare operation.
+  @param[in] ExchangeValue 32-bit value used in exchange operation.
+
+  @return The original *Value before exchange.
+
+**/
+UINT32
+EFIAPI
+InternalSyncCompareExchange32 (
+  IN volatile UINT32 *Value,
+  IN UINT32 CompareValue,
+  IN UINT32 ExchangeValue
+  )
+{
+  /* Thin wrapper: the LL/SC loop lives in the assembly worker. */
+  return AsmInternalSyncCompareExchange32 (Value, CompareValue, ExchangeValue);
+}
+\r
+/**
+  Performs an atomic compare exchange operation on a 64-bit unsigned integer.
+
+  Atomically compares the 64-bit unsigned integer at Value with CompareValue
+  and, when they are equal, stores ExchangeValue there.  The value originally
+  read from *Value is returned (equal to CompareValue when the exchange took
+  place).  The operation is delegated to an MP-safe assembly worker.
+
+  @param[in] Value         A pointer to the 64-bit value for the compare exchange
+                           operation.
+  @param[in] CompareValue  64-bit value used in compare operation.
+  @param[in] ExchangeValue 64-bit value used in exchange operation.
+
+  @return The original *Value before exchange.
+
+**/
+UINT64
+EFIAPI
+InternalSyncCompareExchange64 (
+  IN volatile UINT64 *Value,
+  IN UINT64 CompareValue,
+  IN UINT64 ExchangeValue
+  )
+{
+  /* Thin wrapper: the LL/SC loop lives in the assembly worker. */
+  return AsmInternalSyncCompareExchange64 (Value, CompareValue, ExchangeValue);
+}
+\r
+/**
+  Performs an atomic increment of an 32-bit unsigned integer.
+
+  Atomically adds one to the 32-bit unsigned integer at Value using an
+  MP-safe assembly worker and returns the resulting value.  The state of
+  the return value itself is not guaranteed to be MP safe.
+
+  @param[in] Value A pointer to the 32-bit value to increment.
+
+  @return The incremented value.
+
+**/
+UINT32
+EFIAPI
+InternalSyncIncrement (
+  IN volatile UINT32 *Value
+  )
+{
+  UINT32 Result;
+
+  Result = AsmInternalSyncIncrement (Value);
+
+  return Result;
+}
+\r
+/**
+  Performs an atomic decrement of an 32-bit unsigned integer.
+
+  Atomically subtracts one from the 32-bit unsigned integer at Value using
+  an MP-safe assembly worker and returns the resulting value.  The state of
+  the return value itself is not guaranteed to be MP safe.
+
+  @param[in] Value A pointer to the 32-bit value to decrement.
+
+  @return The decrement value.
+
+**/
+UINT32
+EFIAPI
+InternalSyncDecrement (
+  IN volatile UINT32 *Value
+  )
+{
+  UINT32 Result;
+
+  Result = AsmInternalSyncDecrement (Value);
+
+  return Result;
+}