--- /dev/null
+;------------------------------------------------------------------------------\r
+;\r
+; Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>\r
+; This program and the accompanying materials\r
+; are licensed and made available under the terms and conditions of the BSD License\r
+; which accompanies this distribution. The full text of the license may be found at\r
+; http://opensource.org/licenses/bsd-license.php.\r
+;\r
+; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+;\r
+; Abstract:\r
+;\r
+; Provide macro for register save/restore using SSE registers\r
+;\r
+;------------------------------------------------------------------------------\r
+\r
+;\r
+; Select which SSE instruction-set variant of the SXMMN/LXMMN macros to define\r
+;\r
+IFDEF USE_SSE41_FLAG\r
+;\r
+; Define SSE macros using SSE 4.1 instructions\r
+;\r
+;\r
+; SXMMN: store 32-bit register REG into dword lane (IDX AND 3) of XMM\r
+; using the SSE 4.1 pinsrd instruction. The other lanes of XMM and the\r
+; value of REG are preserved.\r
+;\r
+SXMMN MACRO XMM, IDX, REG\r
+ pinsrd XMM, REG, (IDX AND 3)\r
+ ENDM\r
+\r
+;\r
+; LXMMN: load 32-bit register REG from dword lane (IDX AND 3) of XMM\r
+; using the SSE 4.1 pextrd instruction. XMM is left unchanged.\r
+;\r
+LXMMN MACRO XMM, REG, IDX\r
+ pextrd REG, XMM, (IDX AND 3)\r
+ ENDM\r
+ELSE\r
+;\r
+; Define SSE macros using SSE 2 instructions\r
+;\r
+;\r
+; SXMMN: store 32-bit register REG into dword lane (IDX AND 3) of XMM\r
+; using only SSE2. pinsrw inserts 16 bits at a time: the low word goes\r
+; into word lane (IDX AND 3)*2, then REG is rotated by 16 so its high\r
+; word can be inserted into word lane (IDX AND 3)*2 + 1, and rotated\r
+; back, leaving REG unchanged. Clobbers flags (ror/rol update CF/OF).\r
+;\r
+SXMMN MACRO XMM, IDX, REG\r
+ pinsrw XMM, REG, (IDX AND 3) * 2\r
+ ror REG, 16\r
+ pinsrw XMM, REG, (IDX AND 3) * 2 + 1\r
+ rol REG, 16\r
+ ENDM\r
+\r
+;\r
+; LXMMN: load 32-bit register REG from dword lane IDX of XMM using only\r
+; SSE2. 0E4h is the identity shuffle control; shifting 0E4E4E4h right by\r
+; (IDX * 2) produces a rotate-right-by-IDX shuffle that moves lane IDX\r
+; into lane 0, where movd can read it. The second pshufd uses the extra\r
+; (IDX AND 1) * 4 shift to select the inverse shuffle, restoring XMM's\r
+; original lane order. Valid for IDX in 0..3.\r
+;\r
+LXMMN MACRO XMM, REG, IDX\r
+ pshufd XMM, XMM, (0E4E4E4h SHR (IDX * 2)) AND 0FFh\r
+ movd REG, XMM\r
+ pshufd XMM, XMM, (0E4E4E4h SHR (IDX * 2 + (IDX AND 1) * 4)) AND 0FFh\r
+ ENDM\r
+ENDIF\r
+\r
+\r
+;\r
+; SAVE_REGS: save ebp, ebx, esi and edi into dword lanes 0-3 of xmm7,\r
+; then save esp into xmm6 lane 0 via SAVE_ESP (defined further below;\r
+; MASM macros are resolved at expansion time, so the forward reference\r
+; is fine). No stack or memory is touched, allowing use before RAM is\r
+; available. Pair with LOAD_REGS to restore.\r
+;\r
+SAVE_REGS MACRO\r
+ SXMMN xmm7, 0, ebp\r
+ SXMMN xmm7, 1, ebx\r
+ SXMMN xmm7, 2, esi\r
+ SXMMN xmm7, 3, edi\r
+ SAVE_ESP\r
+ ENDM\r
+\r
+;\r
+; LOAD_REGS: restore ebp, ebx, esi and edi from dword lanes 0-3 of\r
+; xmm7, then restore esp from xmm6 via LOAD_ESP. Inverse of SAVE_REGS;\r
+; the lane assignments must match it exactly.\r
+;\r
+LOAD_REGS MACRO\r
+ LXMMN xmm7, ebp, 0\r
+ LXMMN xmm7, ebx, 1\r
+ LXMMN xmm7, esi, 2\r
+ LXMMN xmm7, edi, 3\r
+ LOAD_ESP\r
+ ENDM\r
+\r
+;\r
+; LOAD_EAX: restore eax from dword lane 1 of xmm6.\r
+; xmm6 lane map: 0 = esp, 1 = eax, 2 = edx, 3 = ecx.\r
+;\r
+LOAD_EAX MACRO\r
+ LXMMN xmm6, eax, 1\r
+ ENDM\r
+\r
+;\r
+; SAVE_EAX: save eax into dword lane 1 of xmm6 (see xmm6 lane map:\r
+; 0 = esp, 1 = eax, 2 = edx, 3 = ecx). Restore with LOAD_EAX.\r
+;\r
+SAVE_EAX MACRO\r
+ SXMMN xmm6, 1, eax\r
+ ENDM\r
+\r
+;\r
+; LOAD_EDX: restore edx from dword lane 2 of xmm6 (stored by SAVE_EDX).\r
+;\r
+LOAD_EDX MACRO\r
+ LXMMN xmm6, edx, 2\r
+ ENDM\r
+\r
+;\r
+; SAVE_EDX: save edx into dword lane 2 of xmm6. Restore with LOAD_EDX.\r
+;\r
+SAVE_EDX MACRO\r
+ SXMMN xmm6, 2, edx\r
+ ENDM\r
+\r
+;\r
+; SAVE_ECX: save ecx into dword lane 3 of xmm6. Restore with LOAD_ECX.\r
+;\r
+SAVE_ECX MACRO\r
+ SXMMN xmm6, 3, ecx\r
+ ENDM\r
+\r
+;\r
+; LOAD_ECX: restore ecx from dword lane 3 of xmm6 (stored by SAVE_ECX).\r
+;\r
+LOAD_ECX MACRO\r
+ LXMMN xmm6, ecx, 3\r
+ ENDM\r
+\r
+;\r
+; SAVE_ESP: save esp into dword lane 0 of xmm6. Restore with LOAD_ESP.\r
+;\r
+SAVE_ESP MACRO\r
+ SXMMN xmm6, 0, esp\r
+ ENDM\r
+\r
+;\r
+; LOAD_ESP: restore esp from dword lane 0 of xmm6 (stored by SAVE_ESP).\r
+; movd reads lane 0 directly, so no LXMMN shuffle is needed and xmm6 is\r
+; left unchanged.\r
+;\r
+LOAD_ESP MACRO\r
+ movd esp, xmm6\r
+ ENDM\r
+\r
+;\r
+; ENABLE_SSE: enable SSE instruction execution by setting\r
+; CR4.OSFXSR (bit 9) and CR4.OSXMMEXCPT (bit 10); 00000600h is\r
+; exactly those two bits. Clobbers eax and flags. Must run at CPL0\r
+; (CR4 access is privileged); required before the macros above touch\r
+; xmm6/xmm7.\r
+;\r
+ENABLE_SSE MACRO\r
+ mov eax, cr4\r
+ or eax, 00000600h\r
+ mov cr4, eax\r
+ ENDM\r