;------------------------------------------------------------------------------
;
; Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution.  The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Abstract:
;
;   Provide macro for register save/restore using SSE registers
;
;------------------------------------------------------------------------------

;
; Define SSE instruction set
;
;
; SXMMN stores a 32-bit GPR into dword lane IDX (0-3) of an XMM register;
; LXMMN reads it back. Together they let code preserve general registers
; while no writable stack is available. REG's value is preserved by both.
;
IFDEF USE_SSE41_FLAG
;
; Define SSE macros using SSE 4.1 instructions
; (pinsrd/pextrd move a full dword in a single instruction)
;
; Store REG into dword lane (IDX AND 3) of XMM.
SXMMN       MACRO   XMM, IDX, REG
            pinsrd  XMM, REG, (IDX AND 3)
            ENDM

; Load dword lane (IDX AND 3) of XMM into REG.
LXMMN       MACRO   XMM, REG, IDX
            pextrd  REG, XMM, (IDX AND 3)
            ENDM
ELSE
;
; Define SSE macros using SSE 2 instructions
;
; Store REG into dword lane (IDX AND 3) of XMM using two 16-bit inserts:
; insert REG's low word, rotate REG by 16 to expose the high word, insert
; it into the adjacent word lane, then rotate back so REG is unchanged.
; NOTE: ror/rol clobber CF/OF, so this variant alters flags.
SXMMN       MACRO   XMM, IDX, REG
            pinsrw  XMM, REG, (IDX AND 3) * 2
            ror     REG, 16
            pinsrw  XMM, REG, (IDX AND 3) * 2 + 1
            rol     REG, 16
            ENDM

; Load dword lane IDX of XMM into REG without SSE4.1's pextrd:
; 1) pshufd rotates lane IDX into lane 0. The shuffle-control byte is
;    carved out of the constant 0E4E4E4h (0E4h = 11'10'01'00 = identity);
;    shifting it right by IDX*2 bits yields the control byte that rotates
;    the lanes right by IDX (IDX=0:0E4h, 1:39h, 2:4Eh, 3:93h).
; 2) movd copies lane 0 to REG.
; 3) A second pshufd applies the inverse rotation -- the extra
;    (IDX AND 1)*4 shift selects the inverse control byte for odd IDX
;    (39h <-> 93h; 0E4h and 4Eh are their own inverses) -- restoring
;    XMM's original lane order.
LXMMN       MACRO   XMM, REG, IDX
            pshufd  XMM, XMM, (0E4E4E4h SHR (IDX * 2)) AND 0FFh
            movd    REG, XMM
            pshufd  XMM, XMM, (0E4E4E4h SHR (IDX * 2 + (IDX AND 1) * 4)) AND 0FFh
            ENDM
ENDIF
49 | \r | |
50 | \r | |
;
; SAVE_REGS: preserve ebp/ebx/esi/edi in the four dword lanes of xmm7 and
; esp in xmm6 lane 0 (via SAVE_ESP), for stackless execution environments.
; Pair with LOAD_REGS to restore. In the SSE2 build SXMMN alters flags.
;
SAVE_REGS   MACRO
            SXMMN   xmm7, 0, ebp        ; xmm7 lane 0 = ebp
            SXMMN   xmm7, 1, ebx        ; xmm7 lane 1 = ebx
            SXMMN   xmm7, 2, esi        ; xmm7 lane 2 = esi
            SXMMN   xmm7, 3, edi        ; xmm7 lane 3 = edi
            SAVE_ESP                    ; xmm6 lane 0 = esp
            ENDM
58 | \r | |
;
; LOAD_REGS: restore ebp/ebx/esi/edi from the xmm7 lanes written by
; SAVE_REGS, then esp from xmm6 lane 0 (via LOAD_ESP). xmm7's contents
; are left unchanged (the SSE2 LXMMN restores lane order after each read).
;
LOAD_REGS   MACRO
            LXMMN   xmm7, ebp, 0        ; ebp = xmm7 lane 0
            LXMMN   xmm7, ebx, 1        ; ebx = xmm7 lane 1
            LXMMN   xmm7, esi, 2        ; esi = xmm7 lane 2
            LXMMN   xmm7, edi, 3        ; edi = xmm7 lane 3
            LOAD_ESP                    ; esp = xmm6 lane 0
            ENDM
66 | \r | |
;
; LOAD_EAX: restore eax from xmm6 lane 1 (counterpart of SAVE_EAX).
; xmm6 lane map: 0=esp, 1=eax, 2=edx, 3=ecx.
;
LOAD_EAX    MACRO
            LXMMN   xmm6, eax, 1
            ENDM
70 | \r | |
;
; SAVE_EAX: stash eax in xmm6 lane 1 (restore with LOAD_EAX).
; xmm6 lane map: 0=esp, 1=eax, 2=edx, 3=ecx.
;
SAVE_EAX    MACRO
            SXMMN   xmm6, 1, eax
            ENDM
74 | \r | |
;
; LOAD_EDX: restore edx from xmm6 lane 2 (counterpart of SAVE_EDX).
; xmm6 lane map: 0=esp, 1=eax, 2=edx, 3=ecx.
;
LOAD_EDX    MACRO
            LXMMN   xmm6, edx, 2
            ENDM
78 | \r | |
;
; SAVE_EDX: stash edx in xmm6 lane 2 (restore with LOAD_EDX).
; xmm6 lane map: 0=esp, 1=eax, 2=edx, 3=ecx.
;
SAVE_EDX    MACRO
            SXMMN   xmm6, 2, edx
            ENDM
82 | \r | |
;
; SAVE_ECX: stash ecx in xmm6 lane 3 (restore with LOAD_ECX).
; xmm6 lane map: 0=esp, 1=eax, 2=edx, 3=ecx.
;
SAVE_ECX    MACRO
            SXMMN   xmm6, 3, ecx
            ENDM
86 | \r | |
;
; LOAD_ECX: restore ecx from xmm6 lane 3 (counterpart of SAVE_ECX).
; xmm6 lane map: 0=esp, 1=eax, 2=edx, 3=ecx.
;
LOAD_ECX    MACRO
            LXMMN   xmm6, ecx, 3
            ENDM
90 | \r | |
;
; SAVE_ESP: stash esp in xmm6 lane 0 (restore with LOAD_ESP).
; xmm6 lane map: 0=esp, 1=eax, 2=edx, 3=ecx.
;
SAVE_ESP    MACRO
            SXMMN   xmm6, 0, esp
            ENDM
94 | \r | |
;
; LOAD_ESP: restore esp from xmm6 lane 0, where SAVE_ESP put it.
; Reads lane 0 directly with movd (lane 0 needs no shuffle), so it
; works identically in both the SSE4.1 and SSE2 builds and leaves
; xmm6 untouched.
;
LOAD_ESP    MACRO
            movd    esp, xmm6
            ENDM
98 | \r | |
;
; CR4 bits that the OS/firmware must set before SSE instructions and the
; XMM register file may be used (Intel SDM Vol. 3, CR4).
;
CR4_OSFXSR      EQU 00000200h           ; bit 9:  FXSAVE/FXRSTOR and SSE enabled
CR4_OSXMMEXCPT  EQU 00000400h           ; bit 10: unmasked SIMD FP exceptions supported

;
; ENABLE_SSE: enable SSE execution by setting CR4.OSFXSR and CR4.OSXMMEXCPT.
; Must run at CPL 0 (mov cr4 is privileged). Clobbers eax and flags.
; Previously used the bare literal 00000600h; same value, now named.
;
ENABLE_SSE  MACRO
            mov     eax, cr4
            or      eax, CR4_OSFXSR OR CR4_OSXMMEXCPT
            mov     cr4, eax
            ENDM