;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution.  The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmmInit.nasm
;
; Abstract:
;
;   Functions for relocating SMBASE's for all processors
;
;-------------------------------------------------------------------------------
20 | \r | |
;
; Symbols provided by the C part of the driver.
;
extern ASM_PFX(SmmInitHandler)                  ; C routine called once per CPU from @LongMode below
extern ASM_PFX(mRebasedFlag)                    ; pointer to the per-CPU "rebased" flag byte
extern ASM_PFX(mSmmRelocationOriginalAddress)   ; original SMI handler address to resume at

;
; Patch points: per the "source operand will be patched" markers below, the
; C code rewrites the instruction-stream immediate that ends at each
; gPatch* label before this code is executed.
;
global ASM_PFX(gPatchSmmCr3)
global ASM_PFX(gPatchSmmCr4)
global ASM_PFX(gPatchSmmCr0)
global ASM_PFX(gPatchSmmInitStack)
global ASM_PFX(gcSmiInitGdtr)
global ASM_PFX(gcSmmInitSize)
global ASM_PFX(gcSmmInitTemplate)
global ASM_PFX(gPatchRebasedFlagAddr32)
global ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32)

; GDT selector of the long-mode code segment used by the far jump in
; SmmStartup (matches the GDT referenced by gcSmiInitGdtr).
%define LONG_MODE_CS 0x38

    DEFAULT REL
    SECTION .text
39 | \r | |
;
; GDTR image loaded by the "o32 lgdt" in SmmStartup.  Assembled as zeros;
; presumably filled in with the real limit/base by the C code before the
; startup code runs — NOTE(review): confirm against the driver's C source.
;
ASM_PFX(gcSmiInitGdtr):
            DW      0               ; GDT limit
            DQ      0               ; GDT base
43 | \r | |
global ASM_PFX(SmmStartup)

;
; SMBASE-relocation startup code.  Entered in 16-bit mode (jumped to by
; gcSmmInitTemplate with EBP = runtime address of SmmStartup); switches the
; CPU to long mode, calls SmmInitHandler(), then resumes via RSM.
;
; Each "mov eax/rsp, strict dword/qword 0" is a placeholder whose immediate
; ends at the ASM_PFX(gPatch*) label that follows it and is patched with
; the real CR3/CR4/CR0/stack value before use.
;
    BITS 16
ASM_PFX(SmmStartup):
    mov eax, 0x80000001             ; read capability (CPUID extended feature flags)
    cpuid
    mov ebx, edx                    ; rdmsr will change edx. keep it in ebx.
    mov eax, strict dword 0         ; source operand will be patched (CR3 value)
ASM_PFX(gPatchSmmCr3):
    mov cr3, eax
    ; Load the GDT.  EBP holds the runtime address of SmmStartup, so
    ; ebp + (label delta) finds gcSmiInitGdtr regardless of where this
    ; code was copied in SMRAM.
    o32 lgdt [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
    mov eax, strict dword 0         ; source operand will be patched (CR4 value)
ASM_PFX(gPatchSmmCr4):
    or ah, 2                        ; enable XMM registers access (CR4 bit 9, OSFXSR)
    mov cr4, eax
    mov ecx, 0xc0000080             ; IA32_EFER MSR
    rdmsr
    or ah, BIT0                     ; set LME bit (EFER bit 8)
    test ebx, BIT20                 ; check NXE capability (CPUID 80000001h EDX.NX)
    jz .1
    or ah, BIT3                     ; set NXE bit (EFER bit 11)
.1:
    wrmsr
    mov eax, strict dword 0         ; source operand will be patched (CR0 value)
ASM_PFX(gPatchSmmCr0):
    mov cr0, eax                    ; enable protected mode & paging
    ; Far jump activates long mode; the dword offset is patched to
    ; @LongMode by PiSmmCpuSmmInitFixupAddress().
    jmp LONG_MODE_CS : dword 0      ; offset will be patched to @LongMode
@PatchLongModeOffset:

    BITS 64
@LongMode:                          ; long-mode starts here
    mov rsp, strict qword 0         ; source operand will be patched (initial stack)
ASM_PFX(gPatchSmmInitStack):
    and sp, 0xfff0                  ; make sure RSP is 16-byte aligned
    ;
    ; According to X64 calling convention, XMM0~5 are volatile, we need to save
    ; them before calling C-function.
    ;
    sub rsp, 0x60
    movdqa [rsp], xmm0
    movdqa [rsp + 0x10], xmm1
    movdqa [rsp + 0x20], xmm2
    movdqa [rsp + 0x30], xmm3
    movdqa [rsp + 0x40], xmm4
    movdqa [rsp + 0x50], xmm5

    add rsp, -0x20                  ; presumably the 32-byte MS x64 shadow space — keeps 16-byte alignment too
    call ASM_PFX(SmmInitHandler)
    add rsp, 0x20

    ;
    ; Restore XMM0~5 after calling C-function.
    ;
    movdqa xmm0, [rsp]
    movdqa xmm1, [rsp + 0x10]
    movdqa xmm2, [rsp + 0x20]
    movdqa xmm3, [rsp + 0x30]
    movdqa xmm4, [rsp + 0x40]
    movdqa xmm5, [rsp + 0x50]

    rsm                             ; return from SMM
105 | \r | |
;
; 16-bit entry stub copied to each CPU's SMI entry point.  It loads the
; flat address of SmmStartup from @L1 (written there at runtime by
; PiSmmCpuSmmInitFixupAddress) into EBP and jumps to it; SmmStartup then
; uses EBP as its own base address.
;
    BITS 16
ASM_PFX(gcSmmInitTemplate):
    ; 0x8000 / 0x30000: NOTE(review): presumably the stub's offset within
    ; the SMRAM segment and the segment-base adjustment — confirm against
    ; the C code that copies this template.
    mov ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]
    sub ebp, 0x30000
    jmp ebp
@L1:
    DQ 0; ASM_PFX(SmmStartup)       ; patched with &SmmStartup by PiSmmCpuSmmInitFixupAddress

; Byte size of the template above, exported so the copier knows how much
; to copy.
ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)
115 | \r | |
;
; 64-bit relocation-complete stub: sets *mRebasedFlag = 1 to signal that
; this CPU's SMBASE relocation finished, then tail-jumps to the original
; SMI handler so normal SMI processing continues.
;
    BITS 64
global ASM_PFX(SmmRelocationSemaphoreComplete)
ASM_PFX(SmmRelocationSemaphoreComplete):
    push rax                        ; preserve RAX across the flag write
    mov rax, [ASM_PFX(mRebasedFlag)]
    mov byte [rax], 1               ; signal: rebase done
    pop rax
    jmp [ASM_PFX(mSmmRelocationOriginalAddress)]
124 | \r | |
;
; Semaphore code running in 32-bit mode
;
; 32-bit variant of SmmRelocationSemaphoreComplete.  The flag address and
; the indirect-jump pointer cannot be referenced symbolically here
; (NOTE(review): presumably because this code executes in 32-bit mode at a
; relocated address — confirm), so both are patched into the instruction
; stream at the gPatch* labels, each of which marks the END of the patched
; 32-bit field.
;
    BITS 32
global ASM_PFX(SmmRelocationSemaphoreComplete32)
ASM_PFX(SmmRelocationSemaphoreComplete32):
    push eax
    mov eax, strict dword 0         ; source operand will be patched (address of rebased flag)
ASM_PFX(gPatchRebasedFlagAddr32):
    mov byte [eax], 1               ; signal: rebase done
    pop eax
    ; Absolute indirect jump; the dword pointer operand is patched with
    ; the address holding the original SMI handler pointer.
    jmp dword [dword 0]             ; destination will be patched
ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32):
e21e355e | 138 | \r |
    BITS 64
;
; VOID PiSmmCpuSmmInitFixupAddress (VOID)
;
; Patches the two absolute references inside the 16-bit startup code that
; cannot be resolved at assembly time:
;   1. the dword offset of the far jump, so it lands on @LongMode;
;   2. the 64-bit address of SmmStartup stored at @L1 in the template.
;
global ASM_PFX(PiSmmCpuSmmInitFixupAddress)
ASM_PFX(PiSmmCpuSmmInitFixupAddress):
    lea rax, [@LongMode]
    ; The far jump "jmp LONG_MODE_CS : dword 0" ends at @PatchLongModeOffset;
    ; its 4-byte offset field sits 6 bytes before that point (4-byte offset
    ; followed by the 2-byte selector).
    lea rcx, [@PatchLongModeOffset - 6]
    mov dword [rcx], eax

    lea rax, [ASM_PFX(SmmStartup)]
    lea rcx, [@L1]
    mov qword [rcx], rax            ; store &SmmStartup into the template
    ret