]> git.proxmox.com Git - mirror_edk2.git/blame - UefiCpuPkg/PiSmmCpuDxeSmm/X64/SmmInit.nasm
UefiCpuPkg/PiSmmCpuDxeSmm: [CVE-2017-5715] Stuff RSB before RSM
[mirror_edk2.git] / UefiCpuPkg / PiSmmCpuDxeSmm / X64 / SmmInit.nasm
CommitLineData
;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution.  The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmmInit.nasm
;
; Abstract:
;
;   Functions for relocating SMBASE's for all processors
;
;-------------------------------------------------------------------------------
02f7fd15
HW
21%include "StuffRsb.inc"\r
22\r
ba15b971
LG
23extern ASM_PFX(SmmInitHandler)\r
24extern ASM_PFX(mRebasedFlag)\r
25extern ASM_PFX(mSmmRelocationOriginalAddress)\r
26\r
6b0841c1 27global ASM_PFX(gPatchSmmCr3)\r
351b49c1 28global ASM_PFX(gPatchSmmCr4)\r
f0053e83 29global ASM_PFX(gPatchSmmCr0)\r
5830d2c3 30global ASM_PFX(gPatchSmmInitStack)\r
ba15b971
LG
31global ASM_PFX(gcSmiInitGdtr)\r
32global ASM_PFX(gcSmmInitSize)\r
33global ASM_PFX(gcSmmInitTemplate)\r
9686a467
LE
34global ASM_PFX(gPatchRebasedFlagAddr32)\r
35global ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32)\r
ba15b971 36\r
456c4cca
LE
37%define LONG_MODE_CS 0x38\r
38\r
ba15b971
LG
39 DEFAULT REL\r
40 SECTION .text\r
41\r
42ASM_PFX(gcSmiInitGdtr):\r
43 DW 0\r
44 DQ 0\r
45\r
46global ASM_PFX(SmmStartup)\r
00c5eede
LE
47\r
48BITS 16\r
ba15b971 49ASM_PFX(SmmStartup):\r
d4d87596
JW
50 mov eax, 0x80000001 ; read capability\r
51 cpuid\r
d4d87596 52 mov ebx, edx ; rdmsr will change edx. keep it in ebx.\r
6b0841c1
LE
53 mov eax, strict dword 0 ; source operand will be patched\r
54ASM_PFX(gPatchSmmCr3):\r
00c5eede
LE
55 mov cr3, eax\r
56o32 lgdt [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]\r
351b49c1
LE
57 mov eax, strict dword 0 ; source operand will be patched\r
58ASM_PFX(gPatchSmmCr4):\r
ba15b971 59 or ah, 2 ; enable XMM registers access\r
00c5eede 60 mov cr4, eax\r
ba15b971
LG
61 mov ecx, 0xc0000080 ; IA32_EFER MSR\r
62 rdmsr\r
d4d87596 63 or ah, BIT0 ; set LME bit\r
d4d87596
JW
64 test ebx, BIT20 ; check NXE capability\r
65 jz .1\r
66 or ah, BIT3 ; set NXE bit\r
67.1:\r
ba15b971 68 wrmsr\r
f0053e83
LE
69 mov eax, strict dword 0 ; source operand will be patched\r
70ASM_PFX(gPatchSmmCr0):\r
00c5eede 71 mov cr0, eax ; enable protected mode & paging\r
456c4cca
LE
72 jmp LONG_MODE_CS : dword 0 ; offset will be patched to @LongMode\r
73@PatchLongModeOffset:\r
00c5eede
LE
74\r
75BITS 64\r
ba15b971 76@LongMode: ; long-mode starts here\r
5830d2c3
LE
77 mov rsp, strict qword 0 ; source operand will be patched\r
78ASM_PFX(gPatchSmmInitStack):\r
ba15b971
LG
79 and sp, 0xfff0 ; make sure RSP is 16-byte aligned\r
80 ;\r
81 ; Accoring to X64 calling convention, XMM0~5 are volatile, we need to save\r
82 ; them before calling C-function.\r
83 ;\r
84 sub rsp, 0x60\r
85 movdqa [rsp], xmm0\r
86 movdqa [rsp + 0x10], xmm1\r
87 movdqa [rsp + 0x20], xmm2\r
88 movdqa [rsp + 0x30], xmm3\r
89 movdqa [rsp + 0x40], xmm4\r
90 movdqa [rsp + 0x50], xmm5\r
91\r
92 add rsp, -0x20\r
93 call ASM_PFX(SmmInitHandler)\r
94 add rsp, 0x20\r
95\r
96 ;\r
97 ; Restore XMM0~5 after calling C-function.\r
98 ;\r
99 movdqa xmm0, [rsp]\r
100 movdqa xmm1, [rsp + 0x10]\r
101 movdqa xmm2, [rsp + 0x20]\r
102 movdqa xmm3, [rsp + 0x30]\r
103 movdqa xmm4, [rsp + 0x40]\r
104 movdqa xmm5, [rsp + 0x50]\r
105\r
02f7fd15 106 StuffRsb64\r
ba15b971
LG
107 rsm\r
108\r
109BITS 16\r
110ASM_PFX(gcSmmInitTemplate):\r
111 mov ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]\r
112 sub ebp, 0x30000\r
113 jmp ebp\r
114@L1:\r
e21e355e 115 DQ 0; ASM_PFX(SmmStartup)\r
ba15b971
LG
116\r
117ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)\r
118\r
119BITS 64\r
120global ASM_PFX(SmmRelocationSemaphoreComplete)\r
121ASM_PFX(SmmRelocationSemaphoreComplete):\r
122 push rax\r
123 mov rax, [ASM_PFX(mRebasedFlag)]\r
124 mov byte [rax], 1\r
125 pop rax\r
126 jmp [ASM_PFX(mSmmRelocationOriginalAddress)]\r
127\r
128;\r
129; Semaphore code running in 32-bit mode\r
130;\r
9686a467 131BITS 32\r
ba15b971
LG
132global ASM_PFX(SmmRelocationSemaphoreComplete32)\r
133ASM_PFX(SmmRelocationSemaphoreComplete32):\r
9686a467
LE
134 push eax\r
135 mov eax, strict dword 0 ; source operand will be patched\r
136ASM_PFX(gPatchRebasedFlagAddr32):\r
137 mov byte [eax], 1\r
138 pop eax\r
139 jmp dword [dword 0] ; destination will be patched\r
140ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32):\r
e21e355e 141\r
9686a467 142BITS 64\r
e21e355e
LG
143global ASM_PFX(PiSmmCpuSmmInitFixupAddress)\r
144ASM_PFX(PiSmmCpuSmmInitFixupAddress):\r
145 lea rax, [@LongMode]\r
456c4cca
LE
146 lea rcx, [@PatchLongModeOffset - 6]\r
147 mov dword [rcx], eax\r
e21e355e
LG
148\r
149 lea rax, [ASM_PFX(SmmStartup)]\r
150 lea rcx, [@L1]\r
151 mov qword [rcx], rax\r
152 ret\r