;------------------------------------------------------------------------------ ;
; Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution. The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmmInit.nasm
;
; Abstract:
;
;   Functions for relocating SMBASE's for all processors
;
;-------------------------------------------------------------------------------

extern ASM_PFX(SmmInitHandler)
extern ASM_PFX(mRebasedFlag)
extern ASM_PFX(mSmmRelocationOriginalAddress)

global ASM_PFX(gSmmCr3)
global ASM_PFX(gSmmCr4)
global ASM_PFX(gSmmCr0)
global ASM_PFX(gSmmJmpAddr)
global ASM_PFX(gSmmInitStack)
global ASM_PFX(gcSmiInitGdtr)
global ASM_PFX(gcSmmInitSize)
global ASM_PFX(gcSmmInitTemplate)
global ASM_PFX(mRebasedFlagAddr32)
global ASM_PFX(mSmmRelocationOriginalAddressPtr32)

    DEFAULT REL
    SECTION .text

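;
; NULL GDTR placeholder: the limit and base are filled in at runtime by the
; driver's C code before the SMBASE relocation SMIs are triggered.
;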
ASM_PFX(gcSmiInitGdtr):
            DW      0
            DQ      0

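;
; SmmStartup is reached from gcSmmInitTemplate while the processor is still in
; the real-mode-like environment entered on an SMI. The file is assembled with
; 64-bit defaults, so instructions that need 32-bit operands during 16-bit
; execution are emitted by hand with 0x66 operand-size prefixes, and the DD/DQ
; fields behind the mov opcodes serve as patchable immediates (gSmmCr3,
; gSmmCr4, gSmmCr0, gSmmInitStack and the far-jump target are fixed up at
; runtime by the driver's C code).
;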
global ASM_PFX(SmmStartup)
ASM_PFX(SmmStartup):
    DB      0x66, 0xb8                  ; mov eax, imm32
ASM_PFX(gSmmCr3): DD 0
    mov     cr3, rax
    DB      0x66, 0x2e                  ; 0x66: 32-bit operand; 0x2e: CS override for lgdt
    lgdt    [ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
    DB      0x66, 0xb8                  ; mov eax, imm32
ASM_PFX(gSmmCr4): DD 0
    or      ah, 2                       ; enable XMM registers access
    mov     cr4, rax
    DB      0x66                        ; 32-bit operand-size prefix for the mov below
    mov     ecx, 0xc0000080             ; IA32_EFER MSR
    rdmsr
    or      ah, 1                       ; set LME bit
    wrmsr
    DB      0x66, 0xb8                  ; mov eax, imm32
ASM_PFX(gSmmCr0): DD 0
    mov     cr0, rax                    ; enable protected mode & paging
    DB      0x66, 0xea                  ; far jmp to long mode
ASM_PFX(gSmmJmpAddr): DQ @LongMode
@LongMode:                              ; long-mode starts here
    DB      0x48, 0xbc                  ; mov rsp, imm64
ASM_PFX(gSmmInitStack): DQ 0
    and     sp, 0xfff0                  ; make sure RSP is 16-byte aligned
    ;
    ; According to the X64 calling convention, XMM0~5 are volatile; we need to
    ; save them before calling the C function.
    ;
    sub     rsp, 0x60
    movdqa  [rsp], xmm0
    movdqa  [rsp + 0x10], xmm1
    movdqa  [rsp + 0x20], xmm2
    movdqa  [rsp + 0x30], xmm3
    movdqa  [rsp + 0x40], xmm4
    movdqa  [rsp + 0x50], xmm5

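    ;
    ; Allocate the 32-byte shadow space the X64 calling convention requires
    ; of the caller before calling into C code.
    ;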
    add     rsp, -0x20
    call    ASM_PFX(SmmInitHandler)
    add     rsp, 0x20

    ;
    ; Restore XMM0~5 after calling the C function.
    ;
    movdqa  xmm0, [rsp]
    movdqa  xmm1, [rsp + 0x10]
    movdqa  xmm2, [rsp + 0x20]
    movdqa  xmm3, [rsp + 0x30]
    movdqa  xmm4, [rsp + 0x40]
    movdqa  xmm5, [rsp + 0x50]

    rsm

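;
; gcSmmInitTemplate is the 16-bit stub that gets copied to SMBASE + 0x8000,
; the SMI entry point, for the initial SMBASE relocation. It runs with the
; CS base at the default SMBASE (0x30000), loads SmmStartup's flat address,
; converts it to a CS-relative offset, and jumps to it.
;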
BITS 16
ASM_PFX(gcSmmInitTemplate):
    mov     ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]  ; ebp = flat address of SmmStartup
    sub     ebp, 0x30000                ; rebase to an offset from the CS base (default SMBASE)
    jmp     ebp
@L1:
    DQ      ASM_PFX(SmmStartup)

ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)   ; number of template bytes to copy

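;
; During relocation the resume address in the SMM save state is hooked so
; that RSM lands here: only after the processor has actually left SMM with
; its new SMBASE is the rebased flag raised, before control resumes at the
; original interrupted address.
;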
BITS 64
global ASM_PFX(SmmRelocationSemaphoreComplete)
ASM_PFX(SmmRelocationSemaphoreComplete):
    push    rax
    mov     rax, [ASM_PFX(mRebasedFlag)]
    mov     byte [rax], 1               ; mark this processor as rebased
    pop     rax
    jmp     [ASM_PFX(mSmmRelocationOriginalAddress)]

;
; Semaphore code running in 32-bit mode
;
global ASM_PFX(SmmRelocationSemaphoreComplete32)
ASM_PFX(SmmRelocationSemaphoreComplete32):
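    ;
    ; Hand-assembled because these bytes execute in 32-bit protected mode
    ; while the file is assembled with 64-bit defaults; the dd fields are
    ; patched with 32-bit addresses at runtime.
    ;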
    ;
    ; mov byte ptr [], 1
    ;
    db 0xc6, 0x5
ASM_PFX(mRebasedFlagAddr32): dd 0
    db 1
    ;
    ; jmp dword ptr []
    ;
    db 0xff, 0x25
ASM_PFX(mSmmRelocationOriginalAddressPtr32): dd 0