1 ;------------------------------------------------------------------------------ ;
2 ; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
3 ; This program and the accompanying materials
4 ; are licensed and made available under the terms and conditions of the BSD License
5 ; which accompanies this distribution. The full text of the license may be found at
6 ; http://opensource.org/licenses/bsd-license.php.
8 ; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
9 ; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
17 ; Functions for relocating SMBASE's for all processors
19 ;-------------------------------------------------------------------------------
21 extern ASM_PFX(SmmInitHandler)
22 extern ASM_PFX(mRebasedFlag)
23 extern ASM_PFX(mSmmRelocationOriginalAddress)
25 global ASM_PFX(gSmmCr3)
26 global ASM_PFX(gSmmCr4)
27 global ASM_PFX(gSmmCr0)
28 global ASM_PFX(gSmmJmpAddr)
29 global ASM_PFX(gSmmInitStack)
30 global ASM_PFX(gcSmiInitGdtr)
31 global ASM_PFX(gcSmmInitSize)
32 global ASM_PFX(gcSmmInitTemplate)
33 global ASM_PFX(mRebasedFlagAddr32)
34 global ASM_PFX(mSmmRelocationOriginalAddressPtr32)
; GDT register image (limit/base) loaded by the lgdt in SmmStartup below.
; NOTE(review): the DW/DQ data words that follow this label are not visible
; in this chunk — do not assume their layout from here.
39 ASM_PFX(gcSmiInitGdtr):
;-----------------------------------------------------------------------
; SmmStartup — per-CPU SMM entry stub used during SMBASE relocation.
; Brings the processor from the SMM entry state up to long mode, then
; calls the C handler SmmInitHandler() with XMM0-5 preserved.
;
; NOTE(review): this chunk is missing interior lines (the cpuid/rdmsr
; pair, the CR3/CR4 writes, several movdqa lines, and the epilogue/rsm
; are not visible here) — comments below describe only what is shown.
;
; The DB-encoded instructions embed patchable immediates: gSmmCr3,
; gSmmCr4, gSmmCr0, gSmmJmpAddr and gSmmInitStack are data labels whose
; contents are fixed up at runtime by the PiSmmCpuDxeSmm driver (see
; PiSmmCpuSmmInitFixupAddress). Do NOT replace the DB sequences with
; symbolic operands — the byte layout is load-bearing.
;-----------------------------------------------------------------------
43 global ASM_PFX(SmmStartup)
46 mov eax, 0x80000001 ; read capability
49 mov ebx, edx ; rdmsr will change edx. keep it in ebx.
; mov eax, <gSmmCr3 immediate>: hand-encoded so the imm32 sits at a
; known, patchable offset (the gSmmCr3 label).
50 DB 0x66, 0xb8 ; mov eax, imm32
51 ASM_PFX(gSmmCr3): DD 0
; ebp presumably holds SmmStartup's runtime base here; the lgdt operand
; is the fixed offset of gcSmiInitGdtr within this stub — TODO confirm
; against the missing lines that set ebp.
54 lgdt [ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
55 DB 0x66, 0xb8 ; mov eax, imm32
56 ASM_PFX(gSmmCr4): DD 0
57 or ah, 2 ; enable XMM registers access
60 mov ecx, 0xc0000080 ; IA32_EFER MSR
62 or ah, BIT0 ; set LME bit
; ebx still holds the CPUID.80000001h:EDX capability bits saved above.
64 test ebx, BIT20 ; check NXE capability
66 or ah, BIT3 ; set NXE bit
68 DB 0x66, 0xb8 ; mov eax, imm32
; (line number 68/69 per original source) patchable CR0 image:
70 ASM_PFX(gSmmCr0): DD 0
71 mov cr0, rax ; enable protected mode & paging
; Far jump flushes the pipeline and loads the long-mode CS; the 6-byte
; ptr16:32 operand lives at gSmmJmpAddr and is patched at runtime to
; point at @LongMode in the relocated stub.
72 DB 0x66, 0xea ; far jmp to long mode
73 ASM_PFX(gSmmJmpAddr): DQ 0;@LongMode
74 @LongMode: ; long-mode starts here
; mov rsp, <gSmmInitStack immediate>: imm64 patched at runtime.
75 DB 0x48, 0xbc ; mov rsp, imm64
76 ASM_PFX(gSmmInitStack): DQ 0
77 and sp, 0xfff0 ; make sure RSP is 16-byte aligned
79 ; According to X64 calling convention, XMM0~5 are volatile, we need to save
80 ; them before calling C-function.
; NOTE(review): the stack-frame allocation and the xmm0 save are among
; the lines missing from this view.
84 movdqa [rsp + 0x10], xmm1
85 movdqa [rsp + 0x20], xmm2
86 movdqa [rsp + 0x30], xmm3
87 movdqa [rsp + 0x40], xmm4
88 movdqa [rsp + 0x50], xmm5
91 call ASM_PFX(SmmInitHandler)
95 ; Restore XMM0~5 after calling C-function.
98 movdqa xmm1, [rsp + 0x10]
99 movdqa xmm2, [rsp + 0x20]
100 movdqa xmm3, [rsp + 0x30]
101 movdqa xmm4, [rsp + 0x40]
102 movdqa xmm5, [rsp + 0x50]
;-----------------------------------------------------------------------
; gcSmmInitTemplate — 16-bit code template copied to each CPU's
; SMBASE + 0x8000 (the SMI entry point) during SMBASE relocation.
; gcSmmInitSize gives the byte count to copy.
; NOTE(review): interior lines (the @L1 label, segment setup, and the
; jump that consumes the DQ below) are not visible in this chunk; the
; DQ 0 cell is presumably patched with SmmStartup's runtime address —
; confirm against the full file.
;-----------------------------------------------------------------------
107 ASM_PFX(gcSmmInitTemplate):
; Load ebp with the stub's runtime base via a cs-relative fetch from the
; copied template at offset 0x8000.
108 mov ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]
112 DQ 0; ASM_PFX(SmmStartup)
114 ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)
;-----------------------------------------------------------------------
; SmmRelocationSemaphoreComplete — 64-bit completion hook run at the end
; of an SMBASE-relocation SMI. Loads the address of the rebased flag,
; then tail-jumps to the original SMI handler address so the normal SMI
; return path executes.
; NOTE(review): the line that actually stores to [rax] (setting the
; rebased flag) is missing from this view — TODO confirm in full file.
;-----------------------------------------------------------------------
117 global ASM_PFX(SmmRelocationSemaphoreComplete)
118 ASM_PFX(SmmRelocationSemaphoreComplete):
120 mov rax, [ASM_PFX(mRebasedFlag)]
; Indirect jmp (not call): resume the original SMI entry code in place.
123 jmp [ASM_PFX(mSmmRelocationOriginalAddress)]
126 ; Semaphore code running in 32-bit mode
;-----------------------------------------------------------------------
; SmmRelocationSemaphoreComplete32 — same completion hook as above, for
; processors still executing in 32-bit mode. Because 64-bit absolute
; symbols cannot be referenced from 32-bit code, the two dd cells below
; (mRebasedFlagAddr32, mSmmRelocationOriginalAddressPtr32) are patched
; at runtime with the low 32 bits of the corresponding addresses.
; NOTE(review): the instructions surrounding these data cells (the store
; to the flag and the final indirect jmp) are not visible in this chunk.
;-----------------------------------------------------------------------
128 global ASM_PFX(SmmRelocationSemaphoreComplete32)
129 ASM_PFX(SmmRelocationSemaphoreComplete32):
134 ASM_PFX(mRebasedFlagAddr32): dd 0
140 ASM_PFX(mSmmRelocationOriginalAddressPtr32): dd 0
;-----------------------------------------------------------------------
; PiSmmCpuSmmInitFixupAddress — runtime fixup called by the driver to
; patch the far-jump target used by SmmStartup: computes the runtime
; addresses of gSmmJmpAddr (the patch site) and SmmStartup (the stub
; base) via RIP-relative lea.
; NOTE(review): the instruction(s) that actually write the fixed-up
; value through rcx, and the ret, are missing from this view.
;-----------------------------------------------------------------------
142 global ASM_PFX(PiSmmCpuSmmInitFixupAddress)
143 ASM_PFX(PiSmmCpuSmmInitFixupAddress):
145 lea rcx, [ASM_PFX(gSmmJmpAddr)]
148 lea rax, [ASM_PFX(SmmStartup)]