# Provenance: commit 427e3573 (MK)
#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2015, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   SmmInit.S
#
# Abstract:
#
#   Functions for relocating SMBASE's for all processors
#
#------------------------------------------------------------------------------
21 | \r | |
22 | ASM_GLOBAL ASM_PFX(gSmmCr0)\r | |
23 | ASM_GLOBAL ASM_PFX(gSmmCr3)\r | |
24 | ASM_GLOBAL ASM_PFX(gSmmCr4)\r | |
25 | ASM_GLOBAL ASM_PFX(gSmmJmpAddr)\r | |
26 | ASM_GLOBAL ASM_PFX(gcSmmInitTemplate)\r | |
27 | ASM_GLOBAL ASM_PFX(gcSmmInitSize)\r | |
28 | ASM_GLOBAL ASM_PFX(mRebasedFlagAddr32)\r | |
29 | ASM_GLOBAL ASM_PFX(SmmRelocationSemaphoreComplete)\r | |
30 | ASM_GLOBAL ASM_PFX(SmmRelocationSemaphoreComplete32)\r | |
31 | ASM_GLOBAL ASM_PFX(mSmmRelocationOriginalAddressPtr32)\r | |
32 | ASM_GLOBAL ASM_PFX(gSmmInitStack)\r | |
33 | ASM_GLOBAL ASM_PFX(gcSmiInitGdtr)\r | |
34 | \r | |
35 | \r | |
36 | .text\r | |
37 | \r | |
38 | ASM_PFX(gcSmiInitGdtr):\r | |
39 | .word 0\r | |
40 | .quad 0\r | |
41 | \r | |
42 | SmmStartup:\r | |
43 | .byte 0x66,0xb8 # mov eax, imm32\r | |
44 | ASM_PFX(gSmmCr3): .space 4\r | |
45 | movq %rax, %cr3\r | |
46 | .byte 0x66,0x2e\r | |
47 | lgdt (ASM_PFX(gcSmiInitGdtr) - SmmStartup)(%ebp)\r | |
48 | .byte 0x66,0xb8 # mov eax, imm32\r | |
49 | ASM_PFX(gSmmCr4): .space 4\r | |
50 | orb $2, %ah # enable XMM registers access\r | |
51 | movq %rax, %cr4\r | |
52 | .byte 0x66\r | |
53 | movl $0xc0000080,%ecx # IA32_EFER MSR\r | |
54 | rdmsr\r | |
55 | orb $1,%ah # set LME bit\r | |
56 | wrmsr\r | |
57 | .byte 0x66,0xb8 # mov eax, imm32\r | |
58 | ASM_PFX(gSmmCr0): .space 4\r | |
59 | movq %rax, %cr0\r | |
60 | .byte 0x66,0xea # far jmp to long mode\r | |
61 | ASM_PFX(gSmmJmpAddr): .quad LongMode\r | |
62 | LongMode: # long-mode starts here\r | |
63 | .byte 0x48,0xbc # mov rsp, imm64\r | |
64 | ASM_PFX(gSmmInitStack): .space 8\r | |
65 | andw $0xfff0, %sp # make sure RSP is 16-byte aligned\r | |
66 | #\r | |
67 | # Accoring to X64 calling convention, XMM0~5 are volatile, we need to save\r | |
68 | # them before calling C-function.\r | |
69 | #\r | |
70 | subq $0x60, %rsp\r | |
71 | movdqa %xmm0, 0x0(%rsp)\r | |
72 | movdqa %xmm1, 0x10(%rsp)\r | |
73 | movdqa %xmm2, 0x20(%rsp)\r | |
74 | movdqa %xmm3, 0x30(%rsp)\r | |
75 | movdqa %xmm4, 0x40(%rsp)\r | |
76 | movdqa %xmm5, 0x50(%rsp)\r | |
77 | \r | |
78 | \r | |
79 | addq $-0x20, %rsp\r | |
80 | call ASM_PFX(SmmInitHandler)\r | |
81 | addq $0x20, %rsp\r | |
82 | #\r | |
83 | # Restore XMM0~5 after calling C-function.\r | |
84 | #\r | |
85 | movdqa 0x0(%rsp), %xmm0\r | |
86 | movdqa 0x10(%rsp), %xmm1\r | |
87 | movdqa 0x20(%rsp), %xmm2\r | |
88 | movdqa 0x30(%rsp), %xmm3\r | |
89 | movdqa 0x40(%rsp), %xmm4\r | |
90 | movdqa 0x50(%rsp), %xmm5\r | |
91 | \r | |
92 | rsm\r | |
93 | \r | |
94 | ASM_PFX(gcSmmInitTemplate):\r | |
95 | \r | |
96 | _SmmInitTemplate:\r | |
97 | .byte 0x66,0x2e,0x8b,0x2e # mov ebp, cs:[@F]\r | |
98 | .word L1 - _SmmInitTemplate + 0x8000\r | |
99 | .byte 0x66, 0x81, 0xed, 0, 0, 3, 0 # sub ebp, 0x30000\r | |
100 | jmp *%bp # jmp ebp actually\r | |
101 | L1:\r | |
102 | .quad SmmStartup\r | |
103 | \r | |
104 | ASM_PFX(gcSmmInitSize): .word . - ASM_PFX(gcSmmInitTemplate)\r | |
105 | \r | |
106 | ASM_PFX(SmmRelocationSemaphoreComplete):\r | |
107 | # Create a simple stack frame to store RAX and the original RSM location\r | |
108 | pushq %rax # Used to store return address\r | |
109 | pushq %rax\r | |
110 | \r | |
111 | # Load the original RSM location onto stack\r | |
112 | movabsq $ASM_PFX(mSmmRelocationOriginalAddress), %rax\r | |
113 | movq (%rax), %rax\r | |
114 | movq %rax, 0x08(%rsp)\r | |
115 | \r | |
116 | # Update rebase flag\r | |
117 | movabsq $ASM_PFX(mRebasedFlag), %rax\r | |
118 | movq (%rax), %rax\r | |
119 | movb $1, (%rax)\r | |
120 | \r | |
121 | #restore RAX and return to original RSM location\r | |
122 | popq %rax\r | |
123 | retq\r | |
124 | \r | |
125 | #\r | |
126 | # Semaphore code running in 32-bit mode\r | |
127 | #\r | |
128 | ASM_PFX(SmmRelocationSemaphoreComplete32):\r | |
129 | #\r | |
130 | # movb $1, ()\r | |
131 | #\r | |
132 | .byte 0xc6, 0x05\r | |
133 | ASM_PFX(mRebasedFlagAddr32):\r | |
134 | .long 0\r | |
135 | .byte 1\r | |
136 | #\r | |
137 | # jmpd ()\r | |
138 | #\r | |
139 | .byte 0xff, 0x25\r | |
140 | ASM_PFX(mSmmRelocationOriginalAddressPtr32):\r | |
141 | .long 0\r |