#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2017, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   SmiEntry.S
#
# Abstract:
#
#   Code template of the SMI handler for a particular processor
#
#------------------------------------------------------------------------------
21 | \r | |
22 | ASM_GLOBAL ASM_PFX(gcSmiHandlerTemplate)\r | |
23 | ASM_GLOBAL ASM_PFX(gcSmiHandlerSize)\r | |
24 | ASM_GLOBAL ASM_PFX(gSmiCr3)\r | |
25 | ASM_GLOBAL ASM_PFX(gSmiStack)\r | |
26 | ASM_GLOBAL ASM_PFX(gSmbase)\r | |
717fb604 | 27 | ASM_GLOBAL ASM_PFX(mXdSupported)\r |
7947da3c MK |
28 | ASM_GLOBAL ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r |
29 | ASM_GLOBAL ASM_PFX(gSmiHandlerIdtr)\r | |
30 | \r | |
b6fea56c | 31 | .equ MSR_IA32_MISC_ENABLE, 0x1A0\r |
717fb604 JY |
32 | .equ MSR_EFER, 0xc0000080\r |
33 | .equ MSR_EFER_XD, 0x800\r | |
34 | \r | |
f12367a0 MK |
35 | #\r |
36 | # Constants relating to PROCESSOR_SMM_DESCRIPTOR\r | |
37 | #\r | |
7947da3c MK |
38 | .equ DSC_OFFSET, 0xfb00\r |
39 | .equ DSC_GDTPTR, 0x30\r | |
40 | .equ DSC_GDTSIZ, 0x38\r | |
41 | .equ DSC_CS, 14\r | |
42 | .equ DSC_DS, 16\r | |
43 | .equ DSC_SS, 18\r | |
44 | .equ DSC_OTHERSEG, 20\r | |
45 | \r | |
46 | .equ PROTECT_MODE_CS, 0x08\r | |
47 | .equ PROTECT_MODE_DS, 0x20\r | |
48 | .equ TSS_SEGMENT, 0x40\r | |
49 | \r | |
50 | .text\r | |
51 | \r | |
52 | ASM_PFX(gcSmiHandlerTemplate):\r | |
53 | \r | |
54 | _SmiEntryPoint:\r | |
55 | .byte 0xbb # mov bx, imm16\r | |
56 | .word _GdtDesc - _SmiEntryPoint + 0x8000\r | |
57 | .byte 0x2e,0xa1 # mov ax, cs:[offset16]\r | |
58 | .word DSC_OFFSET + DSC_GDTSIZ\r | |
59 | decl %eax\r | |
60 | movl %eax, %cs:(%edi) # mov cs:[bx], ax\r | |
61 | .byte 0x66,0x2e,0xa1 # mov eax, cs:[offset16]\r | |
62 | .word DSC_OFFSET + DSC_GDTPTR\r | |
63 | movw %ax, %cs:2(%edi)\r | |
64 | movw %ax, %bp # ebp = GDT base\r | |
65 | .byte 0x66\r | |
66 | lgdt %cs:(%edi)\r | |
67 | # Patch ProtectedMode Segment\r | |
68 | .byte 0xb8 # mov ax, imm16\r | |
69 | .word PROTECT_MODE_CS # set AX for segment directly\r | |
70 | movl %eax, %cs:-2(%edi) # mov cs:[bx - 2], ax\r | |
71 | # Patch ProtectedMode entry\r | |
72 | .byte 0x66, 0xbf # mov edi, SMBASE\r | |
73 | ASM_PFX(gSmbase): .space 4\r | |
74 | .byte 0x67\r | |
75 | lea ((Start32bit - _SmiEntryPoint) + 0x8000)(%edi), %ax\r | |
76 | movw %ax, %cs:-6(%edi)\r | |
77 | movl %cr0, %ebx\r | |
78 | .byte 0x66\r | |
79 | andl $0x9ffafff3, %ebx\r | |
80 | .byte 0x66\r | |
81 | orl $0x23, %ebx\r | |
82 | movl %ebx, %cr0\r | |
83 | .byte 0x66,0xea\r | |
84 | .space 4\r | |
85 | .space 2\r | |
86 | _GdtDesc: .space 4\r | |
87 | .space 2\r | |
88 | \r | |
89 | Start32bit:\r | |
90 | movw $PROTECT_MODE_DS, %ax\r | |
91 | movl %eax,%ds\r | |
92 | movl %eax,%es\r | |
93 | movl %eax,%fs\r | |
94 | movl %eax,%gs\r | |
95 | movl %eax,%ss\r | |
96 | .byte 0xbc # mov esp, imm32\r | |
97 | ASM_PFX(gSmiStack): .space 4\r | |
98 | movl $ASM_PFX(gSmiHandlerIdtr), %eax\r | |
99 | lidt (%eax)\r | |
100 | jmp ProtFlatMode\r | |
101 | \r | |
102 | ProtFlatMode:\r | |
103 | .byte 0xb8 # mov eax, imm32\r | |
104 | ASM_PFX(gSmiCr3): .space 4\r | |
105 | movl %eax, %cr3\r | |
106 | #\r | |
107 | # Need to test for CR4 specific bit support\r | |
108 | #\r | |
109 | movl $1, %eax\r | |
110 | cpuid # use CPUID to determine if specific CR4 bits are supported\r | |
111 | xorl %eax, %eax # Clear EAX\r | |
112 | testl $BIT2, %edx # Check for DE capabilities\r | |
113 | jz L8\r | |
114 | orl $BIT3, %eax\r | |
115 | L8:\r | |
116 | testl $BIT6, %edx # Check for PAE capabilities\r | |
117 | jz L9\r | |
118 | orl $BIT5, %eax\r | |
119 | L9:\r | |
120 | testl $BIT7, %edx # Check for MCE capabilities\r | |
121 | jz L10\r | |
122 | orl $BIT6, %eax\r | |
123 | L10:\r | |
124 | testl $BIT24, %edx # Check for FXSR capabilities\r | |
125 | jz L11\r | |
126 | orl $BIT9, %eax\r | |
127 | L11:\r | |
128 | testl $BIT25, %edx # Check for SSE capabilities\r | |
129 | jz L12\r | |
130 | orl $BIT10, %eax\r | |
131 | L12: # as cr4.PGE is not set here, refresh cr3\r | |
132 | movl %eax, %cr4 # in PreModifyMtrrs() to flush TLB.\r | |
717fb604 JY |
133 | \r |
134 | cmpb $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r | |
135 | jz L5\r | |
136 | # Load TSS\r | |
137 | movb $0x89, (TSS_SEGMENT + 5)(%ebp) # clear busy flag\r | |
138 | movl $TSS_SEGMENT, %eax\r | |
139 | ltrw %ax\r | |
140 | L5:\r | |
141 | \r | |
142 | # enable NXE if supported\r | |
143 | .byte 0xb0 # mov al, imm8\r | |
144 | ASM_PFX(mXdSupported): .byte 1\r | |
145 | cmpb $0, %al\r | |
146 | jz SkipNxe\r | |
147 | #\r | |
148 | # Check XD disable bit\r | |
149 | #\r | |
150 | movl $MSR_IA32_MISC_ENABLE, %ecx\r | |
151 | rdmsr\r | |
152 | pushl %edx # save MSR_IA32_MISC_ENABLE[63-32]\r | |
153 | testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]\r | |
154 | jz L13\r | |
155 | andw $0x0FFFB, %dx # clear XD Disable bit if it is set\r | |
156 | wrmsr\r | |
157 | L13:\r | |
158 | movl $MSR_EFER, %ecx\r | |
159 | rdmsr\r | |
160 | orw $MSR_EFER_XD,%ax # enable NXE\r | |
161 | wrmsr\r | |
0d0a19cb | 162 | jmp NxeDone\r |
717fb604 JY |
163 | SkipNxe:\r |
164 | subl $4, %esp\r | |
165 | NxeDone:\r | |
166 | \r | |
7947da3c | 167 | movl %cr0, %ebx\r |
717fb604 | 168 | orl $0x080010023, %ebx # enable paging + WP + NE + MP + PE\r |
7947da3c MK |
169 | movl %ebx, %cr0\r |
170 | leal DSC_OFFSET(%edi),%ebx\r | |
171 | movw DSC_DS(%ebx),%ax\r | |
172 | movl %eax, %ds\r | |
173 | movw DSC_OTHERSEG(%ebx),%ax\r | |
174 | movl %eax, %es\r | |
175 | movl %eax, %fs\r | |
176 | movl %eax, %gs\r | |
177 | movw DSC_SS(%ebx),%ax\r | |
178 | movl %eax, %ss\r | |
179 | \r | |
7947da3c MK |
180 | # jmp _SmiHandler # instruction is not needed\r |
181 | \r | |
182 | _SmiHandler:\r | |
717fb604 | 183 | movl 4(%esp), %ebx\r |
7947da3c | 184 | \r |
f45f2d4a JY |
185 | pushl %ebx\r |
186 | movl $ASM_PFX(CpuSmmDebugEntry), %eax\r | |
187 | call *%eax\r | |
717fb604 JY |
188 | addl $4, %esp\r |
189 | \r | |
f45f2d4a | 190 | pushl %ebx\r |
7947da3c MK |
191 | movl $ASM_PFX(SmiRendezvous), %eax\r |
192 | call *%eax\r | |
717fb604 | 193 | addl $4, %esp\r |
7947da3c | 194 | \r |
f45f2d4a JY |
195 | pushl %ebx\r |
196 | movl $ASM_PFX(CpuSmmDebugExit), %eax\r | |
197 | call *%eax\r | |
717fb604 JY |
198 | addl $4, %esp\r |
199 | \r | |
200 | movl $ASM_PFX(mXdSupported), %eax\r | |
201 | movb (%eax), %al\r | |
202 | cmpb $0, %al\r | |
203 | jz L16\r | |
204 | popl %edx # get saved MSR_IA32_MISC_ENABLE[63-32]\r | |
205 | testl $BIT2, %edx\r | |
206 | jz L16\r | |
207 | movl $MSR_IA32_MISC_ENABLE, %ecx\r | |
208 | rdmsr\r | |
209 | orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM\r | |
210 | wrmsr\r | |
211 | \r | |
212 | L16:\r | |
7947da3c MK |
213 | rsm\r |
214 | \r | |
215 | ASM_PFX(gcSmiHandlerSize): .word . - _SmiEntryPoint\r |