#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2015, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   SmiEntry.S
#
# Abstract:
#
#   Code template of the SMI handler for a particular processor
#
#------------------------------------------------------------------------------
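# Note: gcSmiHandlerTemplate is a position-independent template. The
# PiSmmCpuDxeSmm driver presumably copies it into each processor's SMRAM at
# SMBASE + 0x8000 and patches the gSmbase, gSmiStack and gSmiCr3 fields below
# before the first SMI is taken; the descriptor block at DSC_OFFSET (assumed
# to be the PROCESSOR_SMM_DESCRIPTOR) supplies the GDT pointer and the
# segment selectors loaded later in the handler.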
21\r
22ASM_GLOBAL ASM_PFX(gcSmiHandlerTemplate)\r
23ASM_GLOBAL ASM_PFX(gcSmiHandlerSize)\r
24ASM_GLOBAL ASM_PFX(gSmiCr3)\r
25ASM_GLOBAL ASM_PFX(gSmiStack)\r
26ASM_GLOBAL ASM_PFX(gSmbase)\r
27ASM_GLOBAL ASM_PFX(FeaturePcdGet (PcdCpuSmmDebug))\r
28ASM_GLOBAL ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r
29ASM_GLOBAL ASM_PFX(gSmiHandlerIdtr)\r
30\r
31.equ DSC_OFFSET, 0xfb00\r
32.equ DSC_GDTPTR, 0x30\r
33.equ DSC_GDTSIZ, 0x38\r
34.equ DSC_CS, 14\r
35.equ DSC_DS, 16\r
36.equ DSC_SS, 18\r
37.equ DSC_OTHERSEG, 20\r
38\r
39.equ PROTECT_MODE_CS, 0x08\r
40.equ PROTECT_MODE_DS, 0x20\r
41.equ TSS_SEGMENT, 0x40\r
42\r
43 .text\r
44\r
45ASM_PFX(gcSmiHandlerTemplate):\r
46\r
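# On SMI the processor begins execution here in real mode at SMBASE + 0x8000.
# This file is assembled as 32-bit code, so the 16-bit instructions below are
# hand-encoded with .byte/.word; 32-bit addressing forms such as (%edi) are
# chosen so that the emitted bytes decode as the intended 16-bit forms
# (e.g. [bx]) when executed in real mode.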
_SmiEntryPoint:
    .byte 0xbb                          # mov bx, imm16
    .word _GdtDesc - _SmiEntryPoint + 0x8000
    .byte 0x2e,0xa1                     # mov ax, cs:[offset16]
    .word DSC_OFFSET + DSC_GDTSIZ
    decl    %eax
    movl    %eax, %cs:(%edi)            # mov cs:[bx], ax
    .byte 0x66,0x2e,0xa1                # mov eax, cs:[offset16]
    .word DSC_OFFSET + DSC_GDTPTR
    movw    %ax, %cs:2(%edi)
    movw    %ax, %bp                    # ebp = GDT base
    .byte 0x66
    lgdt    %cs:(%edi)
# Patch ProtectedMode Segment
    .byte 0xb8                          # mov ax, imm16
    .word PROTECT_MODE_CS               # set AX for segment directly
    movl    %eax, %cs:-2(%edi)          # mov cs:[bx - 2], ax
# Patch ProtectedMode entry
    .byte 0x66, 0xbf                    # mov edi, SMBASE
ASM_PFX(gSmbase): .space 4
    .byte 0x67
    lea     ((Start32bit - _SmiEntryPoint) + 0x8000)(%edi), %ax
    movw    %ax, %cs:-6(%edi)
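# Switch to protected mode: the mask below clears CD, NW, AM, WP, TS and EM
# and the OR sets PE, MP and NE (CR0 bit assignments per the Intel SDM),
# before the far jump to the patched 32-bit entry point.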
    movl    %cr0, %ebx
    .byte 0x66
    andl    $0x9ffafff3, %ebx
    .byte 0x66
    orl     $0x23, %ebx
    movl    %ebx, %cr0
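# Far jump (0x66,0xea) to Start32bit; its 32-bit offset and selector are the
# bytes patched above at cs:[bx - 6] and cs:[bx - 2]. The six bytes labeled
# _GdtDesc double as the GDT pseudo-descriptor (16-bit limit, 32-bit base)
# that was filled in and loaded with lgdt at the top of the handler.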
    .byte 0x66,0xea
    .space 4
    .space 2
_GdtDesc: .space 4
    .space 2

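# Execution resumes here in 32-bit protected mode after the far jump above.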
Start32bit:
    movw    $PROTECT_MODE_DS, %ax
    movl    %eax,%ds
    movl    %eax,%es
    movl    %eax,%fs
    movl    %eax,%gs
    movl    %eax,%ss
    .byte 0xbc                          # mov esp, imm32
ASM_PFX(gSmiStack): .space 4
    movl    $ASM_PFX(gSmiHandlerIdtr), %eax
    lidt    (%eax)
    jmp     ProtFlatMode

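# ProtFlatMode: load the SMM page table root patched into gSmiCr3, enable the
# CR4 features reported by CPUID, turn on paging, then switch to the segments
# described by the descriptor block at DSC_OFFSET.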
ProtFlatMode:
    .byte 0xb8                          # mov eax, imm32
ASM_PFX(gSmiCr3): .space 4
    movl    %eax, %cr3
#
# Need to test for CR4 specific bit support
#
    movl    $1, %eax
    cpuid                               # use CPUID to determine if specific CR4 bits are supported
    xorl    %eax, %eax                  # Clear EAX
    testl   $BIT2, %edx                 # Check for DE capabilities
    jz      L8
    orl     $BIT3, %eax
L8:
    testl   $BIT6, %edx                 # Check for PAE capabilities
    jz      L9
    orl     $BIT5, %eax
L9:
    testl   $BIT7, %edx                 # Check for MCE capabilities
    jz      L10
    orl     $BIT6, %eax
L10:
    testl   $BIT24, %edx                # Check for FXSR capabilities
    jz      L11
    orl     $BIT9, %eax
L11:
    testl   $BIT25, %edx                # Check for SSE capabilities
    jz      L12
    orl     $BIT10, %eax
L12:                                    # as cr4.PGE is not set here, refresh cr3
    movl    %eax, %cr4                  # in PreModifyMtrrs() to flush TLB.
    movl    %cr0, %ebx
    orl     $0x080000000, %ebx          # enable paging
    movl    %ebx, %cr0
    leal    DSC_OFFSET(%edi),%ebx
    movw    DSC_DS(%ebx),%ax
    movl    %eax, %ds
    movw    DSC_OTHERSEG(%ebx),%ax
    movl    %eax, %es
    movl    %eax, %fs
    movl    %eax, %gs
    movw    DSC_SS(%ebx),%ax
    movl    %eax, %ss

    cmpb    $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
    jz      L5

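# When the stack guard feature is enabled, reload the task register,
# presumably so exceptions raised by a stack overflow can switch to a
# known-good stack through a task gate. The TSS descriptor's type byte is
# reset to 0x89 (available 32-bit TSS) first because ltr faults on a
# descriptor still marked busy from a previous SMI.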
# Load TSS
    movb    $0x89, (TSS_SEGMENT + 5)(%ebp) # clear busy flag
    movl    $TSS_SEGMENT, %eax
    ltrw    %ax
L5:

# jmp     _SmiHandler                   # instruction is not needed

_SmiHandler:
    cmpb    $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmDebug))
    jz      L3

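# When SMM debug support is enabled, restore DR6/DR7 from their locations in
# the SMRAM save state before running C code. The call/pop pair below obtains
# the runtime address of L1, and CPUID.80000001h:EDX bit 29 (Intel 64
# support) selects between the 32-bit and 64-bit save-state layouts, whose
# DR6/DR7 offsets differ.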
L6:
    call    L1
L1:
    popl    %ebp
    movl    $0x80000001, %eax
    cpuid
    btl     $29, %edx                   # check cpuid to identify X64 or IA32
    leal    (0x7fc8 - (L1 - _SmiEntryPoint))(%ebp), %edi
    leal    4(%edi), %esi
    jnc     L2
    addl    $4, %esi
L2:
    movl    (%esi), %ecx
    movl    (%edi), %edx
L7:
    movl    %ecx, %dr6
    movl    %edx, %dr7                  # restore DR6 & DR7 before running C code
L3:

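# Pass the argument to SmiRendezvous(); the value at the top of the SMI stack
# is presumably the CpuIndex placed there when gSmiStack was patched for this
# processor.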
    pushl   (%esp)

    movl    $ASM_PFX(SmiRendezvous), %eax
    call    *%eax
    popl    %ecx


    cmpb    $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmDebug))
    jz      L4

    movl    %dr6, %ecx
    movl    %dr7, %edx
    movl    %ecx, (%esi)
    movl    %edx, (%edi)
L4:

    rsm

ASM_PFX(gcSmiHandlerSize): .word . - _SmiEntryPoint