/** @file
Code for Processor S3 restoration

Copyright (c) 2006 - 2016, Intel Corporation. All rights reserved.<BR>
This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution.  The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php

THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

**/

#include "PiSmmCpuDxeSmm.h"
22 IA32_DESCRIPTOR GdtrProfile
;
23 IA32_DESCRIPTOR IdtrProfile
;
26 } MP_CPU_EXCHANGE_INFO
;
29 UINT8
*RendezvousFunnelAddress
;
30 UINTN PModeEntryOffset
;
33 UINTN LModeEntryOffset
;
35 } MP_ASSEMBLY_ADDRESS_MAP
;
38 // Spin lock used to serialize MemoryMapped operation
40 SPIN_LOCK
*mMemoryMappedLock
= NULL
;
43 Get starting address and size of the rendezvous entry for APs.
44 Information for fixing a jump instruction in the code is also returned.
46 @param AddressMap Output buffer for address map information.
51 MP_ASSEMBLY_ADDRESS_MAP
*AddressMap
54 #define LEGACY_REGION_SIZE (2 * 0x1000)
55 #define LEGACY_REGION_BASE (0xA0000 - LEGACY_REGION_SIZE)
57 ACPI_CPU_DATA mAcpiCpuData
;
58 UINT32 mNumberToFinish
;
59 MP_CPU_EXCHANGE_INFO
*mExchangeInfo
;
60 BOOLEAN mRestoreSmmConfigurationInS3
= FALSE
;
61 VOID
*mGdtForAp
= NULL
;
62 VOID
*mIdtForAp
= NULL
;
63 VOID
*mMachineCheckHandlerForAp
= NULL
;
64 MP_MSR_LOCK
*mMsrSpinLocks
= NULL
;
65 UINTN mMsrSpinLockCount
;
71 BOOLEAN mSmmS3Flag
= FALSE
;
74 // Pointer to structure used during S3 Resume
76 SMM_S3_RESUME_STATE
*mSmmS3ResumeState
= NULL
;
79 Get MSR spin lock by MSR index.
81 @param MsrIndex MSR index value.
83 @return Pointer to MSR spin lock.
87 GetMsrSpinLockByIndex (
92 for (Index
= 0; Index
< mMsrCount
; Index
++) {
93 if (MsrIndex
== mMsrSpinLocks
[Index
].MsrIndex
) {
94 return mMsrSpinLocks
[Index
].SpinLock
;
101 Initialize MSR spin lock by MSR index.
103 @param MsrIndex MSR index value.
107 InitMsrSpinLockByIndex (
111 UINTN MsrSpinLockCount
;
112 UINTN NewMsrSpinLockCount
;
116 if (mMsrSpinLocks
== NULL
) {
117 MsrSpinLockCount
= mSmmCpuSemaphores
.SemaphoreMsr
.AvailableCounter
;
118 mMsrSpinLocks
= (MP_MSR_LOCK
*) AllocatePool (sizeof (MP_MSR_LOCK
) * MsrSpinLockCount
);
119 ASSERT (mMsrSpinLocks
!= NULL
);
120 for (Index
= 0; Index
< MsrSpinLockCount
; Index
++) {
121 mMsrSpinLocks
[Index
].SpinLock
=
122 (SPIN_LOCK
*)((UINTN
)mSmmCpuSemaphores
.SemaphoreMsr
.Msr
+ Index
* mSemaphoreSize
);
123 mMsrSpinLocks
[Index
].MsrIndex
= (UINT32
)-1;
125 mMsrSpinLockCount
= MsrSpinLockCount
;
126 mSmmCpuSemaphores
.SemaphoreMsr
.AvailableCounter
= 0;
128 if (GetMsrSpinLockByIndex (MsrIndex
) == NULL
) {
130 // Initialize spin lock for MSR programming
132 mMsrSpinLocks
[mMsrCount
].MsrIndex
= MsrIndex
;
133 InitializeSpinLock (mMsrSpinLocks
[mMsrCount
].SpinLock
);
135 if (mMsrCount
== mMsrSpinLockCount
) {
137 // If MSR spin lock buffer is full, enlarge it
139 AddedSize
= SIZE_4KB
;
140 mSmmCpuSemaphores
.SemaphoreMsr
.Msr
=
141 AllocatePages (EFI_SIZE_TO_PAGES(AddedSize
));
142 ASSERT (mSmmCpuSemaphores
.SemaphoreMsr
.Msr
!= NULL
);
143 NewMsrSpinLockCount
= mMsrSpinLockCount
+ AddedSize
/ mSemaphoreSize
;
144 mMsrSpinLocks
= ReallocatePool (
145 sizeof (MP_MSR_LOCK
) * mMsrSpinLockCount
,
146 sizeof (MP_MSR_LOCK
) * NewMsrSpinLockCount
,
149 ASSERT (mMsrSpinLocks
!= NULL
);
150 mMsrSpinLockCount
= NewMsrSpinLockCount
;
151 for (Index
= mMsrCount
; Index
< mMsrSpinLockCount
; Index
++) {
152 mMsrSpinLocks
[Index
].SpinLock
=
153 (SPIN_LOCK
*)((UINTN
)mSmmCpuSemaphores
.SemaphoreMsr
.Msr
+
154 (Index
- mMsrCount
) * mSemaphoreSize
);
155 mMsrSpinLocks
[Index
].MsrIndex
= (UINT32
)-1;
162 Sync up the MTRR values for all processors.
164 @param MtrrTable Table holding fixed/variable MTRR values to be loaded.
169 EFI_PHYSICAL_ADDRESS MtrrTable
175 Sync up the MTRR values for all processors.
184 MTRR_SETTINGS
*MtrrSettings
;
186 MtrrSettings
= (MTRR_SETTINGS
*) (UINTN
) MtrrTable
;
187 MtrrSetAllMtrrs (MtrrSettings
);
191 Programs registers for the calling processor.
193 This function programs registers for the calling processor.
195 @param RegisterTable Pointer to register table of the running processor.
199 SetProcessorRegister (
200 IN CPU_REGISTER_TABLE
*RegisterTable
203 CPU_REGISTER_TABLE_ENTRY
*RegisterTableEntry
;
206 SPIN_LOCK
*MsrSpinLock
;
209 // Traverse Register Table of this logical processor
211 RegisterTableEntry
= (CPU_REGISTER_TABLE_ENTRY
*) (UINTN
) RegisterTable
->RegisterTableEntry
;
212 for (Index
= 0; Index
< RegisterTable
->TableLength
; Index
++, RegisterTableEntry
++) {
214 // Check the type of specified register
216 switch (RegisterTableEntry
->RegisterType
) {
218 // The specified register is Control Register
220 case ControlRegister
:
221 switch (RegisterTableEntry
->Index
) {
223 Value
= AsmReadCr0 ();
224 Value
= (UINTN
) BitFieldWrite64 (
226 RegisterTableEntry
->ValidBitStart
,
227 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
228 (UINTN
) RegisterTableEntry
->Value
233 Value
= AsmReadCr2 ();
234 Value
= (UINTN
) BitFieldWrite64 (
236 RegisterTableEntry
->ValidBitStart
,
237 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
238 (UINTN
) RegisterTableEntry
->Value
243 Value
= AsmReadCr3 ();
244 Value
= (UINTN
) BitFieldWrite64 (
246 RegisterTableEntry
->ValidBitStart
,
247 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
248 (UINTN
) RegisterTableEntry
->Value
253 Value
= AsmReadCr4 ();
254 Value
= (UINTN
) BitFieldWrite64 (
256 RegisterTableEntry
->ValidBitStart
,
257 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
258 (UINTN
) RegisterTableEntry
->Value
267 // The specified register is Model Specific Register
271 // If this function is called to restore register setting after INIT signal,
272 // there is no need to restore MSRs in register table.
274 if (RegisterTableEntry
->ValidBitLength
>= 64) {
276 // If length is not less than 64 bits, then directly write without reading
279 RegisterTableEntry
->Index
,
280 RegisterTableEntry
->Value
284 // Get lock to avoid Package/Core scope MSRs programming issue in parallel execution mode
285 // to make sure MSR read/write operation is atomic.
287 MsrSpinLock
= GetMsrSpinLockByIndex (RegisterTableEntry
->Index
);
288 AcquireSpinLock (MsrSpinLock
);
290 // Set the bit section according to bit start and length
292 AsmMsrBitFieldWrite64 (
293 RegisterTableEntry
->Index
,
294 RegisterTableEntry
->ValidBitStart
,
295 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
296 RegisterTableEntry
->Value
298 ReleaseSpinLock (MsrSpinLock
);
302 // MemoryMapped operations
305 AcquireSpinLock (mMemoryMappedLock
);
306 MmioBitFieldWrite32 (
307 RegisterTableEntry
->Index
,
308 RegisterTableEntry
->ValidBitStart
,
309 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
310 (UINT32
)RegisterTableEntry
->Value
312 ReleaseSpinLock (mMemoryMappedLock
);
315 // Enable or disable cache
319 // If value of the entry is 0, then disable cache. Otherwise, enable cache.
321 if (RegisterTableEntry
->Value
== 0) {
335 AP initialization before SMBASE relocation in the S3 boot path.
338 EarlyMPRendezvousProcedure (
342 CPU_REGISTER_TABLE
*RegisterTableList
;
346 LoadMtrrData (mAcpiCpuData
.MtrrTable
);
349 // Find processor number for this CPU.
351 RegisterTableList
= (CPU_REGISTER_TABLE
*) (UINTN
) mAcpiCpuData
.PreSmmInitRegisterTable
;
352 InitApicId
= GetInitialApicId ();
353 for (Index
= 0; Index
< mAcpiCpuData
.NumberOfCpus
; Index
++) {
354 if (RegisterTableList
[Index
].InitialApicId
== InitApicId
) {
355 SetProcessorRegister (&RegisterTableList
[Index
]);
361 // Count down the number with lock mechanism.
363 InterlockedDecrement (&mNumberToFinish
);
367 AP initialization after SMBASE relocation in the S3 boot path.
370 MPRendezvousProcedure (
374 CPU_REGISTER_TABLE
*RegisterTableList
;
378 ProgramVirtualWireMode ();
379 DisableLvtInterrupts ();
381 RegisterTableList
= (CPU_REGISTER_TABLE
*) (UINTN
) mAcpiCpuData
.RegisterTable
;
382 InitApicId
= GetInitialApicId ();
383 for (Index
= 0; Index
< mAcpiCpuData
.NumberOfCpus
; Index
++) {
384 if (RegisterTableList
[Index
].InitialApicId
== InitApicId
) {
385 SetProcessorRegister (&RegisterTableList
[Index
]);
391 // Count down the number with lock mechanism.
393 InterlockedDecrement (&mNumberToFinish
);
397 Prepares startup vector for APs.
399 This function prepares startup vector for APs.
401 @param WorkingBuffer The address of the work buffer.
404 PrepareApStartupVector (
405 EFI_PHYSICAL_ADDRESS WorkingBuffer
408 EFI_PHYSICAL_ADDRESS StartupVector
;
409 MP_ASSEMBLY_ADDRESS_MAP AddressMap
;
412 // Get the address map of startup code for AP,
413 // including code size, and offset of long jump instructions to redirect.
415 ZeroMem (&AddressMap
, sizeof (AddressMap
));
416 AsmGetAddressMap (&AddressMap
);
418 StartupVector
= WorkingBuffer
;
421 // Copy AP startup code to startup vector, and then redirect the long jump
422 // instructions for mode switching.
424 CopyMem ((VOID
*) (UINTN
) StartupVector
, AddressMap
.RendezvousFunnelAddress
, AddressMap
.Size
);
425 *(UINT32
*) (UINTN
) (StartupVector
+ AddressMap
.FlatJumpOffset
+ 3) = (UINT32
) (StartupVector
+ AddressMap
.PModeEntryOffset
);
426 if (AddressMap
.LongJumpOffset
!= 0) {
427 *(UINT32
*) (UINTN
) (StartupVector
+ AddressMap
.LongJumpOffset
+ 2) = (UINT32
) (StartupVector
+ AddressMap
.LModeEntryOffset
);
431 // Get the start address of exchange data between BSP and AP.
433 mExchangeInfo
= (MP_CPU_EXCHANGE_INFO
*) (UINTN
) (StartupVector
+ AddressMap
.Size
);
434 ZeroMem ((VOID
*) mExchangeInfo
, sizeof (MP_CPU_EXCHANGE_INFO
));
436 CopyMem ((VOID
*) (UINTN
) &mExchangeInfo
->GdtrProfile
, (VOID
*) (UINTN
) mAcpiCpuData
.GdtrProfile
, sizeof (IA32_DESCRIPTOR
));
437 CopyMem ((VOID
*) (UINTN
) &mExchangeInfo
->IdtrProfile
, (VOID
*) (UINTN
) mAcpiCpuData
.IdtrProfile
, sizeof (IA32_DESCRIPTOR
));
440 // Copy AP's GDT, IDT and Machine Check handler from SMRAM to ACPI NVS memory
442 CopyMem ((VOID
*) mExchangeInfo
->GdtrProfile
.Base
, mGdtForAp
, mExchangeInfo
->GdtrProfile
.Limit
+ 1);
443 CopyMem ((VOID
*) mExchangeInfo
->IdtrProfile
.Base
, mIdtForAp
, mExchangeInfo
->IdtrProfile
.Limit
+ 1);
444 CopyMem ((VOID
*)(UINTN
) mAcpiCpuData
.ApMachineCheckHandlerBase
, mMachineCheckHandlerForAp
, mAcpiCpuData
.ApMachineCheckHandlerSize
);
446 mExchangeInfo
->StackStart
= (VOID
*) (UINTN
) mAcpiCpuData
.StackAddress
;
447 mExchangeInfo
->StackSize
= mAcpiCpuData
.StackSize
;
448 mExchangeInfo
->BufferStart
= (UINT32
) StartupVector
;
449 mExchangeInfo
->Cr3
= (UINT32
) (AsmReadCr3 ());
453 The function is invoked before SMBASE relocation in S3 path to restores CPU status.
455 The function is invoked before SMBASE relocation in S3 path. It does first time microcode load
456 and restores MTRRs for both BSP and APs.
464 CPU_REGISTER_TABLE
*RegisterTableList
;
468 LoadMtrrData (mAcpiCpuData
.MtrrTable
);
471 // Find processor number for this CPU.
473 RegisterTableList
= (CPU_REGISTER_TABLE
*) (UINTN
) mAcpiCpuData
.PreSmmInitRegisterTable
;
474 InitApicId
= GetInitialApicId ();
475 for (Index
= 0; Index
< mAcpiCpuData
.NumberOfCpus
; Index
++) {
476 if (RegisterTableList
[Index
].InitialApicId
== InitApicId
) {
477 SetProcessorRegister (&RegisterTableList
[Index
]);
482 ProgramVirtualWireMode ();
484 PrepareApStartupVector (mAcpiCpuData
.StartupVector
);
486 mNumberToFinish
= mAcpiCpuData
.NumberOfCpus
- 1;
487 mExchangeInfo
->ApFunction
= (VOID
*) (UINTN
) EarlyMPRendezvousProcedure
;
490 // Send INIT IPI - SIPI to all APs
492 SendInitSipiSipiAllExcludingSelf ((UINT32
)mAcpiCpuData
.StartupVector
);
494 while (mNumberToFinish
> 0) {
500 The function is invoked after SMBASE relocation in S3 path to restores CPU status.
502 The function is invoked after SMBASE relocation in S3 path. It restores configuration according to
503 data saved by normal boot path for both BSP and APs.
511 CPU_REGISTER_TABLE
*RegisterTableList
;
515 RegisterTableList
= (CPU_REGISTER_TABLE
*) (UINTN
) mAcpiCpuData
.RegisterTable
;
516 InitApicId
= GetInitialApicId ();
517 for (Index
= 0; Index
< mAcpiCpuData
.NumberOfCpus
; Index
++) {
518 if (RegisterTableList
[Index
].InitialApicId
== InitApicId
) {
519 SetProcessorRegister (&RegisterTableList
[Index
]);
524 mNumberToFinish
= mAcpiCpuData
.NumberOfCpus
- 1;
526 // StackStart was updated when APs were waken up in EarlyInitializeCpu.
527 // Re-initialize StackAddress to original beginning address.
529 mExchangeInfo
->StackStart
= (VOID
*) (UINTN
) mAcpiCpuData
.StackAddress
;
530 mExchangeInfo
->ApFunction
= (VOID
*) (UINTN
) MPRendezvousProcedure
;
533 // Send INIT IPI - SIPI to all APs
535 SendInitSipiSipiAllExcludingSelf ((UINT32
)mAcpiCpuData
.StartupVector
);
537 while (mNumberToFinish
> 0) {
543 Restore SMM Configuration in S3 boot path.
547 RestoreSmmConfigurationInS3 (
552 // Restore SMM Configuration in S3 boot path.
554 if (mRestoreSmmConfigurationInS3
) {
556 // Need make sure gSmst is correct because below function may use them.
558 gSmst
->SmmStartupThisAp
= gSmmCpuPrivate
->SmmCoreEntryContext
.SmmStartupThisAp
;
559 gSmst
->CurrentlyExecutingCpu
= gSmmCpuPrivate
->SmmCoreEntryContext
.CurrentlyExecutingCpu
;
560 gSmst
->NumberOfCpus
= gSmmCpuPrivate
->SmmCoreEntryContext
.NumberOfCpus
;
561 gSmst
->CpuSaveStateSize
= gSmmCpuPrivate
->SmmCoreEntryContext
.CpuSaveStateSize
;
562 gSmst
->CpuSaveState
= gSmmCpuPrivate
->SmmCoreEntryContext
.CpuSaveState
;
565 // Configure SMM Code Access Check feature if available.
567 ConfigSmmCodeAccessCheck ();
569 SmmCpuFeaturesCompleteSmmReadyToLock ();
571 mRestoreSmmConfigurationInS3
= FALSE
;
576 Perform SMM initialization for all processors in the S3 boot path.
578 For a native platform, MP initialization in the S3 boot path is also performed in this function.
586 SMM_S3_RESUME_STATE
*SmmS3ResumeState
;
587 IA32_DESCRIPTOR Ia32Idtr
;
588 IA32_DESCRIPTOR X64Idtr
;
589 IA32_IDT_GATE_DESCRIPTOR IdtEntryTable
[EXCEPTION_VECTOR_NUMBER
];
592 DEBUG ((EFI_D_INFO
, "SmmRestoreCpu()\n"));
596 InitializeSpinLock (mMemoryMappedLock
);
599 // See if there is enough context to resume PEI Phase
601 if (mSmmS3ResumeState
== NULL
) {
602 DEBUG ((EFI_D_ERROR
, "No context to return to PEI Phase\n"));
606 SmmS3ResumeState
= mSmmS3ResumeState
;
607 ASSERT (SmmS3ResumeState
!= NULL
);
609 if (SmmS3ResumeState
->Signature
== SMM_S3_RESUME_SMM_64
) {
611 // Save the IA32 IDT Descriptor
613 AsmReadIdtr ((IA32_DESCRIPTOR
*) &Ia32Idtr
);
616 // Setup X64 IDT table
618 ZeroMem (IdtEntryTable
, sizeof (IA32_IDT_GATE_DESCRIPTOR
) * 32);
619 X64Idtr
.Base
= (UINTN
) IdtEntryTable
;
620 X64Idtr
.Limit
= (UINT16
) (sizeof (IA32_IDT_GATE_DESCRIPTOR
) * 32 - 1);
621 AsmWriteIdtr ((IA32_DESCRIPTOR
*) &X64Idtr
);
624 // Setup the default exception handler
626 Status
= InitializeCpuExceptionHandlers (NULL
);
627 ASSERT_EFI_ERROR (Status
);
630 // Initialize Debug Agent to support source level debug
632 InitializeDebugAgent (DEBUG_AGENT_INIT_THUNK_PEI_IA32TOX64
, (VOID
*)&Ia32Idtr
, NULL
);
636 // Skip initialization if mAcpiCpuData is not valid
638 if (mAcpiCpuData
.NumberOfCpus
> 0) {
640 // First time microcode load and restore MTRRs
642 EarlyInitializeCpu ();
646 // Restore SMBASE for BSP and all APs
651 // Skip initialization if mAcpiCpuData is not valid
653 if (mAcpiCpuData
.NumberOfCpus
> 0) {
655 // Restore MSRs for BSP and all APs
661 // Set a flag to restore SMM configuration in S3 path.
663 mRestoreSmmConfigurationInS3
= TRUE
;
665 DEBUG (( EFI_D_INFO
, "SMM S3 Return CS = %x\n", SmmS3ResumeState
->ReturnCs
));
666 DEBUG (( EFI_D_INFO
, "SMM S3 Return Entry Point = %x\n", SmmS3ResumeState
->ReturnEntryPoint
));
667 DEBUG (( EFI_D_INFO
, "SMM S3 Return Context1 = %x\n", SmmS3ResumeState
->ReturnContext1
));
668 DEBUG (( EFI_D_INFO
, "SMM S3 Return Context2 = %x\n", SmmS3ResumeState
->ReturnContext2
));
669 DEBUG (( EFI_D_INFO
, "SMM S3 Return Stack Pointer = %x\n", SmmS3ResumeState
->ReturnStackPointer
));
672 // If SMM is in 32-bit mode, then use SwitchStack() to resume PEI Phase
674 if (SmmS3ResumeState
->Signature
== SMM_S3_RESUME_SMM_32
) {
675 DEBUG ((EFI_D_INFO
, "Call SwitchStack() to return to S3 Resume in PEI Phase\n"));
678 (SWITCH_STACK_ENTRY_POINT
)(UINTN
)SmmS3ResumeState
->ReturnEntryPoint
,
679 (VOID
*)(UINTN
)SmmS3ResumeState
->ReturnContext1
,
680 (VOID
*)(UINTN
)SmmS3ResumeState
->ReturnContext2
,
681 (VOID
*)(UINTN
)SmmS3ResumeState
->ReturnStackPointer
686 // If SMM is in 64-bit mode, then use AsmDisablePaging64() to resume PEI Phase
688 if (SmmS3ResumeState
->Signature
== SMM_S3_RESUME_SMM_64
) {
689 DEBUG ((EFI_D_INFO
, "Call AsmDisablePaging64() to return to S3 Resume in PEI Phase\n"));
691 // Disable interrupt of Debug timer, since new IDT table is for IA32 and will not work in long mode.
693 SaveAndSetDebugTimerInterrupt (FALSE
);
695 // Restore IA32 IDT table
697 AsmWriteIdtr ((IA32_DESCRIPTOR
*) &Ia32Idtr
);
699 SmmS3ResumeState
->ReturnCs
,
700 (UINT32
)SmmS3ResumeState
->ReturnEntryPoint
,
701 (UINT32
)SmmS3ResumeState
->ReturnContext1
,
702 (UINT32
)SmmS3ResumeState
->ReturnContext2
,
703 (UINT32
)SmmS3ResumeState
->ReturnStackPointer
708 // Can not resume PEI Phase
710 DEBUG ((EFI_D_ERROR
, "No context to return to PEI Phase\n"));
715 Initialize SMM S3 resume state structure used during S3 Resume.
717 @param[in] Cr3 The base address of the page tables to use in SMM.
721 InitSmmS3ResumeState (
726 EFI_SMRAM_DESCRIPTOR
*SmramDescriptor
;
727 SMM_S3_RESUME_STATE
*SmmS3ResumeState
;
729 GuidHob
= GetFirstGuidHob (&gEfiAcpiVariableGuid
);
730 if (GuidHob
!= NULL
) {
731 SmramDescriptor
= (EFI_SMRAM_DESCRIPTOR
*) GET_GUID_HOB_DATA (GuidHob
);
733 DEBUG ((EFI_D_INFO
, "SMM S3 SMRAM Structure = %x\n", SmramDescriptor
));
734 DEBUG ((EFI_D_INFO
, "SMM S3 Structure = %x\n", SmramDescriptor
->CpuStart
));
736 SmmS3ResumeState
= (SMM_S3_RESUME_STATE
*)(UINTN
)SmramDescriptor
->CpuStart
;
737 ZeroMem (SmmS3ResumeState
, sizeof (SMM_S3_RESUME_STATE
));
739 mSmmS3ResumeState
= SmmS3ResumeState
;
740 SmmS3ResumeState
->Smst
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)gSmst
;
742 SmmS3ResumeState
->SmmS3ResumeEntryPoint
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)SmmRestoreCpu
;
744 SmmS3ResumeState
->SmmS3StackSize
= SIZE_32KB
;
745 SmmS3ResumeState
->SmmS3StackBase
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePages (EFI_SIZE_TO_PAGES ((UINTN
)SmmS3ResumeState
->SmmS3StackSize
));
746 if (SmmS3ResumeState
->SmmS3StackBase
== 0) {
747 SmmS3ResumeState
->SmmS3StackSize
= 0;
750 SmmS3ResumeState
->SmmS3Cr0
= gSmmCr0
;
751 SmmS3ResumeState
->SmmS3Cr3
= Cr3
;
752 SmmS3ResumeState
->SmmS3Cr4
= gSmmCr4
;
754 if (sizeof (UINTN
) == sizeof (UINT64
)) {
755 SmmS3ResumeState
->Signature
= SMM_S3_RESUME_SMM_64
;
757 if (sizeof (UINTN
) == sizeof (UINT32
)) {
758 SmmS3ResumeState
->Signature
= SMM_S3_RESUME_SMM_32
;
763 // Patch SmmS3ResumeState->SmmS3Cr3
769 Copy register table from ACPI NVS memory into SMRAM.
771 @param[in] DestinationRegisterTableList Points to destination register table.
772 @param[in] SourceRegisterTableList Points to source register table.
773 @param[in] NumberOfCpus Number of CPUs.
778 IN CPU_REGISTER_TABLE
*DestinationRegisterTableList
,
779 IN CPU_REGISTER_TABLE
*SourceRegisterTableList
,
780 IN UINT32 NumberOfCpus
785 CPU_REGISTER_TABLE_ENTRY
*RegisterTableEntry
;
787 CopyMem (DestinationRegisterTableList
, SourceRegisterTableList
, NumberOfCpus
* sizeof (CPU_REGISTER_TABLE
));
788 for (Index
= 0; Index
< NumberOfCpus
; Index
++) {
789 DestinationRegisterTableList
[Index
].RegisterTableEntry
= AllocatePool (DestinationRegisterTableList
[Index
].AllocatedSize
);
790 ASSERT (DestinationRegisterTableList
[Index
].RegisterTableEntry
!= NULL
);
791 CopyMem (DestinationRegisterTableList
[Index
].RegisterTableEntry
, SourceRegisterTableList
[Index
].RegisterTableEntry
, DestinationRegisterTableList
[Index
].AllocatedSize
);
793 // Go though all MSRs in register table to initialize MSR spin lock
795 RegisterTableEntry
= DestinationRegisterTableList
[Index
].RegisterTableEntry
;
796 for (Index1
= 0; Index1
< DestinationRegisterTableList
[Index
].TableLength
; Index1
++, RegisterTableEntry
++) {
797 if ((RegisterTableEntry
->RegisterType
== Msr
) && (RegisterTableEntry
->ValidBitLength
< 64)) {
799 // Initialize MSR spin lock only for those MSRs need bit field writing
801 InitMsrSpinLockByIndex (RegisterTableEntry
->Index
);
816 ACPI_CPU_DATA
*AcpiCpuData
;
817 IA32_DESCRIPTOR
*Gdtr
;
818 IA32_DESCRIPTOR
*Idtr
;
821 // Prevent use of mAcpiCpuData by initialize NumberOfCpus to 0
823 mAcpiCpuData
.NumberOfCpus
= 0;
826 // If PcdCpuS3DataAddress was never set, then do not copy CPU S3 Data into SMRAM
828 AcpiCpuData
= (ACPI_CPU_DATA
*)(UINTN
)PcdGet64 (PcdCpuS3DataAddress
);
829 if (AcpiCpuData
== 0) {
834 // For a native platform, copy the CPU S3 data into SMRAM for use on CPU S3 Resume.
836 CopyMem (&mAcpiCpuData
, AcpiCpuData
, sizeof (mAcpiCpuData
));
838 mAcpiCpuData
.MtrrTable
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (sizeof (MTRR_SETTINGS
));
839 ASSERT (mAcpiCpuData
.MtrrTable
!= 0);
841 CopyMem ((VOID
*)(UINTN
)mAcpiCpuData
.MtrrTable
, (VOID
*)(UINTN
)AcpiCpuData
->MtrrTable
, sizeof (MTRR_SETTINGS
));
843 mAcpiCpuData
.GdtrProfile
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (sizeof (IA32_DESCRIPTOR
));
844 ASSERT (mAcpiCpuData
.GdtrProfile
!= 0);
846 CopyMem ((VOID
*)(UINTN
)mAcpiCpuData
.GdtrProfile
, (VOID
*)(UINTN
)AcpiCpuData
->GdtrProfile
, sizeof (IA32_DESCRIPTOR
));
848 mAcpiCpuData
.IdtrProfile
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (sizeof (IA32_DESCRIPTOR
));
849 ASSERT (mAcpiCpuData
.IdtrProfile
!= 0);
851 CopyMem ((VOID
*)(UINTN
)mAcpiCpuData
.IdtrProfile
, (VOID
*)(UINTN
)AcpiCpuData
->IdtrProfile
, sizeof (IA32_DESCRIPTOR
));
853 mAcpiCpuData
.PreSmmInitRegisterTable
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (mAcpiCpuData
.NumberOfCpus
* sizeof (CPU_REGISTER_TABLE
));
854 ASSERT (mAcpiCpuData
.PreSmmInitRegisterTable
!= 0);
857 (CPU_REGISTER_TABLE
*)(UINTN
)mAcpiCpuData
.PreSmmInitRegisterTable
,
858 (CPU_REGISTER_TABLE
*)(UINTN
)AcpiCpuData
->PreSmmInitRegisterTable
,
859 mAcpiCpuData
.NumberOfCpus
862 mAcpiCpuData
.RegisterTable
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (mAcpiCpuData
.NumberOfCpus
* sizeof (CPU_REGISTER_TABLE
));
863 ASSERT (mAcpiCpuData
.RegisterTable
!= 0);
866 (CPU_REGISTER_TABLE
*)(UINTN
)mAcpiCpuData
.RegisterTable
,
867 (CPU_REGISTER_TABLE
*)(UINTN
)AcpiCpuData
->RegisterTable
,
868 mAcpiCpuData
.NumberOfCpus
872 // Copy AP's GDT, IDT and Machine Check handler into SMRAM.
874 Gdtr
= (IA32_DESCRIPTOR
*)(UINTN
)mAcpiCpuData
.GdtrProfile
;
875 Idtr
= (IA32_DESCRIPTOR
*)(UINTN
)mAcpiCpuData
.IdtrProfile
;
877 mGdtForAp
= AllocatePool ((Gdtr
->Limit
+ 1) + (Idtr
->Limit
+ 1) + mAcpiCpuData
.ApMachineCheckHandlerSize
);
878 ASSERT (mGdtForAp
!= NULL
);
879 mIdtForAp
= (VOID
*) ((UINTN
)mGdtForAp
+ (Gdtr
->Limit
+ 1));
880 mMachineCheckHandlerForAp
= (VOID
*) ((UINTN
)mIdtForAp
+ (Idtr
->Limit
+ 1));
882 CopyMem (mGdtForAp
, (VOID
*)Gdtr
->Base
, Gdtr
->Limit
+ 1);
883 CopyMem (mIdtForAp
, (VOID
*)Idtr
->Base
, Idtr
->Limit
+ 1);
884 CopyMem (mMachineCheckHandlerForAp
, (VOID
*)(UINTN
)mAcpiCpuData
.ApMachineCheckHandlerBase
, mAcpiCpuData
.ApMachineCheckHandlerSize
);