/** @file
Code for Processor S3 restoration

Copyright (c) 2006 - 2016, Intel Corporation. All rights reserved.<BR>
This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution.  The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php

THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

**/
15 #include "PiSmmCpuDxeSmm.h"
22 IA32_DESCRIPTOR GdtrProfile
;
23 IA32_DESCRIPTOR IdtrProfile
;
26 } MP_CPU_EXCHANGE_INFO
;
29 UINT8
*RendezvousFunnelAddress
;
30 UINTN PModeEntryOffset
;
33 UINTN LModeEntryOffset
;
35 } MP_ASSEMBLY_ADDRESS_MAP
;
38 // Spin lock used to serialize MemoryMapped operation
40 SPIN_LOCK
*mMemoryMappedLock
= NULL
;
43 Get starting address and size of the rendezvous entry for APs.
44 Information for fixing a jump instruction in the code is also returned.
46 @param AddressMap Output buffer for address map information.
51 MP_ASSEMBLY_ADDRESS_MAP
*AddressMap
54 #define LEGACY_REGION_SIZE (2 * 0x1000)
55 #define LEGACY_REGION_BASE (0xA0000 - LEGACY_REGION_SIZE)
57 ACPI_CPU_DATA mAcpiCpuData
;
58 UINT32 mNumberToFinish
;
59 MP_CPU_EXCHANGE_INFO
*mExchangeInfo
;
60 BOOLEAN mRestoreSmmConfigurationInS3
= FALSE
;
61 VOID
*mGdtForAp
= NULL
;
62 VOID
*mIdtForAp
= NULL
;
63 VOID
*mMachineCheckHandlerForAp
= NULL
;
64 MP_MSR_LOCK
*mMsrSpinLocks
= NULL
;
65 UINTN mMsrSpinLockCount
;
71 BOOLEAN mSmmS3Flag
= FALSE
;
74 // Pointer to structure used during S3 Resume
76 SMM_S3_RESUME_STATE
*mSmmS3ResumeState
= NULL
;
78 BOOLEAN mAcpiS3Enable
= TRUE
;
81 Get MSR spin lock by MSR index.
83 @param MsrIndex MSR index value.
85 @return Pointer to MSR spin lock.
89 GetMsrSpinLockByIndex (
94 for (Index
= 0; Index
< mMsrCount
; Index
++) {
95 if (MsrIndex
== mMsrSpinLocks
[Index
].MsrIndex
) {
96 return mMsrSpinLocks
[Index
].SpinLock
;
103 Initialize MSR spin lock by MSR index.
105 @param MsrIndex MSR index value.
109 InitMsrSpinLockByIndex (
113 UINTN MsrSpinLockCount
;
114 UINTN NewMsrSpinLockCount
;
118 if (mMsrSpinLocks
== NULL
) {
119 MsrSpinLockCount
= mSmmCpuSemaphores
.SemaphoreMsr
.AvailableCounter
;
120 mMsrSpinLocks
= (MP_MSR_LOCK
*) AllocatePool (sizeof (MP_MSR_LOCK
) * MsrSpinLockCount
);
121 ASSERT (mMsrSpinLocks
!= NULL
);
122 for (Index
= 0; Index
< MsrSpinLockCount
; Index
++) {
123 mMsrSpinLocks
[Index
].SpinLock
=
124 (SPIN_LOCK
*)((UINTN
)mSmmCpuSemaphores
.SemaphoreMsr
.Msr
+ Index
* mSemaphoreSize
);
125 mMsrSpinLocks
[Index
].MsrIndex
= (UINT32
)-1;
127 mMsrSpinLockCount
= MsrSpinLockCount
;
128 mSmmCpuSemaphores
.SemaphoreMsr
.AvailableCounter
= 0;
130 if (GetMsrSpinLockByIndex (MsrIndex
) == NULL
) {
132 // Initialize spin lock for MSR programming
134 mMsrSpinLocks
[mMsrCount
].MsrIndex
= MsrIndex
;
135 InitializeSpinLock (mMsrSpinLocks
[mMsrCount
].SpinLock
);
137 if (mMsrCount
== mMsrSpinLockCount
) {
139 // If MSR spin lock buffer is full, enlarge it
141 AddedSize
= SIZE_4KB
;
142 mSmmCpuSemaphores
.SemaphoreMsr
.Msr
=
143 AllocatePages (EFI_SIZE_TO_PAGES(AddedSize
));
144 ASSERT (mSmmCpuSemaphores
.SemaphoreMsr
.Msr
!= NULL
);
145 NewMsrSpinLockCount
= mMsrSpinLockCount
+ AddedSize
/ mSemaphoreSize
;
146 mMsrSpinLocks
= ReallocatePool (
147 sizeof (MP_MSR_LOCK
) * mMsrSpinLockCount
,
148 sizeof (MP_MSR_LOCK
) * NewMsrSpinLockCount
,
151 ASSERT (mMsrSpinLocks
!= NULL
);
152 mMsrSpinLockCount
= NewMsrSpinLockCount
;
153 for (Index
= mMsrCount
; Index
< mMsrSpinLockCount
; Index
++) {
154 mMsrSpinLocks
[Index
].SpinLock
=
155 (SPIN_LOCK
*)((UINTN
)mSmmCpuSemaphores
.SemaphoreMsr
.Msr
+
156 (Index
- mMsrCount
) * mSemaphoreSize
);
157 mMsrSpinLocks
[Index
].MsrIndex
= (UINT32
)-1;
164 Sync up the MTRR values for all processors.
166 @param MtrrTable Table holding fixed/variable MTRR values to be loaded.
171 EFI_PHYSICAL_ADDRESS MtrrTable
177 Sync up the MTRR values for all processors.
186 MTRR_SETTINGS
*MtrrSettings
;
188 MtrrSettings
= (MTRR_SETTINGS
*) (UINTN
) MtrrTable
;
189 MtrrSetAllMtrrs (MtrrSettings
);
193 Programs registers for the calling processor.
195 This function programs registers for the calling processor.
197 @param RegisterTable Pointer to register table of the running processor.
201 SetProcessorRegister (
202 IN CPU_REGISTER_TABLE
*RegisterTable
205 CPU_REGISTER_TABLE_ENTRY
*RegisterTableEntry
;
208 SPIN_LOCK
*MsrSpinLock
;
211 // Traverse Register Table of this logical processor
213 RegisterTableEntry
= (CPU_REGISTER_TABLE_ENTRY
*) (UINTN
) RegisterTable
->RegisterTableEntry
;
214 for (Index
= 0; Index
< RegisterTable
->TableLength
; Index
++, RegisterTableEntry
++) {
216 // Check the type of specified register
218 switch (RegisterTableEntry
->RegisterType
) {
220 // The specified register is Control Register
222 case ControlRegister
:
223 switch (RegisterTableEntry
->Index
) {
225 Value
= AsmReadCr0 ();
226 Value
= (UINTN
) BitFieldWrite64 (
228 RegisterTableEntry
->ValidBitStart
,
229 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
230 (UINTN
) RegisterTableEntry
->Value
235 Value
= AsmReadCr2 ();
236 Value
= (UINTN
) BitFieldWrite64 (
238 RegisterTableEntry
->ValidBitStart
,
239 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
240 (UINTN
) RegisterTableEntry
->Value
245 Value
= AsmReadCr3 ();
246 Value
= (UINTN
) BitFieldWrite64 (
248 RegisterTableEntry
->ValidBitStart
,
249 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
250 (UINTN
) RegisterTableEntry
->Value
255 Value
= AsmReadCr4 ();
256 Value
= (UINTN
) BitFieldWrite64 (
258 RegisterTableEntry
->ValidBitStart
,
259 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
260 (UINTN
) RegisterTableEntry
->Value
269 // The specified register is Model Specific Register
273 // If this function is called to restore register setting after INIT signal,
274 // there is no need to restore MSRs in register table.
276 if (RegisterTableEntry
->ValidBitLength
>= 64) {
278 // If length is not less than 64 bits, then directly write without reading
281 RegisterTableEntry
->Index
,
282 RegisterTableEntry
->Value
286 // Get lock to avoid Package/Core scope MSRs programming issue in parallel execution mode
287 // to make sure MSR read/write operation is atomic.
289 MsrSpinLock
= GetMsrSpinLockByIndex (RegisterTableEntry
->Index
);
290 AcquireSpinLock (MsrSpinLock
);
292 // Set the bit section according to bit start and length
294 AsmMsrBitFieldWrite64 (
295 RegisterTableEntry
->Index
,
296 RegisterTableEntry
->ValidBitStart
,
297 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
298 RegisterTableEntry
->Value
300 ReleaseSpinLock (MsrSpinLock
);
304 // MemoryMapped operations
307 AcquireSpinLock (mMemoryMappedLock
);
308 MmioBitFieldWrite32 (
309 RegisterTableEntry
->Index
,
310 RegisterTableEntry
->ValidBitStart
,
311 RegisterTableEntry
->ValidBitStart
+ RegisterTableEntry
->ValidBitLength
- 1,
312 (UINT32
)RegisterTableEntry
->Value
314 ReleaseSpinLock (mMemoryMappedLock
);
317 // Enable or disable cache
321 // If value of the entry is 0, then disable cache. Otherwise, enable cache.
323 if (RegisterTableEntry
->Value
== 0) {
337 AP initialization before SMBASE relocation in the S3 boot path.
340 EarlyMPRendezvousProcedure (
344 CPU_REGISTER_TABLE
*RegisterTableList
;
348 LoadMtrrData (mAcpiCpuData
.MtrrTable
);
351 // Find processor number for this CPU.
353 RegisterTableList
= (CPU_REGISTER_TABLE
*) (UINTN
) mAcpiCpuData
.PreSmmInitRegisterTable
;
354 InitApicId
= GetInitialApicId ();
355 for (Index
= 0; Index
< mAcpiCpuData
.NumberOfCpus
; Index
++) {
356 if (RegisterTableList
[Index
].InitialApicId
== InitApicId
) {
357 SetProcessorRegister (&RegisterTableList
[Index
]);
363 // Count down the number with lock mechanism.
365 InterlockedDecrement (&mNumberToFinish
);
369 AP initialization after SMBASE relocation in the S3 boot path.
372 MPRendezvousProcedure (
376 CPU_REGISTER_TABLE
*RegisterTableList
;
380 ProgramVirtualWireMode ();
381 DisableLvtInterrupts ();
383 RegisterTableList
= (CPU_REGISTER_TABLE
*) (UINTN
) mAcpiCpuData
.RegisterTable
;
384 InitApicId
= GetInitialApicId ();
385 for (Index
= 0; Index
< mAcpiCpuData
.NumberOfCpus
; Index
++) {
386 if (RegisterTableList
[Index
].InitialApicId
== InitApicId
) {
387 SetProcessorRegister (&RegisterTableList
[Index
]);
393 // Count down the number with lock mechanism.
395 InterlockedDecrement (&mNumberToFinish
);
399 Prepares startup vector for APs.
401 This function prepares startup vector for APs.
403 @param WorkingBuffer The address of the work buffer.
406 PrepareApStartupVector (
407 EFI_PHYSICAL_ADDRESS WorkingBuffer
410 EFI_PHYSICAL_ADDRESS StartupVector
;
411 MP_ASSEMBLY_ADDRESS_MAP AddressMap
;
414 // Get the address map of startup code for AP,
415 // including code size, and offset of long jump instructions to redirect.
417 ZeroMem (&AddressMap
, sizeof (AddressMap
));
418 AsmGetAddressMap (&AddressMap
);
420 StartupVector
= WorkingBuffer
;
423 // Copy AP startup code to startup vector, and then redirect the long jump
424 // instructions for mode switching.
426 CopyMem ((VOID
*) (UINTN
) StartupVector
, AddressMap
.RendezvousFunnelAddress
, AddressMap
.Size
);
427 *(UINT32
*) (UINTN
) (StartupVector
+ AddressMap
.FlatJumpOffset
+ 3) = (UINT32
) (StartupVector
+ AddressMap
.PModeEntryOffset
);
428 if (AddressMap
.LongJumpOffset
!= 0) {
429 *(UINT32
*) (UINTN
) (StartupVector
+ AddressMap
.LongJumpOffset
+ 2) = (UINT32
) (StartupVector
+ AddressMap
.LModeEntryOffset
);
433 // Get the start address of exchange data between BSP and AP.
435 mExchangeInfo
= (MP_CPU_EXCHANGE_INFO
*) (UINTN
) (StartupVector
+ AddressMap
.Size
);
436 ZeroMem ((VOID
*) mExchangeInfo
, sizeof (MP_CPU_EXCHANGE_INFO
));
438 CopyMem ((VOID
*) (UINTN
) &mExchangeInfo
->GdtrProfile
, (VOID
*) (UINTN
) mAcpiCpuData
.GdtrProfile
, sizeof (IA32_DESCRIPTOR
));
439 CopyMem ((VOID
*) (UINTN
) &mExchangeInfo
->IdtrProfile
, (VOID
*) (UINTN
) mAcpiCpuData
.IdtrProfile
, sizeof (IA32_DESCRIPTOR
));
442 // Copy AP's GDT, IDT and Machine Check handler from SMRAM to ACPI NVS memory
444 CopyMem ((VOID
*) mExchangeInfo
->GdtrProfile
.Base
, mGdtForAp
, mExchangeInfo
->GdtrProfile
.Limit
+ 1);
445 CopyMem ((VOID
*) mExchangeInfo
->IdtrProfile
.Base
, mIdtForAp
, mExchangeInfo
->IdtrProfile
.Limit
+ 1);
446 CopyMem ((VOID
*)(UINTN
) mAcpiCpuData
.ApMachineCheckHandlerBase
, mMachineCheckHandlerForAp
, mAcpiCpuData
.ApMachineCheckHandlerSize
);
448 mExchangeInfo
->StackStart
= (VOID
*) (UINTN
) mAcpiCpuData
.StackAddress
;
449 mExchangeInfo
->StackSize
= mAcpiCpuData
.StackSize
;
450 mExchangeInfo
->BufferStart
= (UINT32
) StartupVector
;
451 mExchangeInfo
->Cr3
= (UINT32
) (AsmReadCr3 ());
455 The function is invoked before SMBASE relocation in S3 path to restores CPU status.
457 The function is invoked before SMBASE relocation in S3 path. It does first time microcode load
458 and restores MTRRs for both BSP and APs.
466 CPU_REGISTER_TABLE
*RegisterTableList
;
470 LoadMtrrData (mAcpiCpuData
.MtrrTable
);
473 // Find processor number for this CPU.
475 RegisterTableList
= (CPU_REGISTER_TABLE
*) (UINTN
) mAcpiCpuData
.PreSmmInitRegisterTable
;
476 InitApicId
= GetInitialApicId ();
477 for (Index
= 0; Index
< mAcpiCpuData
.NumberOfCpus
; Index
++) {
478 if (RegisterTableList
[Index
].InitialApicId
== InitApicId
) {
479 SetProcessorRegister (&RegisterTableList
[Index
]);
484 ProgramVirtualWireMode ();
486 PrepareApStartupVector (mAcpiCpuData
.StartupVector
);
488 mNumberToFinish
= mAcpiCpuData
.NumberOfCpus
- 1;
489 mExchangeInfo
->ApFunction
= (VOID
*) (UINTN
) EarlyMPRendezvousProcedure
;
492 // Send INIT IPI - SIPI to all APs
494 SendInitSipiSipiAllExcludingSelf ((UINT32
)mAcpiCpuData
.StartupVector
);
496 while (mNumberToFinish
> 0) {
502 The function is invoked after SMBASE relocation in S3 path to restores CPU status.
504 The function is invoked after SMBASE relocation in S3 path. It restores configuration according to
505 data saved by normal boot path for both BSP and APs.
513 CPU_REGISTER_TABLE
*RegisterTableList
;
517 RegisterTableList
= (CPU_REGISTER_TABLE
*) (UINTN
) mAcpiCpuData
.RegisterTable
;
518 InitApicId
= GetInitialApicId ();
519 for (Index
= 0; Index
< mAcpiCpuData
.NumberOfCpus
; Index
++) {
520 if (RegisterTableList
[Index
].InitialApicId
== InitApicId
) {
521 SetProcessorRegister (&RegisterTableList
[Index
]);
526 mNumberToFinish
= mAcpiCpuData
.NumberOfCpus
- 1;
528 // StackStart was updated when APs were waken up in EarlyInitializeCpu.
529 // Re-initialize StackAddress to original beginning address.
531 mExchangeInfo
->StackStart
= (VOID
*) (UINTN
) mAcpiCpuData
.StackAddress
;
532 mExchangeInfo
->ApFunction
= (VOID
*) (UINTN
) MPRendezvousProcedure
;
535 // Send INIT IPI - SIPI to all APs
537 SendInitSipiSipiAllExcludingSelf ((UINT32
)mAcpiCpuData
.StartupVector
);
539 while (mNumberToFinish
> 0) {
545 Restore SMM Configuration in S3 boot path.
549 RestoreSmmConfigurationInS3 (
553 if (!mAcpiS3Enable
) {
558 // Restore SMM Configuration in S3 boot path.
560 if (mRestoreSmmConfigurationInS3
) {
562 // Need make sure gSmst is correct because below function may use them.
564 gSmst
->SmmStartupThisAp
= gSmmCpuPrivate
->SmmCoreEntryContext
.SmmStartupThisAp
;
565 gSmst
->CurrentlyExecutingCpu
= gSmmCpuPrivate
->SmmCoreEntryContext
.CurrentlyExecutingCpu
;
566 gSmst
->NumberOfCpus
= gSmmCpuPrivate
->SmmCoreEntryContext
.NumberOfCpus
;
567 gSmst
->CpuSaveStateSize
= gSmmCpuPrivate
->SmmCoreEntryContext
.CpuSaveStateSize
;
568 gSmst
->CpuSaveState
= gSmmCpuPrivate
->SmmCoreEntryContext
.CpuSaveState
;
571 // Configure SMM Code Access Check feature if available.
573 ConfigSmmCodeAccessCheck ();
575 SmmCpuFeaturesCompleteSmmReadyToLock ();
577 mRestoreSmmConfigurationInS3
= FALSE
;
582 Perform SMM initialization for all processors in the S3 boot path.
584 For a native platform, MP initialization in the S3 boot path is also performed in this function.
592 SMM_S3_RESUME_STATE
*SmmS3ResumeState
;
593 IA32_DESCRIPTOR Ia32Idtr
;
594 IA32_DESCRIPTOR X64Idtr
;
595 IA32_IDT_GATE_DESCRIPTOR IdtEntryTable
[EXCEPTION_VECTOR_NUMBER
];
598 DEBUG ((EFI_D_INFO
, "SmmRestoreCpu()\n"));
602 InitializeSpinLock (mMemoryMappedLock
);
605 // See if there is enough context to resume PEI Phase
607 if (mSmmS3ResumeState
== NULL
) {
608 DEBUG ((EFI_D_ERROR
, "No context to return to PEI Phase\n"));
612 SmmS3ResumeState
= mSmmS3ResumeState
;
613 ASSERT (SmmS3ResumeState
!= NULL
);
615 if (SmmS3ResumeState
->Signature
== SMM_S3_RESUME_SMM_64
) {
617 // Save the IA32 IDT Descriptor
619 AsmReadIdtr ((IA32_DESCRIPTOR
*) &Ia32Idtr
);
622 // Setup X64 IDT table
624 ZeroMem (IdtEntryTable
, sizeof (IA32_IDT_GATE_DESCRIPTOR
) * 32);
625 X64Idtr
.Base
= (UINTN
) IdtEntryTable
;
626 X64Idtr
.Limit
= (UINT16
) (sizeof (IA32_IDT_GATE_DESCRIPTOR
) * 32 - 1);
627 AsmWriteIdtr ((IA32_DESCRIPTOR
*) &X64Idtr
);
630 // Setup the default exception handler
632 Status
= InitializeCpuExceptionHandlers (NULL
);
633 ASSERT_EFI_ERROR (Status
);
636 // Initialize Debug Agent to support source level debug
638 InitializeDebugAgent (DEBUG_AGENT_INIT_THUNK_PEI_IA32TOX64
, (VOID
*)&Ia32Idtr
, NULL
);
642 // Skip initialization if mAcpiCpuData is not valid
644 if (mAcpiCpuData
.NumberOfCpus
> 0) {
646 // First time microcode load and restore MTRRs
648 EarlyInitializeCpu ();
652 // Restore SMBASE for BSP and all APs
657 // Skip initialization if mAcpiCpuData is not valid
659 if (mAcpiCpuData
.NumberOfCpus
> 0) {
661 // Restore MSRs for BSP and all APs
667 // Set a flag to restore SMM configuration in S3 path.
669 mRestoreSmmConfigurationInS3
= TRUE
;
671 DEBUG (( EFI_D_INFO
, "SMM S3 Return CS = %x\n", SmmS3ResumeState
->ReturnCs
));
672 DEBUG (( EFI_D_INFO
, "SMM S3 Return Entry Point = %x\n", SmmS3ResumeState
->ReturnEntryPoint
));
673 DEBUG (( EFI_D_INFO
, "SMM S3 Return Context1 = %x\n", SmmS3ResumeState
->ReturnContext1
));
674 DEBUG (( EFI_D_INFO
, "SMM S3 Return Context2 = %x\n", SmmS3ResumeState
->ReturnContext2
));
675 DEBUG (( EFI_D_INFO
, "SMM S3 Return Stack Pointer = %x\n", SmmS3ResumeState
->ReturnStackPointer
));
678 // If SMM is in 32-bit mode, then use SwitchStack() to resume PEI Phase
680 if (SmmS3ResumeState
->Signature
== SMM_S3_RESUME_SMM_32
) {
681 DEBUG ((EFI_D_INFO
, "Call SwitchStack() to return to S3 Resume in PEI Phase\n"));
684 (SWITCH_STACK_ENTRY_POINT
)(UINTN
)SmmS3ResumeState
->ReturnEntryPoint
,
685 (VOID
*)(UINTN
)SmmS3ResumeState
->ReturnContext1
,
686 (VOID
*)(UINTN
)SmmS3ResumeState
->ReturnContext2
,
687 (VOID
*)(UINTN
)SmmS3ResumeState
->ReturnStackPointer
692 // If SMM is in 64-bit mode, then use AsmDisablePaging64() to resume PEI Phase
694 if (SmmS3ResumeState
->Signature
== SMM_S3_RESUME_SMM_64
) {
695 DEBUG ((EFI_D_INFO
, "Call AsmDisablePaging64() to return to S3 Resume in PEI Phase\n"));
697 // Disable interrupt of Debug timer, since new IDT table is for IA32 and will not work in long mode.
699 SaveAndSetDebugTimerInterrupt (FALSE
);
701 // Restore IA32 IDT table
703 AsmWriteIdtr ((IA32_DESCRIPTOR
*) &Ia32Idtr
);
705 SmmS3ResumeState
->ReturnCs
,
706 (UINT32
)SmmS3ResumeState
->ReturnEntryPoint
,
707 (UINT32
)SmmS3ResumeState
->ReturnContext1
,
708 (UINT32
)SmmS3ResumeState
->ReturnContext2
,
709 (UINT32
)SmmS3ResumeState
->ReturnStackPointer
714 // Can not resume PEI Phase
716 DEBUG ((EFI_D_ERROR
, "No context to return to PEI Phase\n"));
721 Initialize SMM S3 resume state structure used during S3 Resume.
723 @param[in] Cr3 The base address of the page tables to use in SMM.
727 InitSmmS3ResumeState (
732 EFI_SMRAM_DESCRIPTOR
*SmramDescriptor
;
733 SMM_S3_RESUME_STATE
*SmmS3ResumeState
;
735 if (!mAcpiS3Enable
) {
739 GuidHob
= GetFirstGuidHob (&gEfiAcpiVariableGuid
);
740 if (GuidHob
!= NULL
) {
741 SmramDescriptor
= (EFI_SMRAM_DESCRIPTOR
*) GET_GUID_HOB_DATA (GuidHob
);
743 DEBUG ((EFI_D_INFO
, "SMM S3 SMRAM Structure = %x\n", SmramDescriptor
));
744 DEBUG ((EFI_D_INFO
, "SMM S3 Structure = %x\n", SmramDescriptor
->CpuStart
));
746 SmmS3ResumeState
= (SMM_S3_RESUME_STATE
*)(UINTN
)SmramDescriptor
->CpuStart
;
747 ZeroMem (SmmS3ResumeState
, sizeof (SMM_S3_RESUME_STATE
));
749 mSmmS3ResumeState
= SmmS3ResumeState
;
750 SmmS3ResumeState
->Smst
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)gSmst
;
752 SmmS3ResumeState
->SmmS3ResumeEntryPoint
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)SmmRestoreCpu
;
754 SmmS3ResumeState
->SmmS3StackSize
= SIZE_32KB
;
755 SmmS3ResumeState
->SmmS3StackBase
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePages (EFI_SIZE_TO_PAGES ((UINTN
)SmmS3ResumeState
->SmmS3StackSize
));
756 if (SmmS3ResumeState
->SmmS3StackBase
== 0) {
757 SmmS3ResumeState
->SmmS3StackSize
= 0;
760 SmmS3ResumeState
->SmmS3Cr0
= gSmmCr0
;
761 SmmS3ResumeState
->SmmS3Cr3
= Cr3
;
762 SmmS3ResumeState
->SmmS3Cr4
= gSmmCr4
;
764 if (sizeof (UINTN
) == sizeof (UINT64
)) {
765 SmmS3ResumeState
->Signature
= SMM_S3_RESUME_SMM_64
;
767 if (sizeof (UINTN
) == sizeof (UINT32
)) {
768 SmmS3ResumeState
->Signature
= SMM_S3_RESUME_SMM_32
;
773 // Patch SmmS3ResumeState->SmmS3Cr3
779 Copy register table from ACPI NVS memory into SMRAM.
781 @param[in] DestinationRegisterTableList Points to destination register table.
782 @param[in] SourceRegisterTableList Points to source register table.
783 @param[in] NumberOfCpus Number of CPUs.
788 IN CPU_REGISTER_TABLE
*DestinationRegisterTableList
,
789 IN CPU_REGISTER_TABLE
*SourceRegisterTableList
,
790 IN UINT32 NumberOfCpus
795 CPU_REGISTER_TABLE_ENTRY
*RegisterTableEntry
;
797 CopyMem (DestinationRegisterTableList
, SourceRegisterTableList
, NumberOfCpus
* sizeof (CPU_REGISTER_TABLE
));
798 for (Index
= 0; Index
< NumberOfCpus
; Index
++) {
799 DestinationRegisterTableList
[Index
].RegisterTableEntry
= AllocatePool (DestinationRegisterTableList
[Index
].AllocatedSize
);
800 ASSERT (DestinationRegisterTableList
[Index
].RegisterTableEntry
!= NULL
);
801 CopyMem (DestinationRegisterTableList
[Index
].RegisterTableEntry
, SourceRegisterTableList
[Index
].RegisterTableEntry
, DestinationRegisterTableList
[Index
].AllocatedSize
);
803 // Go though all MSRs in register table to initialize MSR spin lock
805 RegisterTableEntry
= DestinationRegisterTableList
[Index
].RegisterTableEntry
;
806 for (Index1
= 0; Index1
< DestinationRegisterTableList
[Index
].TableLength
; Index1
++, RegisterTableEntry
++) {
807 if ((RegisterTableEntry
->RegisterType
== Msr
) && (RegisterTableEntry
->ValidBitLength
< 64)) {
809 // Initialize MSR spin lock only for those MSRs need bit field writing
811 InitMsrSpinLockByIndex (RegisterTableEntry
->Index
);
826 ACPI_CPU_DATA
*AcpiCpuData
;
827 IA32_DESCRIPTOR
*Gdtr
;
828 IA32_DESCRIPTOR
*Idtr
;
830 if (!mAcpiS3Enable
) {
835 // Prevent use of mAcpiCpuData by initialize NumberOfCpus to 0
837 mAcpiCpuData
.NumberOfCpus
= 0;
840 // If PcdCpuS3DataAddress was never set, then do not copy CPU S3 Data into SMRAM
842 AcpiCpuData
= (ACPI_CPU_DATA
*)(UINTN
)PcdGet64 (PcdCpuS3DataAddress
);
843 if (AcpiCpuData
== 0) {
848 // For a native platform, copy the CPU S3 data into SMRAM for use on CPU S3 Resume.
850 CopyMem (&mAcpiCpuData
, AcpiCpuData
, sizeof (mAcpiCpuData
));
852 mAcpiCpuData
.MtrrTable
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (sizeof (MTRR_SETTINGS
));
853 ASSERT (mAcpiCpuData
.MtrrTable
!= 0);
855 CopyMem ((VOID
*)(UINTN
)mAcpiCpuData
.MtrrTable
, (VOID
*)(UINTN
)AcpiCpuData
->MtrrTable
, sizeof (MTRR_SETTINGS
));
857 mAcpiCpuData
.GdtrProfile
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (sizeof (IA32_DESCRIPTOR
));
858 ASSERT (mAcpiCpuData
.GdtrProfile
!= 0);
860 CopyMem ((VOID
*)(UINTN
)mAcpiCpuData
.GdtrProfile
, (VOID
*)(UINTN
)AcpiCpuData
->GdtrProfile
, sizeof (IA32_DESCRIPTOR
));
862 mAcpiCpuData
.IdtrProfile
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (sizeof (IA32_DESCRIPTOR
));
863 ASSERT (mAcpiCpuData
.IdtrProfile
!= 0);
865 CopyMem ((VOID
*)(UINTN
)mAcpiCpuData
.IdtrProfile
, (VOID
*)(UINTN
)AcpiCpuData
->IdtrProfile
, sizeof (IA32_DESCRIPTOR
));
867 mAcpiCpuData
.PreSmmInitRegisterTable
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (mAcpiCpuData
.NumberOfCpus
* sizeof (CPU_REGISTER_TABLE
));
868 ASSERT (mAcpiCpuData
.PreSmmInitRegisterTable
!= 0);
871 (CPU_REGISTER_TABLE
*)(UINTN
)mAcpiCpuData
.PreSmmInitRegisterTable
,
872 (CPU_REGISTER_TABLE
*)(UINTN
)AcpiCpuData
->PreSmmInitRegisterTable
,
873 mAcpiCpuData
.NumberOfCpus
876 mAcpiCpuData
.RegisterTable
= (EFI_PHYSICAL_ADDRESS
)(UINTN
)AllocatePool (mAcpiCpuData
.NumberOfCpus
* sizeof (CPU_REGISTER_TABLE
));
877 ASSERT (mAcpiCpuData
.RegisterTable
!= 0);
880 (CPU_REGISTER_TABLE
*)(UINTN
)mAcpiCpuData
.RegisterTable
,
881 (CPU_REGISTER_TABLE
*)(UINTN
)AcpiCpuData
->RegisterTable
,
882 mAcpiCpuData
.NumberOfCpus
886 // Copy AP's GDT, IDT and Machine Check handler into SMRAM.
888 Gdtr
= (IA32_DESCRIPTOR
*)(UINTN
)mAcpiCpuData
.GdtrProfile
;
889 Idtr
= (IA32_DESCRIPTOR
*)(UINTN
)mAcpiCpuData
.IdtrProfile
;
891 mGdtForAp
= AllocatePool ((Gdtr
->Limit
+ 1) + (Idtr
->Limit
+ 1) + mAcpiCpuData
.ApMachineCheckHandlerSize
);
892 ASSERT (mGdtForAp
!= NULL
);
893 mIdtForAp
= (VOID
*) ((UINTN
)mGdtForAp
+ (Gdtr
->Limit
+ 1));
894 mMachineCheckHandlerForAp
= (VOID
*) ((UINTN
)mIdtForAp
+ (Idtr
->Limit
+ 1));
896 CopyMem (mGdtForAp
, (VOID
*)Gdtr
->Base
, Gdtr
->Limit
+ 1);
897 CopyMem (mIdtForAp
, (VOID
*)Idtr
->Base
, Idtr
->Limit
+ 1);
898 CopyMem (mMachineCheckHandlerForAp
, (VOID
*)(UINTN
)mAcpiCpuData
.ApMachineCheckHandlerBase
, mAcpiCpuData
.ApMachineCheckHandlerSize
);
902 Get ACPI S3 enable flag.
906 GetAcpiS3EnableFlag (
910 mAcpiS3Enable
= PcdGetBool (PcdAcpiS3Enable
);