Update the MSR semaphores to point into the allocated, aligned semaphores
buffer. If the MSR semaphores are not enough, allocate one more page.
Cc: Michael Kinney <michael.d.kinney@intel.com>
Cc: Feng Tian <feng.tian@intel.com>
Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Jeff Fan <jeff.fan@intel.com>
Reviewed-by: Feng Tian <feng.tian@intel.com>
Reviewed-by: Michael Kinney <michael.d.kinney@intel.com>
Regression-tested-by: Laszlo Ersek <lersek@redhat.com>
VOID *mIdtForAp = NULL;\r
VOID *mMachineCheckHandlerForAp = NULL;\r
MP_MSR_LOCK *mMsrSpinLocks = NULL;\r
VOID *mIdtForAp = NULL;\r
VOID *mMachineCheckHandlerForAp = NULL;\r
MP_MSR_LOCK *mMsrSpinLocks = NULL;\r
-UINTN mMsrSpinLockCount = MSR_SPIN_LOCK_INIT_NUM;\r
+UINTN mMsrSpinLockCount;\r
UINTN mMsrCount = 0;\r
\r
/**\r
UINTN mMsrCount = 0;\r
\r
/**\r
UINTN Index;\r
for (Index = 0; Index < mMsrCount; Index++) {\r
if (MsrIndex == mMsrSpinLocks[Index].MsrIndex) {\r
UINTN Index;\r
for (Index = 0; Index < mMsrCount; Index++) {\r
if (MsrIndex == mMsrSpinLocks[Index].MsrIndex) {\r
- return &mMsrSpinLocks[Index].SpinLock;\r
+ return mMsrSpinLocks[Index].SpinLock;\r
IN UINT32 MsrIndex\r
)\r
{\r
IN UINT32 MsrIndex\r
)\r
{\r
+ UINTN MsrSpinLockCount;\r
UINTN NewMsrSpinLockCount;\r
UINTN NewMsrSpinLockCount;\r
+ UINTN Index;\r
+ UINTN AddedSize;\r
\r
if (mMsrSpinLocks == NULL) {\r
\r
if (mMsrSpinLocks == NULL) {\r
- mMsrSpinLocks = (MP_MSR_LOCK *) AllocatePool (sizeof (MP_MSR_LOCK) * mMsrSpinLockCount);\r
+ MsrSpinLockCount = mSmmCpuSemaphores.SemaphoreMsr.AvailableCounter;\r
+ mMsrSpinLocks = (MP_MSR_LOCK *) AllocatePool (sizeof (MP_MSR_LOCK) * MsrSpinLockCount);\r
ASSERT (mMsrSpinLocks != NULL);\r
ASSERT (mMsrSpinLocks != NULL);\r
+ for (Index = 0; Index < MsrSpinLockCount; Index++) {\r
+ mMsrSpinLocks[Index].SpinLock =\r
+ (SPIN_LOCK *)((UINTN)mSmmCpuSemaphores.SemaphoreMsr.Msr + Index * mSemaphoreSize);\r
+ mMsrSpinLocks[Index].MsrIndex = (UINT32)-1;\r
+ }\r
+ mMsrSpinLockCount = MsrSpinLockCount;\r
+ mSmmCpuSemaphores.SemaphoreMsr.AvailableCounter = 0;\r
}\r
if (GetMsrSpinLockByIndex (MsrIndex) == NULL) {\r
//\r
// Initialize spin lock for MSR programming\r
//\r
mMsrSpinLocks[mMsrCount].MsrIndex = MsrIndex;\r
}\r
if (GetMsrSpinLockByIndex (MsrIndex) == NULL) {\r
//\r
// Initialize spin lock for MSR programming\r
//\r
mMsrSpinLocks[mMsrCount].MsrIndex = MsrIndex;\r
- InitializeSpinLock (&mMsrSpinLocks[mMsrCount].SpinLock);\r
+ InitializeSpinLock (mMsrSpinLocks[mMsrCount].SpinLock);\r
mMsrCount ++;\r
if (mMsrCount == mMsrSpinLockCount) {\r
//\r
// If MSR spin lock buffer is full, enlarge it\r
//\r
mMsrCount ++;\r
if (mMsrCount == mMsrSpinLockCount) {\r
//\r
// If MSR spin lock buffer is full, enlarge it\r
//\r
- NewMsrSpinLockCount = mMsrSpinLockCount + MSR_SPIN_LOCK_INIT_NUM;\r
+ AddedSize = SIZE_4KB;\r
+ mSmmCpuSemaphores.SemaphoreMsr.Msr =\r
+ AllocatePages (EFI_SIZE_TO_PAGES(AddedSize));\r
+ ASSERT (mSmmCpuSemaphores.SemaphoreMsr.Msr != NULL);\r
+ NewMsrSpinLockCount = mMsrSpinLockCount + AddedSize / mSemaphoreSize;\r
mMsrSpinLocks = ReallocatePool (\r
sizeof (MP_MSR_LOCK) * mMsrSpinLockCount,\r
sizeof (MP_MSR_LOCK) * NewMsrSpinLockCount,\r
mMsrSpinLocks\r
);\r
mMsrSpinLocks = ReallocatePool (\r
sizeof (MP_MSR_LOCK) * mMsrSpinLockCount,\r
sizeof (MP_MSR_LOCK) * NewMsrSpinLockCount,\r
mMsrSpinLocks\r
);\r
+ ASSERT (mMsrSpinLocks != NULL);\r
mMsrSpinLockCount = NewMsrSpinLockCount;\r
mMsrSpinLockCount = NewMsrSpinLockCount;\r
+ for (Index = mMsrCount; Index < mMsrSpinLockCount; Index++) {\r
+ mMsrSpinLocks[Index].SpinLock =\r
+ (SPIN_LOCK *)((UINTN)mSmmCpuSemaphores.SemaphoreMsr.Msr +\r
+ (Index - mMsrCount) * mSemaphoreSize);\r
+ mMsrSpinLocks[Index].MsrIndex = (UINT32)-1;\r
+ }\r
#define MSR_SPIN_LOCK_INIT_NUM 15\r
\r
typedef struct {\r
#define MSR_SPIN_LOCK_INIT_NUM 15\r
\r
typedef struct {\r
UINT32 MsrIndex;\r
} MP_MSR_LOCK;\r
\r
UINT32 MsrIndex;\r
} MP_MSR_LOCK;\r
\r
extern UINTN mSmmStackSize;\r
extern EFI_SMM_CPU_SERVICE_PROTOCOL mSmmCpuService;\r
extern IA32_DESCRIPTOR gcSmiInitGdtr;\r
extern UINTN mSmmStackSize;\r
extern EFI_SMM_CPU_SERVICE_PROTOCOL mSmmCpuService;\r
extern IA32_DESCRIPTOR gcSmiInitGdtr;\r
+extern SMM_CPU_SEMAPHORES mSmmCpuSemaphores;\r
+extern UINTN mSemaphoreSize;\r
extern SPIN_LOCK *mPFLock;\r
extern SPIN_LOCK *mConfigSmmCodeAccessCheckLock;\r
\r
extern SPIN_LOCK *mPFLock;\r
extern SPIN_LOCK *mConfigSmmCodeAccessCheckLock;\r
\r