/**
  This function initializes the host context for one CPU.

  @param Index  CPU index

**/
VOID
InitHostContextPerCpu (
  IN UINT32  Index
  )
{
  //
  // VmxOn for this CPU
  //
  AsmWbinvd ();
  AsmWriteCr3 (mHostContextCommon.PageTable);
  AsmWriteCr4 (AsmReadCr4 () | CR4_PAE |
               ((UINTN)AsmReadMsr64 (IA32_VMX_CR4_FIXED0_MSR_INDEX) &
                (UINTN)AsmReadMsr64 (IA32_VMX_CR4_FIXED1_MSR_INDEX)));
  AsmWriteCr0 (AsmReadCr0 () |
               ((UINTN)AsmReadMsr64 (IA32_VMX_CR0_FIXED0_MSR_INDEX) &
                (UINTN)AsmReadMsr64 (IA32_VMX_CR0_FIXED1_MSR_INDEX)));

  AsmVmxOn (&mHostContextCommon.HostContextPerCpu[Index].Vmcs);
}
/**
  Gets the attributes of the variable MTRRs.

  This function shadows the contents of the variable MTRRs into an
  internal array: VariableMtrr.

  @param MtrrValidBitsMask     The mask for the valid bits of the MTRR
  @param MtrrValidAddressMask  The valid address mask for the MTRR
  @param VariableMtrr          The array to shadow the variable MTRR contents

  @return The number of variable MTRRs that are in use.

**/
UINT32
EFIAPI
MtrrGetMemoryAttributeInVariableMtrr (
  IN  UINT64         MtrrValidBitsMask,
  IN  UINT64         MtrrValidAddressMask,
  OUT VARIABLE_MTRR  *VariableMtrr
  )
{
  UINTN   Index;
  UINT32  MsrNum;
  UINT32  UsedMtrr;
  UINT32  FirmwareVariableMtrrCount;
  UINT32  VariableMtrrEnd;

  if (!IsMtrrSupported ()) {
    return 0;
  }

  FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
  VariableMtrrEnd           = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;

  ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * MTRR_NUMBER_OF_VARIABLE_MTRR);
  UsedMtrr = 0;

  for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE, Index = 0;
       ((MsrNum < VariableMtrrEnd) && (Index < FirmwareVariableMtrrCount));
       MsrNum += 2
      ) {
    if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) != 0) {
      VariableMtrr[Index].Msr         = MsrNum;
      VariableMtrr[Index].BaseAddress = (AsmReadMsr64 (MsrNum) & MtrrValidAddressMask);
      VariableMtrr[Index].Length      = ((~(AsmReadMsr64 (MsrNum + 1) & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;
      VariableMtrr[Index].Type        = (AsmReadMsr64 (MsrNum) & 0x0ff);
      VariableMtrr[Index].Valid       = TRUE;
      VariableMtrr[Index].Used        = TRUE;
      UsedMtrr                        = UsedMtrr + 1;
      Index++;
    }
  }

  return UsedMtrr;
}
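/**
  Illustrative usage sketch (not part of the original module): derive the
  two mask parameters from the CPU's physical address width reported by
  CPUID leaf 80000008h, then shadow the variable MTRRs. The function and
  local names here are assumptions for illustration only.
**/
VOID
ExampleShadowVariableMtrrs (
  VOID
  )
{
  VARIABLE_MTRR  VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];
  UINT32         RegEax;
  UINT64         MtrrValidBitsMask;
  UINT64         MtrrValidAddressMask;
  UINT32         UsedMtrr;

  //
  // EAX[7:0] of CPUID leaf 80000008h reports the physical address bits.
  //
  AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);
  MtrrValidBitsMask    = LShiftU64 (1, RegEax & 0xFF) - 1;
  //
  // MTRR base/mask addresses are 4KB aligned, so drop the low 12 bits.
  //
  MtrrValidAddressMask = MtrrValidBitsMask & ~(UINT64)0xFFF;

  UsedMtrr = MtrrGetMemoryAttributeInVariableMtrr (
               MtrrValidBitsMask,
               MtrrValidAddressMask,
               VariableMtrr
               );
  DEBUG ((DEBUG_INFO, "Variable MTRRs in use: %d\n", UsedMtrr));
}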
/**
  Returns the default MTRR cache type for the system.

  @return MTRR default type

**/
UINT64
GetMtrrDefaultMemoryType (
  VOID
  )
{
  return (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0xff);
}
/**
  Checks if the specified processor is the BSP.

  @param[in] ProcessorIndex  Processor index value.

  @retval TRUE   It is the BSP.
  @retval FALSE  It is not the BSP.

**/
BOOLEAN
IsBsp (
  IN UINT32  ProcessorIndex
  )
{
  MSR_IA32_APIC_BASE_REGISTER  MsrApicBase;

  //
  // If there are less than 2 CPUs detected, then the currently executing CPU
  // must be the BSP. This avoids an access to an MSR that may not be supported
  // on single core CPUs.
  //
  if (mDebugCpuData.CpuCount < 2) {
    return TRUE;
  }

  MsrApicBase.Uint64 = AsmReadMsr64 (MSR_IA32_APIC_BASE);
  if (MsrApicBase.Bits.BSP == 1) {
    if (mDebugMpContext.BspIndex != ProcessorIndex) {
      AcquireMpSpinLock (&mDebugMpContext.MpContextSpinLock);
      mDebugMpContext.BspIndex = ProcessorIndex;
      ReleaseMpSpinLock (&mDebugMpContext.MpContextSpinLock);
    }

    return TRUE;
  } else {
    return FALSE;
  }
}
/**
  Enables the SMRR register when SmmInit restores non-SMM MTRRs.
**/
VOID
PentiumEnableSmrr (
  VOID
  )
{
  AsmWriteMsr64 (
    EFI_MSR_PENTIUM_SMRR_PHYS_MASK,
    AsmReadMsr64 (EFI_MSR_PENTIUM_SMRR_PHYS_MASK) | EFI_MSR_SMRR_PHYS_MASK_VALID
    );
}
/**
  This function initializes the host VMCS.

  @param Index  CPU index

**/
VOID
InitHostVmcs (
  UINTN  Index
  )
{
  UINT64  Data64;
  UINTN   Size;

  //
  // VMCS size
  //
  Data64 = AsmReadMsr64 (IA32_VMX_BASIC_MSR_INDEX);
  Size   = (UINTN)(RShiftU64 (Data64, 32) & 0xFFFF);

  //
  // Allocate VMCS region
  //
  mHostContextCommon.HostContextPerCpu[Index].Vmcs = (UINT64)(UINTN)AllocatePages (FRM_SIZE_TO_PAGES (Size));

  //
  // Set RevisionIdentifier
  //
  *(UINT32 *)(UINTN)mHostContextCommon.HostContextPerCpu[Index].Vmcs = (UINT32)Data64;

  return;
}
/**
  This function gets the content of all MTRRs (fixed and variable).

  @param MtrrSetting  A buffer to hold all MTRR settings.

  @return The pointer to the buffer containing all MTRR settings.

**/
MTRR_SETTINGS *
EFIAPI
MtrrGetAllMtrrs (
  OUT MTRR_SETTINGS  *MtrrSetting
  )
{
  if (!IsMtrrSupported ()) {
    return MtrrSetting;
  }

  //
  // Get fixed MTRRs
  //
  MtrrGetFixedMtrr (&MtrrSetting->Fixed);

  //
  // Get variable MTRRs
  //
  MtrrGetVariableMtrr (&MtrrSetting->Variables);

  //
  // Get MTRR_DEF_TYPE value
  //
  MtrrSetting->MtrrDefType = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE);

  return MtrrSetting;
}
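/**
  Illustrative usage sketch (not part of the original module): snapshot all
  MTRR settings, e.g. for debug output or before a later restore. The
  function name and the DEBUG print are assumptions for illustration.
**/
VOID
ExampleDumpMtrrDefType (
  VOID
  )
{
  MTRR_SETTINGS  MtrrSettings;

  MtrrGetAllMtrrs (&MtrrSettings);
  DEBUG ((DEBUG_INFO, "MTRR_DEF_TYPE = 0x%lx\n", MtrrSettings.MtrrDefType));
}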
/*---------------------------------------------------------------------------------------*/
VOID
EFIAPI
GetSmmBase (
  IN OUT VOID  *Buffer
  )
{
  //
  // Read MSR to get the SMM base. FMO - Should this be moved to a library function?
  //
  *(EFI_PHYSICAL_ADDRESS *)Buffer = AsmReadMsr64 (MSR_CPU_SMM_BASE);
}
/**
  Set the current local APIC mode.

  If the specified local APIC mode is not valid, then ASSERT.
  If the specified local APIC mode can't be set as current, then ASSERT.

  @param ApicMode  APIC mode to be set.

**/
VOID
EFIAPI
SetApicMode (
  IN UINTN  ApicMode
  )
{
  UINTN               CurrentMode;
  MSR_IA32_APIC_BASE  ApicBaseMsr;

  CurrentMode = GetApicMode ();
  if (CurrentMode == LOCAL_APIC_MODE_XAPIC) {
    switch (ApicMode) {
      case LOCAL_APIC_MODE_XAPIC:
        break;
      case LOCAL_APIC_MODE_X2APIC:
        ApicBaseMsr.Uint64    = AsmReadMsr64 (MSR_IA32_APIC_BASE_ADDRESS);
        ApicBaseMsr.Bits.Extd = 1;
        AsmWriteMsr64 (MSR_IA32_APIC_BASE_ADDRESS, ApicBaseMsr.Uint64);
        break;
      default:
        ASSERT (FALSE);
    }
  } else {
    switch (ApicMode) {
      case LOCAL_APIC_MODE_XAPIC:
        //
        // Transition from x2APIC mode to xAPIC mode is a two-step process:
        //   x2APIC -> Local APIC disabled -> xAPIC
        //
        ApicBaseMsr.Uint64    = AsmReadMsr64 (MSR_IA32_APIC_BASE_ADDRESS);
        ApicBaseMsr.Bits.Extd = 0;
        ApicBaseMsr.Bits.En   = 0;
        AsmWriteMsr64 (MSR_IA32_APIC_BASE_ADDRESS, ApicBaseMsr.Uint64);
        ApicBaseMsr.Bits.En = 1;
        AsmWriteMsr64 (MSR_IA32_APIC_BASE_ADDRESS, ApicBaseMsr.Uint64);
        break;
      case LOCAL_APIC_MODE_X2APIC:
        break;
      default:
        ASSERT (FALSE);
    }
  }
}
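/**
  Illustrative usage sketch (not part of the original module): switch the
  local APIC into x2APIC mode only when it is still in xAPIC mode.
  SetApicMode () ASSERTs on an invalid transition.
**/
VOID
ExampleEnterX2ApicMode (
  VOID
  )
{
  if (GetApicMode () == LOCAL_APIC_MODE_XAPIC) {
    SetApicMode (LOCAL_APIC_MODE_X2APIC);
    ASSERT (GetApicMode () == LOCAL_APIC_MODE_X2APIC);
  }
}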
/**
  Returns whether a local machine check exception (LMCE) has been signaled.

  The LMCE_S bit indicates (when set) that the current machine-check event
  was delivered to only this logical processor.

  @retval TRUE   LMCE was signaled.
  @retval FALSE  LMCE was not signaled.

**/
BOOLEAN
IsLmceSignaled (
  VOID
  )
{
  MSR_IA32_MCG_STATUS_REGISTER  McgStatus;

  McgStatus.Uint64 = AsmReadMsr64 (MSR_IA32_MCG_STATUS);
  return (BOOLEAN)(McgStatus.Bits.LMCE_S == 1);
}
/**
  Enable Execute Disable Bit.
**/
VOID
EnableExecuteDisableBit (
  VOID
  )
{
  UINT64  MsrRegisters;

  //
  // Set the NXE bit (BIT11) in the IA32_EFER MSR (0xC0000080).
  //
  MsrRegisters  = AsmReadMsr64 (0xC0000080);
  MsrRegisters |= BIT11;
  AsmWriteMsr64 (0xC0000080, MsrRegisters);
}
/**
  Returns the variable MTRR count for the CPU.

  @return Variable MTRR count

**/
UINT32
GetVariableMtrrCount (
  VOID
  )
{
  if (!IsMtrrSupported ()) {
    return 0;
  }

  //
  // VCNT (the variable MTRR count) is in the low bits of IA32_MTRRCAP.
  //
  return (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);
}
/**
  Returns the default MTRR cache type for the system.

  @return The default MTRR cache type.

**/
MTRR_MEMORY_CACHE_TYPE
EFIAPI
MtrrGetDefaultMemoryType (
  VOID
  )
{
  if (!IsMtrrSupported ()) {
    return CacheUncacheable;
  }

  return (MTRR_MEMORY_CACHE_TYPE)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0x7);
}
/**
  Get the microcode update signature of the currently loaded microcode update.

  @return Microcode signature.

**/
UINT32
GetCurrentMicrocodeSignature (
  VOID
  )
{
  UINT64  Signature;

  //
  // The signature is reported by writing 0 to IA32_BIOS_SIGN_ID, executing
  // CPUID leaf 1, and then reading the high 32 bits of IA32_BIOS_SIGN_ID.
  //
  AsmWriteMsr64 (EFI_MSR_IA32_BIOS_SIGN_ID, 0);
  AsmCpuid (CPUID_VERSION_INFO, NULL, NULL, NULL, NULL);
  Signature = AsmReadMsr64 (EFI_MSR_IA32_BIOS_SIGN_ID);
  return (UINT32)RShiftU64 (Signature, 32);
}
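/**
  Illustrative usage sketch (not part of the original module): decide
  whether a candidate microcode patch is newer than what the CPU is
  already running. UpdateRevision would come from the candidate's
  microcode header; this helper and the greater-than comparison are
  assumptions for illustration.

  @param UpdateRevision  Revision field of a candidate microcode update.

  @retval TRUE   The candidate is newer and could be loaded.
  @retval FALSE  The CPU already runs this revision or a newer one.
**/
BOOLEAN
ExampleIsMicrocodeNewer (
  IN UINT32  UpdateRevision
  )
{
  return (BOOLEAN)(UpdateRevision > GetCurrentMicrocodeSignature ());
}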
/**
  Checks if the OS has enabled LMCE in MSR_IA32_MCG_EXT_CTL.

  @retval TRUE   The OS has enabled LMCE.
  @retval FALSE  The OS has not enabled LMCE.

**/
BOOLEAN
IsLmceOsEnabled (
  VOID
  )
{
  MSR_IA32_MCG_CAP_REGISTER          McgCap;
  MSR_IA32_FEATURE_CONTROL_REGISTER  FeatureCtrl;
  MSR_IA32_MCG_EXT_CTL_REGISTER      McgExtCtrl;

  //
  // LMCE must be present in MCG_CAP and opted into via IA32_FEATURE_CONTROL
  // before IA32_MCG_EXT_CTL may be consulted.
  //
  McgCap.Uint64 = AsmReadMsr64 (MSR_IA32_MCG_CAP);
  if (McgCap.Bits.MCG_LMCE_P == 0) {
    return FALSE;
  }

  FeatureCtrl.Uint64 = AsmReadMsr64 (MSR_IA32_FEATURE_CONTROL);
  if (FeatureCtrl.Bits.LmceOn == 0) {
    return FALSE;
  }

  McgExtCtrl.Uint64 = AsmReadMsr64 (MSR_IA32_MCG_EXT_CTL);
  return (BOOLEAN)(McgExtCtrl.Bits.LMCE_EN == 1);
}
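/**
  Illustrative usage sketch (not part of the original module): in a
  machine-check handler, broadcast handling can be skipped when the event
  was delivered locally and the OS opted into LMCE. The handler shape is
  an assumption for illustration.
**/
VOID
ExampleHandleMachineCheck (
  VOID
  )
{
  if (IsLmceOsEnabled () && IsLmceSignaled ()) {
    //
    // Handle the machine check on this logical processor only.
    //
  } else {
    //
    // Fall back to broadcast machine-check handling across all CPUs.
    //
  }
}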
EFI_STATUS
AmdSmmLock (
  IN EFI_SMM_ACCESS2_PROTOCOL  *This
  )
{
  UINT64  Value;

  //
  // Set BIT0 in MSR 0xC0010115 to lock the SMM configuration.
  //
  Value = AsmReadMsr64 (0xc0010115) | 1;
  AsmWriteMsr64 (0xc0010115, Value);

  mSmramMap.RegionState = EFI_SMRAM_LOCKED;
  mSmmAccess2.LockState = TRUE;
  return EFI_SUCCESS;
}
/**
  Enable last branch recording (LBR).
**/
VOID
ActivateLBR (
  VOID
  )
{
  UINT64  DebugCtl;

  DebugCtl = AsmReadMsr64 (MSR_DEBUG_CTL);
  if ((DebugCtl & MSR_DEBUG_CTL_LBR) != 0) {
    return;
  }

  DebugCtl |= MSR_DEBUG_CTL_LBR;
  AsmWriteMsr64 (MSR_DEBUG_CTL, DebugCtl);
}
/**
  Enable XD feature.
**/
VOID
ActivateXd (
  VOID
  )
{
  UINT64  MsrRegisters;

  MsrRegisters = AsmReadMsr64 (MSR_EFER);
  if ((MsrRegisters & MSR_EFER_XD) != 0) {
    return;
  }

  MsrRegisters |= MSR_EFER_XD;
  AsmWriteMsr64 (MSR_EFER, MsrRegisters);
}
/**
  Returns whether SMRR is supported.

  @retval TRUE   SMRR is supported.
  @retval FALSE  SMRR is not supported.

**/
BOOLEAN
IsSmrrSupported (
  VOID
  )
{
  UINT64  MtrrCap;

  MtrrCap = AsmReadMsr64 (EFI_MSR_IA32_MTRR_CAP);
  if ((MtrrCap & IA32_MTRR_SMRR_SUPPORT_BIT) == 0) {
    return FALSE;
  } else {
    return TRUE;
  }
}
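/**
  Illustrative usage sketch (not part of the original module): gate SMRR
  programming on the capability check. PentiumEnableSmrr () only sets the
  valid bit, so this sketch assumes the SMRR base and mask have already
  been programmed elsewhere.
**/
VOID
ExampleEnableSmrrIfSupported (
  VOID
  )
{
  if (IsSmrrSupported ()) {
    PentiumEnableSmrr ();
  }
}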
/**
  This function gets the content of the variable MTRRs.

  @param VariableSettings  A buffer to hold variable MTRR settings.

  @return The pointer to the buffer containing variable MTRR settings.

**/
MTRR_VARIABLE_SETTINGS *
EFIAPI
MtrrGetVariableMtrr (
  OUT MTRR_VARIABLE_SETTINGS  *VariableSettings
  )
{
  UINT32  Index;
  UINT32  VariableMtrrCount;

  if (!IsMtrrSupported ()) {
    return VariableSettings;
  }

  VariableMtrrCount = GetVariableMtrrCount ();
  ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);

  for (Index = 0; Index < VariableMtrrCount; Index++) {
    VariableSettings->Mtrr[Index].Base =
      AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1));
    VariableSettings->Mtrr[Index].Mask =
      AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1);
  }

  return VariableSettings;
}
/**
  Check whether the XD and BTS features are supported by the processor.
**/
VOID
CheckFeatureSupported (
  VOID
  )
{
  UINT32                         RegEax;
  UINT32                         RegEdx;
  MSR_IA32_MISC_ENABLE_REGISTER  MiscEnableMsr;

  if (mXdSupported) {
    AsmCpuid (CPUID_EXTENDED_FUNCTION, &RegEax, NULL, NULL, NULL);
    if (RegEax <= CPUID_EXTENDED_FUNCTION) {
      //
      // Extended CPUID functions are not supported on this processor.
      //
      mXdSupported = FALSE;
    }

    AsmCpuid (CPUID_EXTENDED_CPU_SIG, NULL, NULL, NULL, &RegEdx);
    if ((RegEdx & CPUID1_EDX_XD_SUPPORT) == 0) {
      //
      // Execute Disable Bit feature is not supported on this processor.
      //
      mXdSupported = FALSE;
    }
  }

  if (mBtsSupported) {
    AsmCpuid (CPUID_VERSION_INFO, NULL, NULL, NULL, &RegEdx);
    if ((RegEdx & CPUID1_EDX_BTS_AVAILABLE) != 0) {
      //
      // Per the IA32 manuals:
      // When CPUID.1:EDX[21] is set, the following BTS facilities are available:
      // 1. The BTS_UNAVAILABLE flag in the IA32_MISC_ENABLE MSR indicates the
      //    availability of the BTS facilities, including the ability to set the BTS and
      //    BTINT bits in the MSR_DEBUGCTLA MSR.
      // 2. The IA32_DS_AREA MSR can be programmed to point to the DS save area.
      //
      MiscEnableMsr.Uint64 = AsmReadMsr64 (MSR_IA32_MISC_ENABLE);
      if (MiscEnableMsr.Bits.BTS == 1) {
        //
        // BTS facilities are not supported if the MSR_IA32_MISC_ENABLE.BTS bit is set.
        //
        mBtsSupported = FALSE;
      }
    }
  }
}
/**
  This function checks if an MLE is launched.

  @retval TRUE   MLE is launched.
  @retval FALSE  MLE is not launched.

**/
BOOLEAN
IsMleLaunched (
  VOID
  )
{
  UINT32  TxtStatus;

  if ((AsmReadMsr64 (IA32_FEATURE_CONTROL_MSR_INDEX) & (IA32_FEATURE_CONTROL_SMX | IA32_FEATURE_CONTROL_LCK)) !=
      (IA32_FEATURE_CONTROL_SMX | IA32_FEATURE_CONTROL_LCK)) {
    return FALSE;
  }

  TxtStatus = TxtPubRead32 (TXT_STS);
  return (BOOLEAN)((TxtStatus & TXT_STS_SENTER_DONE) != 0);
}
/**
  Returns the variable MTRR count for the CPU.

  @return Variable MTRR count

**/
UINT32
EFIAPI
GetVariableMtrrCount (
  VOID
  )
{
  UINT32  VariableMtrrCount;

  if (!IsMtrrSupported ()) {
    return 0;
  }

  VariableMtrrCount = (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);
  ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);

  return VariableMtrrCount;
}
/**
  Enable branch trace store (BTS).

  @param CpuIndex  The index of the processor.

**/
VOID
ActivateBTS (
  IN UINTN  CpuIndex
  )
{
  UINT64  DebugCtl;

  DebugCtl = AsmReadMsr64 (MSR_DEBUG_CTL);
  if ((DebugCtl & MSR_DEBUG_CTL_BTS) != 0) {
    return;
  }

  //
  // Point the DS save area at this CPU's buffer, then enable BTS and TR
  // while keeping BTINT cleared (circular buffer, no overflow interrupt).
  //
  AsmWriteMsr64 (MSR_DS_AREA, (UINT64)(UINTN)mMsrDsArea[CpuIndex]);
  DebugCtl |= (UINT64)(MSR_DEBUG_CTL_BTS | MSR_DEBUG_CTL_TR);
  DebugCtl &= ~((UINT64)MSR_DEBUG_CTL_BTINT);
  AsmWriteMsr64 (MSR_DEBUG_CTL, DebugCtl);
}
/**
  This function initializes the guest context for one CPU.

  @param Index  CPU index

**/
VOID
InitGuestContextPerCpu (
  IN UINT32  Index
  )
{
  mGuestContextCommon.GuestContextPerCpu[Index].EFER = AsmReadMsr64 (IA32_EFER_MSR_INDEX);

  //
  // Load current VMCS, after that we can access VMCS region
  //
  AsmVmClear (&mGuestContextCommon.GuestContextPerCpu[Index].Vmcs);
  AsmVmPtrLoad (&mGuestContextCommon.GuestContextPerCpu[Index].Vmcs);

  SetVmcsHostField (Index);
  SetVmcsGuestField (Index);
  SetVmcsControlField (Index);
}
/**
  Sx workaround: clear BIT10 in MSR 0xE2 unless the MSR is locked.
**/
VOID
CpuSmmSxWorkAround (
  VOID
  )
{
  UINT64  MsrValue;

  MsrValue = AsmReadMsr64 (0xE2);
  if ((MsrValue & BIT15) != 0) {
    //
    // The MSR is locked (BIT15 set) and cannot be modified.
    //
    return;
  }

  if ((MsrValue & BIT10) != 0) {
    MsrValue &= ~BIT10;
    AsmWriteMsr64 (0xE2, MsrValue);
  }
}
EFI_STATUS
AmdSmmOpen (
  IN EFI_SMM_ACCESS2_PROTOCOL  *This
  )
{
  UINT64  Value;

  if (mSmmAccess2.LockState) {
    return EFI_ACCESS_DENIED;
  }

  //
  // Clear the TValid bit (BIT1) in MSR 0xC0010113 to open SMRAM.
  //
  Value = AsmReadMsr64 (0xc0010113) & ~2;  // TValid
  AsmWriteMsr64 (0xc0010113, Value);

  mSmramMap.RegionState = EFI_SMRAM_OPEN;
  mSmmAccess2.OpenState = TRUE;
  return EFI_SUCCESS;
}
/**
  This function gets the content of the fixed MTRRs.

  @param FixedSettings  A buffer to hold fixed MTRR settings.

  @return The pointer to the buffer containing fixed MTRR settings.

**/
MTRR_FIXED_SETTINGS *
EFIAPI
MtrrGetFixedMtrr (
  OUT MTRR_FIXED_SETTINGS  *FixedSettings
  )
{
  UINT32  Index;

  if (!IsMtrrSupported ()) {
    return FixedSettings;
  }

  for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
    FixedSettings->Mtrr[Index] = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
  }

  return FixedSettings;
}
EFI_STATUS
AmdSmmClose (
  IN EFI_SMM_ACCESS2_PROTOCOL  *This
  )
{
  UINT64  Value;

  if (mSmmAccess2.LockState) {
    return EFI_ACCESS_DENIED;
  }

  //
  // Set the TValid bit (BIT1) in MSR 0xC0010113 to close SMRAM.
  //
  Value = AsmReadMsr64 (0xc0010113) | 2;
  AsmWriteMsr64 (0xc0010113, Value);

  mSmramMap.RegionState = EFI_SMRAM_CLOSED;
  mSmmAccess2.OpenState = FALSE;
  return EFI_SUCCESS;
}
/**
  Get the current local APIC mode.

  If the local APIC is disabled, then ASSERT.

  @retval LOCAL_APIC_MODE_XAPIC   Current APIC mode is xAPIC.
  @retval LOCAL_APIC_MODE_X2APIC  Current APIC mode is x2APIC.

**/
UINTN
EFIAPI
GetApicMode (
  VOID
  )
{
  MSR_IA32_APIC_BASE  ApicBaseMsr;

  ApicBaseMsr.Uint64 = AsmReadMsr64 (MSR_IA32_APIC_BASE_ADDRESS);

  //
  // Local APIC should have been enabled
  //
  ASSERT (ApicBaseMsr.Bits.En != 0);

  if (ApicBaseMsr.Bits.Extd != 0) {
    return LOCAL_APIC_MODE_X2APIC;
  } else {
    return LOCAL_APIC_MODE_XAPIC;
  }
}