/* Constrain a guest CR4 value to the bits mandated by VMX operation.
 * Bits set in IA32_VMX_CR4_FIXED0 must be 1; bits clear in
 * IA32_VMX_CR4_FIXED1 must be 0. */
ulong
vt_paging_apply_fixed_cr4 (ulong val)
{
	ulong fixed0, fixed1;

	asm_rdmsr (MSR_IA32_VMX_CR4_FIXED0, &fixed0);
	asm_rdmsr (MSR_IA32_VMX_CR4_FIXED1, &fixed1);
	val = (val & fixed1) | fixed0;
#ifdef CPU_MMU_SPT_DISABLE
	/* Guest paging enabled: no further adjustment needed. */
	if (current->u.vt.vr.pg)
		return val;
#endif
#ifdef CPU_MMU_SPT_USE_PAE
	/* Shadow page tables use PAE, so force it on when EPT is off. */
	if (!ept_enabled ())
		val |= CR4_PAE_BIT;
#endif
	return val;
}
/* Constrain a guest CR0 value to the bits mandated by VMX operation.
 * Bits set in IA32_VMX_CR0_FIXED0 must be 1; bits clear in
 * IA32_VMX_CR0_FIXED1 must be 0. */
ulong
vt_paging_apply_fixed_cr0 (ulong val)
{
	ulong fixed0, fixed1;

	asm_rdmsr (MSR_IA32_VMX_CR0_FIXED0, &fixed0);
	asm_rdmsr (MSR_IA32_VMX_CR0_FIXED1, &fixed1);
	if (current->u.vt.unrestricted_guest) {
		/* Unrestricted guest allows PG and PE to be clear;
		 * relax the fixed-bit masks accordingly. */
		fixed1 |= CR0_PG_BIT | CR0_PE_BIT;
		fixed0 &= ~(CR0_PG_BIT | CR0_PE_BIT);
	}
	val = (val & fixed1) | fixed0;
#ifdef CPU_MMU_SPT_DISABLE
	/* Guest paging enabled: no further adjustment needed. */
	if (current->u.vt.vr.pg)
		return val;
#endif
	/* Shadow paging requires WP so supervisor writes honor
	 * read-only shadow entries. */
	if (!ept_enabled ())
		val |= CR0_WP_BIT;
	return val;
}
void svm_init (void) { u64 p; u64 tmp; void *v; ulong efer; asm_rdmsr (MSR_IA32_EFER, &efer); efer |= MSR_IA32_EFER_SVME_BIT; asm_wrmsr (MSR_IA32_EFER, efer); asm_rdmsr64 (MSR_AMD_VM_CR, &tmp); tmp |= MSR_AMD_VM_CR_DIS_A20M_BIT; asm_wrmsr64 (MSR_AMD_VM_CR, tmp); /* FIXME: size of a host state area is undocumented */ alloc_page (&v, &p); currentcpu->svm.hsave = v; currentcpu->svm.hsave_phys = p; asm_wrmsr64 (MSR_AMD_VM_HSAVE_PA, p); alloc_page (&v, &p); memset (v, 0, PAGESIZE); currentcpu->svm.vmcbhost = v; currentcpu->svm.vmcbhost_phys = p; }