/*
 * Switch the VRN0/VRN4 region registers to the metaphysical RID that
 * corresponds to the vcpu's current physical-mode MMU state
 * (VMX_MMU_PHY_DT: data+inst translation off; VMX_MMU_PHY_D: data
 * translation off only).
 *
 * psr.ic is cleared around the two rr writes so the pair is never
 * observed half-updated, then restored and serialized.
 */
void
switch_to_physical_rid(VCPU *vcpu)
{
    u64 psr;
    u64 rr = 0;  /* initialized: avoids uninitialized read if the
                    panic path in the default case ever returns */

    switch (vcpu->arch.arch_vmx.mmu_mode) {
    case VMX_MMU_PHY_DT:
        rr = vcpu->arch.metaphysical_rid_dt;
        break;
    case VMX_MMU_PHY_D:
        rr = vcpu->arch.metaphysical_rid_d;
        break;
    default:
        panic_domain(NULL, "bad mmu mode value");
        break;
    }

    psr = ia64_clear_ic();
    ia64_set_rr(VRN0 << VRN_SHIFT, rr);
    ia64_dv_serialize_data();
    ia64_set_rr(VRN4 << VRN_SHIFT, rr);
    ia64_srlz_d();

    ia64_set_psr(psr);
    ia64_srlz_i();
    return;
}
/*
 * Restore the guest's saved region identifiers for VRN0/VRN4, i.e.
 * return from metaphysical (physical) addressing mode to virtual mode.
 *
 * Interrupt collection (psr.ic) is disabled while the two region
 * registers are rewritten, then the previous psr value is restored.
 */
void
switch_to_virtual_rid(VCPU *vcpu)
{
    u64 saved_psr = ia64_clear_ic();

    ia64_set_rr(VRN0 << VRN_SHIFT, vcpu->arch.metaphysical_saved_rr0);
    ia64_dv_serialize_data();
    ia64_set_rr(VRN4 << VRN_SHIFT, vcpu->arch.metaphysical_saved_rr4);
    ia64_srlz_d();

    ia64_set_psr(saved_psr);
    ia64_srlz_i();
}
/*
 * Load every region register for this vcpu in one pass.
 *
 * VRN0/VRN4 receive either the guest's saved virtual rr values
 * (VMX_MMU_VIRTUAL) or a metaphysical RID (VMX_MMU_PHY_DT/PHY_D),
 * depending on the current MMU emulation mode.  The remaining regions
 * always take the guest's vrr values run through vrrtomrr().  rr7 is
 * handled by vmx_switch_rr7_vcpu() since the hypervisor itself lives
 * in region 7.  PTA and DCR are reloaded as well, then everything is
 * serialized with srlz.d.
 */
void
vmx_load_all_rr(VCPU *vcpu)
{
	/* Initialized so the panic path in the default case cannot leave
	 * them read-before-write if panic_domain() ever returns. */
	unsigned long rr0 = 0, rr4 = 0;

	switch (vcpu->arch.arch_vmx.mmu_mode) {
	case VMX_MMU_VIRTUAL:
		rr0 = vcpu->arch.metaphysical_saved_rr0;
		rr4 = vcpu->arch.metaphysical_saved_rr4;
		break;
	case VMX_MMU_PHY_DT:
		rr0 = vcpu->arch.metaphysical_rid_dt;
		rr4 = vcpu->arch.metaphysical_rid_dt;
		break;
	case VMX_MMU_PHY_D:
		rr0 = vcpu->arch.metaphysical_rid_d;
		rr4 = vcpu->arch.metaphysical_rid_d;
		break;
	default:
		panic_domain(NULL, "bad mmu mode value");
		break;
	}

	/* ia64_dv_serialize_data() between writes keeps the dependency
	 * ordering the architecture requires for back-to-back rr moves. */
	ia64_set_rr((VRN0 << VRN_SHIFT), rr0);
	ia64_dv_serialize_data();
	ia64_set_rr((VRN4 << VRN_SHIFT), rr4);
	ia64_dv_serialize_data();
	ia64_set_rr((VRN1 << VRN_SHIFT), vrrtomrr(vcpu, VMX(vcpu, vrr[VRN1])));
	ia64_dv_serialize_data();
	ia64_set_rr((VRN2 << VRN_SHIFT), vrrtomrr(vcpu, VMX(vcpu, vrr[VRN2])));
	ia64_dv_serialize_data();
	ia64_set_rr((VRN3 << VRN_SHIFT), vrrtomrr(vcpu, VMX(vcpu, vrr[VRN3])));
	ia64_dv_serialize_data();
	ia64_set_rr((VRN5 << VRN_SHIFT), vrrtomrr(vcpu, VMX(vcpu, vrr[VRN5])));
	ia64_dv_serialize_data();
	ia64_set_rr((VRN6 << VRN_SHIFT), vrrtomrr(vcpu, VMX(vcpu, vrr[VRN6])));
	ia64_dv_serialize_data();
	vmx_switch_rr7_vcpu(vcpu, vrrtomrr(vcpu, VMX(vcpu, vrr[VRN7])));
	ia64_set_pta(VMX(vcpu, mpta));
	vmx_ia64_set_dcr(vcpu);

	ia64_srlz_d();
}
/* (extraction residue removed: snippet separator "Esempio n. 4" / "0") */
/*
 * Write a privileged ia64 register identified by its _IA64_REG_* number.
 *
 * PSR.L gets an explicit write plus a data serialize; SP/GP and every
 * application register (ar.*) and control register (cr.*) are dispatched
 * through the CASE_SET_REG/CASE_SET_AR/CASE_SET_CR macros, which expand
 * to the matching case label and ia64_native_setreg() call (macro
 * definitions live elsewhere in this file — assumed, confirm there).
 * An unrecognized regnum is logged and ignored.
 */
void
ia64_native_setreg_func(int regnum, unsigned long val)
{
	switch (regnum) {
	case _IA64_REG_PSR_L:
		ia64_native_setreg(_IA64_REG_PSR_L, val);
		ia64_dv_serialize_data();
		break;
	CASE_SET_REG(SP);
	CASE_SET_REG(GP);

	/* Application registers (ar.*) */
	CASE_SET_AR(KR0);
	CASE_SET_AR(KR1);
	CASE_SET_AR(KR2);
	CASE_SET_AR(KR3);
	CASE_SET_AR(KR4);
	CASE_SET_AR(KR5);
	CASE_SET_AR(KR6);
	CASE_SET_AR(KR7);
	CASE_SET_AR(RSC);
	CASE_SET_AR(BSP);
	CASE_SET_AR(BSPSTORE);
	CASE_SET_AR(RNAT);
	CASE_SET_AR(FCR);
	CASE_SET_AR(EFLAG);
	CASE_SET_AR(CSD);
	CASE_SET_AR(SSD);
	CASE_SET_AR(CFLAG);
	CASE_SET_AR(FSR);
	CASE_SET_AR(FIR);
	CASE_SET_AR(FDR);
	CASE_SET_AR(CCV);
	CASE_SET_AR(UNAT);
	CASE_SET_AR(FPSR);
	CASE_SET_AR(ITC);
	CASE_SET_AR(PFS);
	CASE_SET_AR(LC);
	CASE_SET_AR(EC);

	/* Control registers (cr.*) */
	CASE_SET_CR(DCR);
	CASE_SET_CR(ITM);
	CASE_SET_CR(IVA);
	CASE_SET_CR(PTA);
	CASE_SET_CR(IPSR);
	CASE_SET_CR(ISR);
	CASE_SET_CR(IIP);
	CASE_SET_CR(IFA);
	CASE_SET_CR(ITIR);
	CASE_SET_CR(IIPA);
	CASE_SET_CR(IFS);
	CASE_SET_CR(IIM);
	CASE_SET_CR(IHA);
	CASE_SET_CR(LID);
	CASE_SET_CR(IVR);
	CASE_SET_CR(TPR);
	CASE_SET_CR(EOI);
	CASE_SET_CR(IRR0);
	CASE_SET_CR(IRR1);
	CASE_SET_CR(IRR2);
	CASE_SET_CR(IRR3);
	CASE_SET_CR(ITV);
	CASE_SET_CR(PMV);
	CASE_SET_CR(CMCV);
	CASE_SET_CR(LRR0);
	CASE_SET_CR(LRR1);
	default:
		printk(KERN_CRIT "wrong setreg %d\n", regnum);
		break;
	}
}