Searched refs:guest_efer (Results 1 – 10 of 10) sorted by relevance

/xen/xen/arch/x86/hvm/
domain.c
212 v->arch.hvm.guest_efer = regs->efer; in arch_set_info_hvm_guest()
263 v->arch.hvm.guest_efer = regs->efer; in arch_set_info_hvm_guest()
275 if ( v->arch.hvm.guest_efer & EFER_LME ) in arch_set_info_hvm_guest()
276 v->arch.hvm.guest_efer |= EFER_LMA; in arch_set_info_hvm_guest()
285 errstr = hvm_efer_valid(v, v->arch.hvm.guest_efer, -1); in arch_set_info_hvm_guest()
289 v->arch.hvm.guest_efer, errstr); in arch_set_info_hvm_guest()
hvm.c
857 .msr_efer = v->arch.hvm.guest_efer, in hvm_save_cpu_ctxt()
2052 v, v->arch.hvm.guest_efer, value, errstr); in hvm_set_efer()
2056 if ( ((value ^ v->arch.hvm.guest_efer) & EFER_LME) && in hvm_set_efer()
2090 ((value ^ v->arch.hvm.guest_efer) & EFER_SVME) ) in hvm_set_efer()
2097 value |= v->arch.hvm.guest_efer & EFER_LMA; in hvm_set_efer()
2098 v->arch.hvm.guest_efer = value; in hvm_set_efer()
2293 if ( v->arch.hvm.guest_efer & EFER_LME ) in hvm_set_cr0()
2302 v->arch.hvm.guest_efer |= EFER_LMA; in hvm_set_cr0()
2338 v->arch.hvm.guest_efer &= ~EFER_LMA; in hvm_set_cr0()
3486 *msr_content = v->arch.hvm.guest_efer; in hvm_msr_read_intercept()
[all …]
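
The domain.c and hvm.c hits above all funnel guest EFER updates through the same pattern: validate the requested value, refuse toggling EFER.LME while paging is enabled, carry the existing EFER.LMA over (it is hypervisor-controlled), then store the result in v->arch.hvm.guest_efer. A minimal, compile-only sketch of that pattern follows; it is not Xen code, and everything except the EFER_* bit names is invented for illustration.

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME (1u << 8)   /* long mode enable */
#define EFER_LMA (1u << 10)  /* long mode active */

struct toy_vcpu {
    uint64_t guest_efer;
    bool     paging_enabled;   /* stands in for hvm_paging_enabled(v) */
};

/* Returns 0 on success, -1 if the write would have to be rejected. */
static int toy_set_efer(struct toy_vcpu *v, uint64_t value)
{
    /* Changing LME while paging is on is invalid (cf. the hvm.c:2056 hit). */
    if ( ((value ^ v->guest_efer) & EFER_LME) && v->paging_enabled )
        return -1;

    /* LMA is owned by the hypervisor, not by guest MSR writes (hvm.c:2097). */
    value &= ~(uint64_t)EFER_LMA;
    value |= v->guest_efer & EFER_LMA;

    v->guest_efer = value;
    return 0;
}
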
/xen/xen/include/asm-x86/hvm/
vcpu.h
155 unsigned long guest_efer; member
hvm.h
371 ((v)->arch.hvm.guest_efer & EFER_NX)
379 #define hvm_long_mode_active(v) (!!((v)->arch.hvm.guest_efer & EFER_LMA))
/xen/xen/include/asm-x86/hvm/svm/
nestedsvm.h
97 (!!((v)->arch.hvm.guest_efer & EFER_SVME))
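
The header hits are simple predicates over the cached value: hvm_long_mode_active(), the NX test in hvm.h, and the EFER_SVME test in nestedsvm.h are all plain bit tests on guest_efer. A trivial standalone illustration of the pattern (the toy_* names are invented; only the EFER bit usage mirrors the hits):

#include <stdint.h>

#define EFER_LMA  (1u << 10)
#define EFER_NX   (1u << 11)
#define EFER_SVME (1u << 12)

struct toy_vcpu { uint64_t guest_efer; };

#define toy_long_mode_active(v) (!!((v)->guest_efer & EFER_LMA))
#define toy_nx_enabled(v)       (!!((v)->guest_efer & EFER_NX))
#define toy_svm_enabled(v)      (!!((v)->guest_efer & EFER_SVME))
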
/xen/xen/arch/x86/hvm/vmx/
vmx.c
743 v->arch.hvm.guest_efer = c->msr_efer; in vmx_vmcs_restore()
1322 if ( !hvm_pae_enabled(v) || (v->arch.hvm.guest_efer & EFER_LMA) ) in vmx_load_pdptrs()
1591 unsigned long entry_ctls, guest_efer = v->arch.hvm.guest_efer, in vmx_update_guest_efer() local
1600 guest_efer &= ~EFER_NX; in vmx_update_guest_efer()
1601 guest_efer |= xen_efer & EFER_NX; in vmx_update_guest_efer()
1618 if ( !(guest_efer & EFER_LMA) ) in vmx_update_guest_efer()
1619 guest_efer &= ~EFER_LME; in vmx_update_guest_efer()
1631 if ( guest_efer & EFER_LMA ) in vmx_update_guest_efer()
1638 __vmwrite(GUEST_EFER, guest_efer); in vmx_update_guest_efer()
1648 if ( guest_efer == xen_efer ) in vmx_update_guest_efer()
[all …]
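
The vmx.c hits show vmx_update_guest_efer() massaging the cached value before it reaches the VMCS: NX is made to follow Xen's own EFER, LME is dropped while LMA is clear, the IA-32e entry control tracks LMA, the result is written to GUEST_EFER, and an EFER switch can be skipped when the guest value already matches Xen's. A compile-only model of that bit logic, with the VMCS and entry-control plumbing reduced to plain fields (everything except the EFER_* bits is invented):

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME (1u << 8)
#define EFER_LMA (1u << 10)
#define EFER_NX  (1u << 11)

struct toy_vmcs {
    uint64_t guest_efer;         /* stands in for the GUEST_EFER VMCS field */
    bool     ia32e_mode_entry;   /* stands in for VM_ENTRY_IA32E_MODE       */
    bool     switch_efer;        /* whether a guest/host EFER switch is needed */
};

static void toy_vmx_update_guest_efer(struct toy_vmcs *vmcs,
                                      uint64_t guest_efer, uint64_t xen_efer)
{
    /* NX tracks Xen's own EFER setting (cf. vmx.c:1600-1601). */
    guest_efer &= ~(uint64_t)EFER_NX;
    guest_efer |= xen_efer & EFER_NX;

    /* LME is hidden from the hardware copy while LMA is clear (vmx.c:1618). */
    if ( !(guest_efer & EFER_LMA) )
        guest_efer &= ~(uint64_t)EFER_LME;

    /* The entry control decides whether the vCPU resumes in IA-32e mode. */
    vmcs->ia32e_mode_entry = !!(guest_efer & EFER_LMA);
    vmcs->guest_efer = guest_efer;          /* cf. __vmwrite(GUEST_EFER, ...) */

    /* If guest and Xen values agree, no EFER switch is needed (vmx.c:1648). */
    vmcs->switch_efer = (guest_efer != xen_efer);
}
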
vvmx.c
1226 v->arch.hvm.guest_efer |= EFER_LMA | EFER_LME; in virtual_vmentry()
1228 v->arch.hvm.guest_efer &= ~(EFER_LMA | EFER_LME); in virtual_vmentry()
1237 !(v->arch.hvm.guest_efer & EFER_LMA) ) in virtual_vmentry()
1440 !(v->arch.hvm.guest_efer & EFER_LMA) ) in virtual_vmexit()
1456 v->arch.hvm.guest_efer |= EFER_LMA | EFER_LME; in virtual_vmexit()
1458 v->arch.hvm.guest_efer &= ~(EFER_LMA | EFER_LME); in virtual_vmexit()
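
In the nested-VMX paths (virtual_vmentry()/virtual_vmexit()), guest_efer is rewritten on every L1/L2 switch so that LMA and LME match the mode the next level runs in; the real code derives that decision from the IA-32e VM-entry control. A toy sketch of just that toggle (names invented):

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME (1u << 8)
#define EFER_LMA (1u << 10)

struct toy_vcpu { uint64_t guest_efer; };

static void toy_switch_efer_long_mode(struct toy_vcpu *v, bool next_is_long_mode)
{
    if ( next_is_long_mode )
        v->guest_efer |= EFER_LMA | EFER_LME;
    else
        v->guest_efer &= ~(uint64_t)(EFER_LMA | EFER_LME);
}
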
/xen/xen/arch/x86/hvm/svm/
svm.c
376 v->arch.hvm.guest_efer = data->msr_efer; in svm_load_cpu_state()
558 unsigned long guest_efer = v->arch.hvm.guest_efer, in svm_update_guest_efer() local
564 guest_efer &= ~EFER_NX; in svm_update_guest_efer()
565 guest_efer |= xen_efer & EFER_NX; in svm_update_guest_efer()
576 if ( !(guest_efer & EFER_LMA) ) in svm_update_guest_efer()
577 guest_efer &= ~EFER_LME; in svm_update_guest_efer()
581 guest_efer |= EFER_SVME; in svm_update_guest_efer()
583 vmcb_set_efer(vmcb, guest_efer); in svm_update_guest_efer()
586 !(v->arch.hvm.guest_efer & EFER_SVME)); in svm_update_guest_efer()
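
svm_update_guest_efer() applies a similar massage on the AMD side before the value reaches the VMCB: NX follows Xen's EFER, LME is dropped while LMA is clear, and SVME is forced on in the value handed to hardware. A standalone sketch under those assumptions, with vmcb_set_efer() reduced to a plain field write (names other than the EFER_* bits are invented):

#include <stdint.h>

#define EFER_LME  (1u << 8)
#define EFER_LMA  (1u << 10)
#define EFER_NX   (1u << 11)
#define EFER_SVME (1u << 12)

struct toy_vmcb { uint64_t efer; };

static void toy_svm_update_guest_efer(struct toy_vmcb *vmcb,
                                      uint64_t guest_efer, uint64_t xen_efer)
{
    /* NX tracks Xen's own EFER (cf. svm.c:564-565). */
    guest_efer &= ~(uint64_t)EFER_NX;
    guest_efer |= xen_efer & EFER_NX;

    /* LME is dropped while LMA is clear (svm.c:576-577). */
    if ( !(guest_efer & EFER_LMA) )
        guest_efer &= ~(uint64_t)EFER_LME;

    /* SVME is forced on in the hardware copy (svm.c:581). */
    guest_efer |= EFER_SVME;

    vmcb->efer = guest_efer;   /* stands in for vmcb_set_efer(vmcb, ...) */
}
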
nestedsvm.c
246 n1vmcb->_efer = v->arch.hvm.guest_efer; in nsvm_vcpu_hostsave()
279 v->arch.hvm.guest_efer = n1vmcb->_efer; in nsvm_vcpu_hostrestore()
549 v->arch.hvm.guest_efer = ns_vmcb->_efer; in nsvm_vmcb_prepare4vmrun()
1666 if ( v->arch.hvm.guest_efer & EFER_SVME ) in svm_nested_features_on_efer_update()
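
The nestedsvm.c hits show the L1 value of guest_efer being stashed into the n1 VMCB before a nested VMRUN, replaced by the L2 VMCB's EFER for the duration of L2, and restored on the nested VMEXIT, while the EFER_SVME check at line 1666 gates whether nested features are enabled at all. A toy model of the save/restore part (structures and function names invented):

#include <stdint.h>

struct toy_vmcb { uint64_t efer; };
struct toy_vcpu { uint64_t guest_efer; };

static void toy_nested_vmrun(struct toy_vcpu *v, struct toy_vmcb *n1vmcb,
                             const struct toy_vmcb *ns_vmcb)
{
    n1vmcb->efer = v->guest_efer;    /* save L1 (host) state, cf. line 246  */
    v->guest_efer = ns_vmcb->efer;   /* run L2 with the L2 EFER, cf. line 549 */
}

static void toy_nested_vmexit(struct toy_vcpu *v, const struct toy_vmcb *n1vmcb)
{
    v->guest_efer = n1vmcb->efer;    /* restore L1 state, cf. line 279 */
}
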
/xen/xen/arch/x86/
vm_event.c
240 req->data.regs.x86.msr_efer = curr->arch.hvm.guest_efer; in vm_event_fill_regs()

Completed in 41 milliseconds