Lines matching refs:save (references to the VMCB save area in KVM's nested SVM code):
107 kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4, in nested_svm_init_mmu_context()
108 svm->vmcb01.ptr->save.efer, in nested_svm_init_mmu_context()
279 struct vmcb_save_area *save) in nested_vmcb_check_cr3_cr4() argument
286 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) { in nested_vmcb_check_cr3_cr4()
287 if (CC(!(save->cr4 & X86_CR4_PAE)) || in nested_vmcb_check_cr3_cr4()
288 CC(!(save->cr0 & X86_CR0_PE)) || in nested_vmcb_check_cr3_cr4()
289 CC(kvm_vcpu_is_illegal_gpa(vcpu, save->cr3))) in nested_vmcb_check_cr3_cr4()
293 if (CC(!kvm_is_valid_cr4(vcpu, save->cr4))) in nested_vmcb_check_cr3_cr4()
301 struct vmcb_save_area *save) in nested_vmcb_valid_sregs() argument
310 if (CC(!(save->efer & EFER_SVME))) in nested_vmcb_valid_sregs()
313 if (CC((save->cr0 & X86_CR0_CD) == 0 && (save->cr0 & X86_CR0_NW)) || in nested_vmcb_valid_sregs()
314 CC(save->cr0 & ~0xffffffffULL)) in nested_vmcb_valid_sregs()
317 if (CC(!kvm_dr6_valid(save->dr6)) || CC(!kvm_dr7_valid(save->dr7))) in nested_vmcb_valid_sregs()
320 if (!nested_vmcb_check_cr3_cr4(vcpu, save)) in nested_vmcb_valid_sregs()
323 if (CC(!kvm_valid_efer(vcpu, save->efer))) in nested_vmcb_valid_sregs()
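The checks at lines 279-323 gate VMRUN on a consistent vmcb12 save area; the vCPU-dependent parts (CR3 GPA width, CR4 reserved bits, EFER validity, DR6/DR7) are delegated to kvm_* helpers. Below is a minimal, self-contained sketch of the purely architectural part of the rule, using locally defined stand-ins for the bit constants rather than the kernel's headers:

    #include <stdbool.h>
    #include <stdint.h>

    /* Local stand-ins for the architectural bits referenced above. */
    #define EFER_LME    (1ULL << 8)
    #define EFER_SVME   (1ULL << 12)
    #define X86_CR0_PE  (1ULL << 0)
    #define X86_CR0_NW  (1ULL << 29)
    #define X86_CR0_CD  (1ULL << 30)
    #define X86_CR0_PG  (1ULL << 31)
    #define X86_CR4_PAE (1ULL << 5)

    struct save_regs {                  /* simplified vmcb_save_area */
        uint64_t efer, cr0, cr3, cr4;
    };

    /*
     * Mirrors the intent of nested_vmcb_valid_sregs()/nested_vmcb_check_cr3_cr4():
     * EFER.SVME must be set, CR0.NW requires CR0.CD, CR0[63:32] are reserved,
     * and long-mode paging (EFER.LME + CR0.PG) needs CR4.PAE and CR0.PE.
     */
    static bool save_regs_consistent(const struct save_regs *s)
    {
        if (!(s->efer & EFER_SVME))
            return false;
        if (!(s->cr0 & X86_CR0_CD) && (s->cr0 & X86_CR0_NW))
            return false;
        if (s->cr0 & ~0xffffffffULL)
            return false;
        if ((s->efer & EFER_LME) && (s->cr0 & X86_CR0_PG) &&
            (!(s->cr4 & X86_CR4_PAE) || !(s->cr0 & X86_CR0_PE)))
            return false;
        return true;
    }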
461 svm->nested.vmcb02.ptr->save.g_pat = svm->vmcb01.ptr->save.g_pat; in nested_vmcb02_compute_g_pat()
477 svm->vmcb->save.es = vmcb12->save.es; in nested_vmcb02_prepare_save()
478 svm->vmcb->save.cs = vmcb12->save.cs; in nested_vmcb02_prepare_save()
479 svm->vmcb->save.ss = vmcb12->save.ss; in nested_vmcb02_prepare_save()
480 svm->vmcb->save.ds = vmcb12->save.ds; in nested_vmcb02_prepare_save()
481 svm->vmcb->save.cpl = vmcb12->save.cpl; in nested_vmcb02_prepare_save()
486 svm->vmcb->save.gdtr = vmcb12->save.gdtr; in nested_vmcb02_prepare_save()
487 svm->vmcb->save.idtr = vmcb12->save.idtr; in nested_vmcb02_prepare_save()
491 kvm_set_rflags(&svm->vcpu, vmcb12->save.rflags | X86_EFLAGS_FIXED); in nested_vmcb02_prepare_save()
498 svm_set_efer(&svm->vcpu, vmcb12->save.efer | EFER_SVME); in nested_vmcb02_prepare_save()
500 svm_set_cr0(&svm->vcpu, vmcb12->save.cr0); in nested_vmcb02_prepare_save()
501 svm_set_cr4(&svm->vcpu, vmcb12->save.cr4); in nested_vmcb02_prepare_save()
503 svm->vcpu.arch.cr2 = vmcb12->save.cr2; in nested_vmcb02_prepare_save()
505 kvm_rax_write(&svm->vcpu, vmcb12->save.rax); in nested_vmcb02_prepare_save()
506 kvm_rsp_write(&svm->vcpu, vmcb12->save.rsp); in nested_vmcb02_prepare_save()
507 kvm_rip_write(&svm->vcpu, vmcb12->save.rip); in nested_vmcb02_prepare_save()
510 svm->vmcb->save.rax = vmcb12->save.rax; in nested_vmcb02_prepare_save()
511 svm->vmcb->save.rsp = vmcb12->save.rsp; in nested_vmcb02_prepare_save()
512 svm->vmcb->save.rip = vmcb12->save.rip; in nested_vmcb02_prepare_save()
516 svm->vmcb->save.dr7 = vmcb12->save.dr7 | DR7_FIXED_1; in nested_vmcb02_prepare_save()
517 svm->vcpu.arch.dr6 = vmcb12->save.dr6 | DR6_ACTIVE_LOW; in nested_vmcb02_prepare_save()
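Lines 477-517 show a deliberate split in nested_vmcb02_prepare_save(): segment and descriptor-table state is assigned into vmcb02 directly, while RFLAGS, EFER, CR0, CR4, the GPRs and DR6/DR7 are routed through kvm_set_rflags(), svm_set_efer(), svm_set_cr0(), svm_set_cr4() and the register write accessors, so state that KVM caches or shadows outside the VMCB is updated along with it. A hypothetical miniature of the problem those setters solve:

    #include <stdbool.h>
    #include <stdint.h>

    #define CR0_PG (1ULL << 31)

    /*
     * Hypothetical miniature: some register state is mirrored outside the
     * VMCB (here, a cached "paging enabled" flag), so a raw store into the
     * save area would leave the mirror stale; a setter keeps both in sync.
     */
    struct mini_vcpu {
        uint64_t vmcb_cr0;   /* what the hardware will load on VMRUN */
        bool     cached_pg;  /* what the rest of the hypervisor consults */
    };

    static void mini_set_cr0(struct mini_vcpu *v, uint64_t cr0)
    {
        v->vmcb_cr0  = cr0;
        v->cached_pg = !!(cr0 & CR0_PG);
    }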
598 to_vmcb->save.spec_ctrl = from_vmcb->save.spec_ctrl; in nested_svm_copy_common_state()
607 trace_kvm_nested_vmrun(svm->vmcb->save.rip, vmcb12_gpa, in enter_svm_guest_mode()
608 vmcb12->save.rip, in enter_svm_guest_mode()
631 ret = nested_svm_load_cr3(&svm->vcpu, vmcb12->save.cr3, in enter_svm_guest_mode()
665 vmcb12_gpa = svm->vmcb->save.rax; in nested_svm_vmrun()
683 if (!nested_vmcb_valid_sregs(vcpu, &vmcb12->save) || in nested_svm_vmrun()
696 svm->vmcb01.ptr->save.efer = vcpu->arch.efer; in nested_svm_vmrun()
697 svm->vmcb01.ptr->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmrun()
698 svm->vmcb01.ptr->save.cr4 = vcpu->arch.cr4; in nested_svm_vmrun()
699 svm->vmcb01.ptr->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmrun()
700 svm->vmcb01.ptr->save.rip = kvm_rip_read(vcpu); in nested_svm_vmrun()
703 svm->vmcb01.ptr->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmrun()
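Lines 665-703 are the L1-host side of VMRUN: vmcb12 is located via the RAX-supplied GPA (line 665) and validated, and only then are L1's EFER, CR0, CR4, RFLAGS and RIP snapshotted into vmcb01's save area so that #VMEXIT can restore them. The CR3 save sits behind a condition in the original (note the jump from line 700 to 703), likely the shadow-paging (!npt_enabled) case. A condensed sketch of the snapshot step, with a hypothetical struct in place of the real VMCB layout:

    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical mirror of the fields that lines 696-703 stash away. */
    struct l1_snapshot {
        uint64_t efer, cr0, cr4, rflags, rip, cr3;
    };

    /*
     * Snapshot before switching: these writes happen while vmcb01 is not in
     * use, and before any vmcb12 guest state is loaded into vmcb02.
     */
    static void snapshot_l1(struct l1_snapshot *vmcb01_save,
                            const struct l1_snapshot *l1_now,
                            bool shadow_paging)
    {
        vmcb01_save->efer   = l1_now->efer;
        vmcb01_save->cr0    = l1_now->cr0;
        vmcb01_save->cr4    = l1_now->cr4;
        vmcb01_save->rflags = l1_now->rflags;
        vmcb01_save->rip    = l1_now->rip;
        if (shadow_paging)              /* CR3 only matters without NPT */
            vmcb01_save->cr3 = l1_now->cr3;
    }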
752 to_vmcb->save.fs = from_vmcb->save.fs; in svm_copy_vmloadsave_state()
753 to_vmcb->save.gs = from_vmcb->save.gs; in svm_copy_vmloadsave_state()
754 to_vmcb->save.tr = from_vmcb->save.tr; in svm_copy_vmloadsave_state()
755 to_vmcb->save.ldtr = from_vmcb->save.ldtr; in svm_copy_vmloadsave_state()
756 to_vmcb->save.kernel_gs_base = from_vmcb->save.kernel_gs_base; in svm_copy_vmloadsave_state()
757 to_vmcb->save.star = from_vmcb->save.star; in svm_copy_vmloadsave_state()
758 to_vmcb->save.lstar = from_vmcb->save.lstar; in svm_copy_vmloadsave_state()
759 to_vmcb->save.cstar = from_vmcb->save.cstar; in svm_copy_vmloadsave_state()
760 to_vmcb->save.sfmask = from_vmcb->save.sfmask; in svm_copy_vmloadsave_state()
761 to_vmcb->save.sysenter_cs = from_vmcb->save.sysenter_cs; in svm_copy_vmloadsave_state()
762 to_vmcb->save.sysenter_esp = from_vmcb->save.sysenter_esp; in svm_copy_vmloadsave_state()
763 to_vmcb->save.sysenter_eip = from_vmcb->save.sysenter_eip; in svm_copy_vmloadsave_state()
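Lines 752-763 copy exactly the register set that the VMLOAD and VMSAVE instructions touch: FS, GS, TR, LDTR plus KERNEL_GS_BASE, STAR/LSTAR/CSTAR/SFMASK and the SYSENTER MSRs. Because the helper is direction-agnostic, the same function can back both emulated instructions; a hypothetical wrapper along those lines (the actual call sites are outside this listing):

    /*
     * Hypothetical wrapper: VMLOAD pulls the guest's vmcb12 fields into the
     * VMCB KVM will run with, VMSAVE pushes them the other way; both reuse
     * svm_copy_vmloadsave_state() from lines 752-763.
     */
    static void emulate_vmload_vmsave(struct vmcb *vmcb12, struct vmcb *vmcb,
                                      bool vmload)
    {
        if (vmload)
            svm_copy_vmloadsave_state(vmcb, vmcb12);
        else
            svm_copy_vmloadsave_state(vmcb12, vmcb);
    }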
798 vmcb12->save.es = vmcb->save.es; in nested_svm_vmexit()
799 vmcb12->save.cs = vmcb->save.cs; in nested_svm_vmexit()
800 vmcb12->save.ss = vmcb->save.ss; in nested_svm_vmexit()
801 vmcb12->save.ds = vmcb->save.ds; in nested_svm_vmexit()
802 vmcb12->save.gdtr = vmcb->save.gdtr; in nested_svm_vmexit()
803 vmcb12->save.idtr = vmcb->save.idtr; in nested_svm_vmexit()
804 vmcb12->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
805 vmcb12->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmexit()
806 vmcb12->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmexit()
807 vmcb12->save.cr2 = vmcb->save.cr2; in nested_svm_vmexit()
808 vmcb12->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
809 vmcb12->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmexit()
810 vmcb12->save.rip = kvm_rip_read(vcpu); in nested_svm_vmexit()
811 vmcb12->save.rsp = kvm_rsp_read(vcpu); in nested_svm_vmexit()
812 vmcb12->save.rax = kvm_rax_read(vcpu); in nested_svm_vmexit()
813 vmcb12->save.dr7 = vmcb->save.dr7; in nested_svm_vmexit()
814 vmcb12->save.dr6 = svm->vcpu.arch.dr6; in nested_svm_vmexit()
815 vmcb12->save.cpl = vmcb->save.cpl; in nested_svm_vmexit()
862 kvm_set_rflags(vcpu, svm->vmcb->save.rflags); in nested_svm_vmexit()
863 svm_set_efer(vcpu, svm->vmcb->save.efer); in nested_svm_vmexit()
864 svm_set_cr0(vcpu, svm->vmcb->save.cr0 | X86_CR0_PE); in nested_svm_vmexit()
865 svm_set_cr4(vcpu, svm->vmcb->save.cr4); in nested_svm_vmexit()
866 kvm_rax_write(vcpu, svm->vmcb->save.rax); in nested_svm_vmexit()
867 kvm_rsp_write(vcpu, svm->vmcb->save.rsp); in nested_svm_vmexit()
868 kvm_rip_write(vcpu, svm->vmcb->save.rip); in nested_svm_vmexit()
886 rc = nested_svm_load_cr3(vcpu, svm->vmcb->save.cr3, false, true); in nested_svm_vmexit()
904 if (unlikely(svm->vmcb->save.rflags & X86_EFLAGS_TF)) in nested_svm_vmexit()
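Lines 798-904 are the mirror image of VMRUN. Phase one (lines 798-815) writes the exiting L2 state back into vmcb12 so L1 can inspect it; phase two (lines 862-886) reloads L1 from the vmcb01 snapshot through the same setters used on entry, with line 864 forcing CR0.PE on (L1 must have been in protected mode to execute VMRUN in the first place). A schematic of the two phases with a hypothetical reduced register set:

    #include <stdint.h>

    /* Hypothetical reduced register set; stands in for the fields moved in
     * lines 798-815 and 862-868. */
    struct exit_regs {
        uint64_t efer, cr0, cr4, rflags, rax, rsp, rip;
    };

    static void vmexit_sketch(struct exit_regs *vmcb12_save,       /* seen by L1      */
                              const struct exit_regs *l2_state,    /* exiting L2      */
                              const struct exit_regs *vmcb01_save, /* VMRUN snapshot  */
                              struct exit_regs *vcpu_state)        /* KVM's vCPU view */
    {
        /* Phase 1: publish the exiting L2 state to vmcb12. */
        *vmcb12_save = *l2_state;

        /*
         * Phase 2: put L1 back where VMRUN left it.  The kernel goes through
         * kvm_set_rflags()/svm_set_efer()/svm_set_cr0()/svm_set_cr4() and the
         * kvm_*_write() accessors so cached state stays coherent.
         */
        *vcpu_state = *vmcb01_save;
    }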
1099 if (to_svm(vcpu)->vmcb->save.cpl) { in nested_svm_check_permissions()
1208 trace_kvm_nested_intr_vmexit(svm->vmcb->save.rip); in svm_check_nested_events()
1303 if (copy_to_user(&user_vmcb->save, &svm->vmcb01.ptr->save, in svm_get_nested_state()
1304 sizeof(user_vmcb->save))) in svm_get_nested_state()
1318 struct vmcb_save_area *save; in svm_set_nested_state() local
1360 save = kzalloc(sizeof(*save), GFP_KERNEL_ACCOUNT); in svm_set_nested_state()
1361 if (!ctl || !save) in svm_set_nested_state()
1367 if (copy_from_user(save, &user_vmcb->save, sizeof(*save))) in svm_set_nested_state()
1386 if (!(save->cr0 & X86_CR0_PG) || in svm_set_nested_state()
1387 !(save->cr0 & X86_CR0_PE) || in svm_set_nested_state()
1388 (save->rflags & X86_EFLAGS_VM) || in svm_set_nested_state()
1389 !nested_vmcb_valid_sregs(vcpu, save)) in svm_set_nested_state()
1415 svm->nested.vmcb02.ptr->save = svm->vmcb01.ptr->save; in svm_set_nested_state()
1424 svm_copy_vmrun_state(&svm->vmcb01.ptr->save, save); in svm_set_nested_state()
1432 kfree(save); in svm_set_nested_state()
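Lines 1303-1304 and 1318-1432 are the migration path: svm_get_nested_state() exports vmcb01's save area with copy_to_user(), and svm_set_nested_state() does the reverse, allocating a scratch save area, pulling the caller's copy back in with copy_from_user(), revalidating it with the same nested_vmcb_valid_sregs() used on VMRUN plus the CR0.PG/PE and RFLAGS.VM checks for L1 host state, installing it, and freeing the scratch buffer. A hedged sketch of that allocate/copy/validate/free shape, with a hypothetical validate() callback standing in for the checks at lines 1386-1389:

    /*
     * Hedged sketch of the pattern in svm_set_nested_state(); "validate" is
     * a hypothetical stand-in for the checks at lines 1386-1389.
     */
    static int set_state_sketch(struct vmcb_save_area __user *user_save,
                                bool (*validate)(const struct vmcb_save_area *))
    {
        struct vmcb_save_area *save;
        int ret;

        save = kzalloc(sizeof(*save), GFP_KERNEL_ACCOUNT);
        if (!save)
            return -ENOMEM;

        ret = -EFAULT;
        if (copy_from_user(save, user_save, sizeof(*save)))
            goto out_free;

        ret = -EINVAL;
        if (!validate(save))
            goto out_free;

        /* ...install into vmcb01/vmcb02 as in lines 1415 and 1424... */
        ret = 0;

    out_free:
        kfree(save);
        return ret;
    }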