Lines Matching refs:gfn

Each hit below shows the source line number, the matching code, the enclosing function, and whether gfn is used as an argument or a local at that site. The functions appear to come from the KVM PPC secure-guest (ultravisor) memory code, most likely arch/powerpc/kvm/book3s_hv_uvmem.c.

288 static void kvmppc_mark_gfn(unsigned long gfn, struct kvm *kvm,  in kvmppc_mark_gfn()  argument
294 if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns) { in kvmppc_mark_gfn()
295 unsigned long index = gfn - p->base_pfn; in kvmppc_mark_gfn()
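The hits at lines 288-295 outline the core bookkeeping helper: walk the per-VM list of uvmem slots, find the one whose pfn range covers the gfn, and stamp a state word at the computed index. A minimal sketch of how the fragments plausibly fit together follows; the kvmppc_uvmem_slot layout and the KVMPPC_GFN_* names are inferred from the other hits in this listing, so treat them as assumptions rather than verbatim source.

    struct kvmppc_uvmem_slot {                  /* assumed layout */
            struct list_head list;              /* linked on kvm->arch.uvmem_pfns */
            unsigned long nr_pfns;
            unsigned long base_pfn;
            unsigned long *pfns;                /* per-gfn state: flags | uvmem pfn */
    };

    static void kvmppc_mark_gfn(unsigned long gfn, struct kvm *kvm,
                                unsigned long flag, unsigned long uvmem_pfn)
    {
            struct kvmppc_uvmem_slot *p;

            /* Find the slot whose [base_pfn, base_pfn + nr_pfns) covers gfn */
            list_for_each_entry(p, &kvm->arch.uvmem_pfns, list) {
                    if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns) {
                            unsigned long index = gfn - p->base_pfn;

                            /* UVMEM entries also record the backing device pfn */
                            if (flag == KVMPPC_GFN_UVMEM_PFN)
                                    p->pfns[index] = uvmem_pfn | flag;
                            else
                                    p->pfns[index] = flag;
                            return;
                    }
            }
    }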
307 static void kvmppc_gfn_secure_uvmem_pfn(unsigned long gfn, in kvmppc_gfn_secure_uvmem_pfn() argument
310 kvmppc_mark_gfn(gfn, kvm, KVMPPC_GFN_UVMEM_PFN, uvmem_pfn); in kvmppc_gfn_secure_uvmem_pfn()
314 static void kvmppc_gfn_secure_mem_pfn(unsigned long gfn, struct kvm *kvm) in kvmppc_gfn_secure_mem_pfn() argument
316 kvmppc_mark_gfn(gfn, kvm, KVMPPC_GFN_MEM_PFN, 0); in kvmppc_gfn_secure_mem_pfn()
320 static void kvmppc_gfn_shared(unsigned long gfn, struct kvm *kvm) in kvmppc_gfn_shared() argument
322 kvmppc_mark_gfn(gfn, kvm, KVMPPC_GFN_SHARED, 0); in kvmppc_gfn_shared()
326 static void kvmppc_gfn_remove(unsigned long gfn, struct kvm *kvm) in kvmppc_gfn_remove() argument
328 kvmppc_mark_gfn(gfn, kvm, 0, 0); in kvmppc_gfn_remove()
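Lines 307-328 are thin wrappers over kvmppc_mark_gfn() that record one state per gfn: backed by a uvmem (device) pfn, secured in normal memory, shared with the hypervisor, or cleared (flag 0 on line 328). That implies a flag encoding along these lines; the bit positions below are illustrative assumptions, only the distinct-state structure comes from the listing.

    /* Illustrative bit assignments; only the distinct states are given above. */
    #define KVMPPC_GFN_UVMEM_PFN    (1UL << 63) /* backed by a uvmem device pfn */
    #define KVMPPC_GFN_MEM_PFN      (1UL << 62) /* secured, in normal memory */
    #define KVMPPC_GFN_SHARED       (1UL << 61) /* shared with the hypervisor */
    #define KVMPPC_GFN_FLAG_MASK    (KVMPPC_GFN_UVMEM_PFN | KVMPPC_GFN_MEM_PFN | \
                                     KVMPPC_GFN_SHARED)
    #define KVMPPC_GFN_PFN_MASK     (~KVMPPC_GFN_FLAG_MASK) /* low bits: the pfn */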
332 static bool kvmppc_gfn_is_uvmem_pfn(unsigned long gfn, struct kvm *kvm, in kvmppc_gfn_is_uvmem_pfn() argument
338 if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns) { in kvmppc_gfn_is_uvmem_pfn()
339 unsigned long index = gfn - p->base_pfn; in kvmppc_gfn_is_uvmem_pfn()
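Lines 332-339 show the read side mirroring kvmppc_mark_gfn(): the same slot walk and index computation, but testing the state word and optionally handing back the stored uvmem pfn. A sketch under the same assumed encoding:

    static bool kvmppc_gfn_is_uvmem_pfn(unsigned long gfn, struct kvm *kvm,
                                        unsigned long *uvmem_pfn)
    {
            struct kvmppc_uvmem_slot *p;

            list_for_each_entry(p, &kvm->arch.uvmem_pfns, list) {
                    if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns) {
                            unsigned long index = gfn - p->base_pfn;

                            if (p->pfns[index] & KVMPPC_GFN_UVMEM_PFN) {
                                    /* Strip flag bits, hand back the raw pfn */
                                    if (uvmem_pfn)
                                            *uvmem_pfn = p->pfns[index] &
                                                         KVMPPC_GFN_PFN_MASK;
                                    return true;
                            }
                            return false;
                    }
            }
            return false;
    }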
361 struct kvm *kvm, unsigned long *gfn) in kvmppc_next_nontransitioned_gfn() argument
368 if (*gfn >= p->base_pfn && *gfn < p->base_pfn + p->nr_pfns) in kvmppc_next_nontransitioned_gfn()
376 for (i = *gfn; i < p->base_pfn + p->nr_pfns; i++) { in kvmppc_next_nontransitioned_gfn()
380 *gfn = i; in kvmppc_next_nontransitioned_gfn()
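Lines 361-380 belong to an iterator used by the bulk-migration path below: starting from *gfn, locate the covering slot, then scan forward (line 376) for the first gfn whose state word carries no flag, i.e. one that is neither secured, shared, nor mid-transition, and report it through *gfn (line 380). A plausible reconstruction:

    static bool kvmppc_next_nontransitioned_gfn(const struct kvm_memory_slot *memslot,
                                                struct kvm *kvm, unsigned long *gfn)
    {
            struct kvmppc_uvmem_slot *p = NULL, *iter;
            unsigned long i;

            /* Find the uvmem slot covering the starting gfn; memslot is kept
             * for the caller's benefit, the walk assumes one uvmem slot per
             * memslot. */
            list_for_each_entry(iter, &kvm->arch.uvmem_pfns, list)
                    if (*gfn >= iter->base_pfn &&
                        *gfn < iter->base_pfn + iter->nr_pfns) {
                            p = iter;
                            break;
                    }
            if (!p)
                    return false;

            /* Scan forward for the first gfn with no state flag set */
            for (i = *gfn; i < p->base_pfn + p->nr_pfns; i++) {
                    unsigned long index = i - p->base_pfn;

                    if (!(p->pfns[index] & KVMPPC_GFN_FLAG_MASK)) {
                            *gfn = i;
                            return true;
                    }
            }
            return false;
    }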
391 unsigned long gfn = memslot->base_gfn; in kvmppc_memslot_page_merge() local
392 unsigned long end, start = gfn_to_hva(kvm, gfn); in kvmppc_memslot_page_merge()
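Lines 391-392 show the standard conversion of a memslot into a host-virtual range: resolve the slot's base gfn with gfn_to_hva() and derive the end from the page count. Roughly:

    /* Derive the hva range covered by the memslot (shape taken from line 392) */
    unsigned long gfn = memslot->base_gfn;
    unsigned long start = gfn_to_hva(kvm, gfn);
    unsigned long end = start + (memslot->npages << PAGE_SHIFT);

    /* [start, end) is then walked VMA by VMA, presumably toggling KSM page
     * merging (the function name suggests MADV_MERGEABLE/MADV_UNMERGEABLE). */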
606 unsigned long uvmem_pfn, gfn; in kvmppc_uvmem_drop_pages() local
613 gfn = slot->base_gfn; in kvmppc_uvmem_drop_pages()
614 for (i = slot->npages; i; --i, ++gfn, addr += PAGE_SIZE) { in kvmppc_uvmem_drop_pages()
620 pr_err("Can't find VMA for gfn:0x%lx\n", gfn); in kvmppc_uvmem_drop_pages()
627 if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, &uvmem_pfn)) { in kvmppc_uvmem_drop_pages()
639 kvmppc_gfn_remove(gfn, kvm); in kvmppc_uvmem_drop_pages()
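Lines 606-639 come from the teardown path: iterate every gfn in the slot in lockstep with its hva (line 614), re-resolving the VMA only when the address walks past the cached one (line 620 is the failure case), paging secure pages back out of the ultravisor, and clearing recorded state otherwise (line 639). A condensed sketch; the locking and the kvmppc_svm_drop_page() helper are assumptions:

    void kvmppc_uvmem_drop_pages(const struct kvm_memory_slot *slot,
                                 struct kvm *kvm, bool skip_page_out)
    {
            struct vm_area_struct *vma = NULL;
            unsigned long uvmem_pfn, gfn, addr;
            int i;

            mmap_read_lock(kvm->mm);

            addr = slot->userspace_addr;
            gfn = slot->base_gfn;
            for (i = slot->npages; i; --i, ++gfn, addr += PAGE_SIZE) {

                    /* Refresh the cached VMA once addr walks past its end */
                    if (!vma || addr >= vma->vm_end) {
                            vma = vma_lookup(kvm->mm, addr);
                            if (!vma) {
                                    pr_err("Can't find VMA for gfn:0x%lx\n", gfn);
                                    break;
                            }
                    }

                    mutex_lock(&kvm->arch.uvmem_lock);
                    if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, &uvmem_pfn)) {
                            /*
                             * Secure page: pull it back from the ultravisor
                             * (or skip the copy-out, per skip_page_out) before
                             * the backing goes away. Helper name hypothetical.
                             */
                            kvmppc_svm_drop_page(vma, addr, kvm, gfn, uvmem_pfn,
                                                 skip_page_out);
                    } else {
                            /* Shared/normal page: clear the recorded state */
                            kvmppc_gfn_remove(gfn, kvm);
                    }
                    mutex_unlock(&kvm->arch.uvmem_lock);
            }

            mmap_read_unlock(kvm->mm);
    }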
787 unsigned long gfn = memslot->base_gfn; in kvmppc_uv_migrate_mem_slot() local
794 while (kvmppc_next_nontransitioned_gfn(memslot, kvm, &gfn)) { in kvmppc_uv_migrate_mem_slot()
796 start = gfn_to_hva(kvm, gfn); in kvmppc_uv_migrate_mem_slot()
806 (gfn << PAGE_SHIFT), kvm, PAGE_SHIFT, false); in kvmppc_uv_migrate_mem_slot()
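Lines 787-806 drive bulk migration of a memslot into secure memory: repeatedly ask kvmppc_next_nontransitioned_gfn() for the next unprocessed gfn, translate it to an hva, find the covering VMA, and migrate one page; line 806 shows the tail of that call, passing the gpa (gfn << PAGE_SHIFT), the VM, the page shift, and a pagein flag of false. A sketch of the loop; error codes and locking are assumptions based on the fragments:

    static int kvmppc_uv_migrate_mem_slot(struct kvm *kvm,
                                          const struct kvm_memory_slot *memslot)
    {
            unsigned long gfn = memslot->base_gfn;
            unsigned long start, end;
            struct vm_area_struct *vma;
            int ret = 0;

            mmap_read_lock(kvm->mm);
            mutex_lock(&kvm->arch.uvmem_lock);
            while (kvmppc_next_nontransitioned_gfn(memslot, kvm, &gfn)) {
                    ret = H_STATE;
                    start = gfn_to_hva(kvm, gfn);
                    if (kvm_is_error_hva(start))
                            break;

                    end = start + (1UL << PAGE_SHIFT);
                    vma = find_vma_intersection(kvm->mm, start, end);
                    if (!vma || vma->vm_start > start || vma->vm_end < end)
                            break;

                    ret = kvmppc_svm_page_in(vma, start, end,
                                             (gfn << PAGE_SHIFT), kvm,
                                             PAGE_SHIFT, false);
                    if (ret) {
                            ret = H_STATE;
                            break;
                    }
            }
            mutex_unlock(&kvm->arch.uvmem_lock);
            mmap_read_unlock(kvm->mm);
            return ret;
    }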
875 unsigned long gfn = gpa >> page_shift; in kvmppc_share_page() local
881 if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, &uvmem_pfn)) { in kvmppc_share_page()
894 pfn = gfn_to_pfn(kvm, gfn); in kvmppc_share_page()
899 if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, &uvmem_pfn)) { in kvmppc_share_page()
910 kvmppc_gfn_shared(gfn, kvm); in kvmppc_share_page()
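Lines 875-910 implement the shared-page case of H_SVM_PAGE_IN: derive the gfn from the gpa (line 875); if the page currently lives in uvmem (line 881), flag it so the coming fault skips the copy-out; resolve the host pfn (line 894); re-check under the lock because the lookup can race with a secure fault (line 899); then hand the pfn to the ultravisor and mark the gfn shared (line 910). A condensed sketch, with the zone-device private-data handling folded into a hypothetical helper:

    static unsigned long kvmppc_share_page(struct kvm *kvm, unsigned long gpa,
                                           unsigned long page_shift)
    {
            unsigned long gfn = gpa >> page_shift;
            unsigned long pfn, uvmem_pfn;
            unsigned long ret = H_PARAMETER;

            mutex_lock(&kvm->arch.uvmem_lock);
            if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, &uvmem_pfn)) {
                    /*
                     * Page lives in uvmem: tell the fault path to skip the
                     * copy-out and keep the gfn, since it transitions to
                     * shared rather than being removed. Helper hypothetical.
                     */
                    kvmppc_uvmem_mark_skip_page_out(uvmem_pfn);
            }
            mutex_unlock(&kvm->arch.uvmem_lock);

            /* This fault-in pulls the page back from uvmem if needed */
            pfn = gfn_to_pfn(kvm, gfn);
            if (is_error_noslot_pfn(pfn))
                    return ret;

            mutex_lock(&kvm->arch.uvmem_lock);
            if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, &uvmem_pfn)) {
                    /* Raced with a secure fault; the real code retries here */
                    kvm_release_pfn_clean(pfn);
                    mutex_unlock(&kvm->arch.uvmem_lock);
                    return ret;
            }

            /* Share the pfn with the ultravisor and record the state */
            if (!uv_page_in(kvm->arch.lpid, pfn << page_shift, gpa, 0,
                            page_shift)) {
                    kvmppc_gfn_shared(gfn, kvm);
                    ret = H_SUCCESS;
            }
            kvm_release_pfn_clean(pfn);
            mutex_unlock(&kvm->arch.uvmem_lock);
            return ret;
    }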
933 unsigned long gfn = gpa >> page_shift; in kvmppc_h_svm_page_in() local
952 start = gfn_to_hva(kvm, gfn); in kvmppc_h_svm_page_in()
958 if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, NULL)) in kvmppc_h_svm_page_in()
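Lines 933-958 are the entry checks of the non-shared H_SVM_PAGE_IN path: derive the gfn, resolve its hva, and return early if the gfn is already backed by a uvmem pfn, since the page is already in. A sketch of that shape; the flag name comes from the hcall ABI, the rest is assumption:

    static unsigned long kvmppc_h_svm_page_in(struct kvm *kvm, unsigned long gpa,
                                              unsigned long flags,
                                              unsigned long page_shift)
    {
            unsigned long gfn = gpa >> page_shift;
            unsigned long start;

            /* Shared pages take the kvmppc_share_page() path sketched above */
            if (flags & H_PAGE_IN_SHARED)
                    return kvmppc_share_page(kvm, gpa, page_shift);

            start = gfn_to_hva(kvm, gfn);
            if (kvm_is_error_hva(start))
                    return H_PARAMETER;

            /* Already backed by a uvmem pfn: the page is in, nothing to do */
            if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, NULL))
                    return H_SUCCESS;

            /* ... otherwise find the covering VMA and migrate the page in,
             * as in kvmppc_uv_migrate_mem_slot() above ... */
            return H_SUCCESS;
    }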
1039 unsigned long gfn = gpa >> page_shift; in kvmppc_h_svm_page_out() local
1057 start = gfn_to_hva(kvm, gfn); in kvmppc_h_svm_page_out()
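Lines 1039-1057 set up H_SVM_PAGE_OUT the same way: gpa to gfn, gfn to hva, then (mirroring the page-in path above) find the covering VMA and migrate the page back out of secure memory. The address setup, sketched:

    unsigned long gfn = gpa >> page_shift;
    unsigned long start, end;
    struct vm_area_struct *vma;

    start = gfn_to_hva(kvm, gfn);
    if (kvm_is_error_hva(start))
            return H_PARAMETER;

    end = start + (1UL << page_shift);
    vma = find_vma_intersection(kvm->mm, start, end);
    /* ... then migrate [start, end) back out of secure memory ... */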
1074 int kvmppc_send_page_to_uv(struct kvm *kvm, unsigned long gfn) in kvmppc_send_page_to_uv() argument
1079 pfn = gfn_to_pfn(kvm, gfn); in kvmppc_send_page_to_uv()
1084 if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, NULL)) in kvmppc_send_page_to_uv()
1087 ret = uv_page_in(kvm->arch.lpid, pfn << PAGE_SHIFT, gfn << PAGE_SHIFT, in kvmppc_send_page_to_uv()
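Lines 1074-1087 cover the last path: pushing an ordinary page into the ultravisor, for instance while a guest transitions to secure mode. Resolve the pfn, skip gfns already backed by uvmem since the fault path owns those, and issue uv_page_in() with both addresses scaled to byte offsets (line 1087). A sketch; the locking and the return-code mapping are assumptions:

    int kvmppc_send_page_to_uv(struct kvm *kvm, unsigned long gfn)
    {
            unsigned long pfn;
            int ret = U_SUCCESS;

            pfn = gfn_to_pfn(kvm, gfn);
            if (is_error_noslot_pfn(pfn))
                    return -EFAULT;

            mutex_lock(&kvm->arch.uvmem_lock);
            /* Pages already living in uvmem are handled by the fault path */
            if (kvmppc_gfn_is_uvmem_pfn(gfn, kvm, NULL))
                    goto out;

            ret = uv_page_in(kvm->arch.lpid, pfn << PAGE_SHIFT,
                             gfn << PAGE_SHIFT, 0, PAGE_SHIFT);
    out:
            kvm_release_pfn_clean(pfn);
            mutex_unlock(&kvm->arch.uvmem_lock);
            return (ret == U_SUCCESS) ? RESUME_GUEST : -EFAULT;
    }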