Lines Matching refs:npages
178 unsigned long npages; in kvmppc_map_vrma() local
188 npages = memslot->npages >> (porder - PAGE_SHIFT); in kvmppc_map_vrma()
191 if (npages > 1ul << (40 - porder)) in kvmppc_map_vrma()
192 npages = 1ul << (40 - porder); in kvmppc_map_vrma()
194 if (npages > kvm->arch.hpt_mask + 1) in kvmppc_map_vrma()
195 npages = kvm->arch.hpt_mask + 1; in kvmppc_map_vrma()
202 for (i = 0; i < npages; ++i) { in kvmppc_map_vrma()
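The kvmppc_map_vrma() matches above all feed one calculation: the memslot size is converted into large pages of order porder and then clamped twice, first to the 1 TiB VRMA limit and then to the number of HPT hash buckets. Below is a minimal standalone sketch of just that arithmetic; the slot size, porder, and hpt_mask values are illustrative assumptions, not values taken from a real guest.

/* Sketch of the npages clamping seen in kvmppc_map_vrma() above. */
#include <stdio.h>

#define PAGE_SHIFT 12UL

int main(void)
{
        unsigned long slot_npages = 1UL << 20;    /* assumed 4 GiB memslot in 4 KiB pages */
        unsigned long porder = 24;                /* assumed 16 MiB VRMA page order */
        unsigned long hpt_mask = (1UL << 18) - 1; /* assumed HPT hash mask */

        /* Convert the memslot size into large (1 << porder)-byte pages. */
        unsigned long npages = slot_npages >> (porder - PAGE_SHIFT);

        /* The VRMA covers at most 1 TiB, i.e. 2^40 bytes. */
        if (npages > 1UL << (40 - porder))
                npages = 1UL << (40 - porder);

        /* Don't create more entries than the HPT has hash buckets. */
        if (npages > hpt_mask + 1)
                npages = hpt_mask + 1;

        printf("would map %lu VRMA pages\n", npages);
        return 0;
}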
448 long index, ret, npages; in kvmppc_book3s_hv_page_fault() local
513 npages = get_user_pages_fast(hva, 1, writing, pages); in kvmppc_book3s_hv_page_fault()
514 if (npages < 1) { in kvmppc_book3s_hv_page_fault()
660 memslot->npages * sizeof(*memslot->arch.rmap)); in kvmppc_rmap_reset()
684 (memslot->npages << PAGE_SHIFT)); in kvm_handle_hva_range()
796 for (n = memslot->npages; n; --n) { in kvmppc_core_flush_memslot_hv()
1013 gfn >= memslot->base_gfn + memslot->npages) in harvest_vpa_dirty()
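The single matches in kvmppc_rmap_reset(), kvm_handle_hva_range(), kvmppc_core_flush_memslot_hv() and harvest_vpa_dirty() all treat memslot->npages as the slot length in pages. A small standalone sketch of the two common idioms, using a simplified memslot struct that is not the kernel's definition: shifting by PAGE_SHIFT to get a byte length, and checking that a gfn lies within [base_gfn, base_gfn + npages).

/* Sketch of the memslot length and gfn-range idioms matched above. */
#include <stdbool.h>
#include <stdio.h>

#define PAGE_SHIFT 12UL

struct memslot {
        unsigned long base_gfn;
        unsigned long npages;
};

static bool gfn_in_memslot(const struct memslot *slot, unsigned long gfn)
{
        return gfn >= slot->base_gfn &&
               gfn < slot->base_gfn + slot->npages;
}

int main(void)
{
        struct memslot slot = { .base_gfn = 0x100, .npages = 0x200 };

        printf("slot length: %lu bytes\n", slot.npages << PAGE_SHIFT);
        printf("gfn 0x150 in slot: %d\n", gfn_in_memslot(&slot, 0x150));
        printf("gfn 0x400 in slot: %d\n", gfn_in_memslot(&slot, 0x400));
        return 0;
}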
1030 for (i = 0; i < memslot->npages; ++i) { in kvmppc_hv_get_dirty_log()
1031 int npages = kvm_test_clear_dirty_npages(kvm, rmapp); in kvmppc_hv_get_dirty_log() local
1037 if (npages && map) in kvmppc_hv_get_dirty_log()
1038 for (j = i; npages; ++j, --npages) in kvmppc_hv_get_dirty_log()
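The kvmppc_hv_get_dirty_log() matches show a nested pattern: the outer loop walks every page of the slot, the per-entry dirty test can report several contiguous dirty pages at once, and those are then marked in the caller's map. A rough userspace sketch of that control flow, using a plain byte array in place of the kernel bitmap and a fake helper standing in for kvm_test_clear_dirty_npages().

/* Sketch of the dirty-bitmap fill loop in kvmppc_hv_get_dirty_log(). */
#include <stdio.h>

#define SLOT_NPAGES 16

/* Fake: pretend pages 4..7 are dirty, reported once at index 4. */
static int fake_test_clear_dirty_npages(unsigned long i)
{
        return i == 4 ? 4 : 0;
}

int main(void)
{
        unsigned char map[SLOT_NPAGES] = { 0 };
        unsigned long i, j;

        for (i = 0; i < SLOT_NPAGES; ++i) {
                int npages = fake_test_clear_dirty_npages(i);

                /* Mark every page covered by this dirty report. */
                if (npages)
                        for (j = i; npages; ++j, --npages)
                                map[j] = 1;
        }

        for (i = 0; i < SLOT_NPAGES; ++i)
                printf("%d", map[i]);
        printf("\n");
        return 0;
}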
1061 int npages; in kvmppc_pin_guest_page() local
1070 npages = get_user_pages_fast(hva, 1, 1, pages); in kvmppc_pin_guest_page()
1071 if (npages < 1) in kvmppc_pin_guest_page()
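Both kvmppc_book3s_hv_page_fault() (lines 513-514) and kvmppc_pin_guest_page() (lines 1070-1071) rely on the same convention: get_user_pages_fast() returns the number of pages pinned or a negative errno, so a result below 1 means the single requested page was not pinned. The stub below is a userspace mock that only mimics that return convention for illustration; it is not the kernel API.

/* Mock of the get_user_pages_fast() return-value check used above. */
#include <stdio.h>
#include <errno.h>

struct page;    /* opaque stand-in for the kernel's struct page */

/* Mock: pretend the single requested page could not be pinned. */
static long get_user_pages_fast_mock(unsigned long hva, int nr_pages,
                                     int write, struct page **pages)
{
        (void)hva; (void)nr_pages; (void)write; (void)pages;
        return -EFAULT;
}

int main(void)
{
        struct page *pages[1];
        long npages = get_user_pages_fast_mock(0x10000UL, 1, 1, pages);

        if (npages < 1) {
                /* The real callers bail out or fall back here. */
                printf("pin failed: %ld\n", npages);
                return 1;
        }
        return 0;
}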