Lines matching refs:pgd

157 static void clear_pgd_entry(struct kvm *kvm, pgd_t *pgd, phys_addr_t addr)  in clear_pgd_entry()  argument
159 pud_t *pud_table __maybe_unused = pud_offset(pgd, 0); in clear_pgd_entry()
160 pgd_clear(pgd); in clear_pgd_entry()
163 put_page(virt_to_page(pgd)); in clear_pgd_entry()
261 static void unmap_puds(struct kvm *kvm, pgd_t *pgd, in unmap_puds() argument
267 start_pud = pud = pud_offset(pgd, addr); in unmap_puds()
287 clear_pgd_entry(kvm, pgd, start_addr); in unmap_puds()
294 pgd_t *pgd; in unmap_range() local
298 pgd = pgdp + kvm_pgd_index(addr); in unmap_range()
301 if (!pgd_none(*pgd)) in unmap_range()
302 unmap_puds(kvm, pgd, addr, next); in unmap_range()
303 } while (pgd++, addr = next, addr != end); in unmap_range()
336 static void stage2_flush_puds(struct kvm *kvm, pgd_t *pgd, in stage2_flush_puds() argument
342 pud = pud_offset(pgd, addr); in stage2_flush_puds()
360 pgd_t *pgd; in stage2_flush_memslot() local
362 pgd = kvm->arch.pgd + kvm_pgd_index(addr); in stage2_flush_memslot()
365 stage2_flush_puds(kvm, pgd, addr, next); in stage2_flush_memslot()
366 } while (pgd++, addr = next, addr != end); in stage2_flush_memslot()
502 static int create_hyp_pud_mappings(pgd_t *pgd, unsigned long start, in create_hyp_pud_mappings() argument
513 pud = pud_offset(pgd, addr); in create_hyp_pud_mappings()
540 pgd_t *pgd; in __create_hyp_mappings() local
549 pgd = pgdp + pgd_index(addr); in __create_hyp_mappings()
551 if (pgd_none(*pgd)) { in __create_hyp_mappings()
558 pgd_populate(NULL, pgd, pud); in __create_hyp_mappings()
559 get_page(virt_to_page(pgd)); in __create_hyp_mappings()
560 kvm_flush_dcache_to_poc(pgd, sizeof(*pgd)); in __create_hyp_mappings()
564 err = create_hyp_pud_mappings(pgd, addr, next, pfn, prot); in __create_hyp_mappings()
668 pgd_t *pgd; in kvm_alloc_stage2_pgd() local
671 if (kvm->arch.pgd != NULL) { in kvm_alloc_stage2_pgd()
698 pgd = (pgd_t *)kmalloc(PTRS_PER_S2_PGD * sizeof(pgd_t), in kvm_alloc_stage2_pgd()
701 if (!pgd) { in kvm_alloc_stage2_pgd()
709 pgd_populate(NULL, pgd + i, in kvm_alloc_stage2_pgd()
712 pud_populate(NULL, pud_offset(pgd, 0) + i, in kvm_alloc_stage2_pgd()
720 pgd = (pgd_t *)hwpgd; in kvm_alloc_stage2_pgd()
723 kvm_clean_pgd(pgd); in kvm_alloc_stage2_pgd()
724 kvm->arch.pgd = pgd; in kvm_alloc_stage2_pgd()
741 unmap_range(kvm, kvm->arch.pgd, start, size); in unmap_stage2_range()
822 if (kvm->arch.pgd == NULL) in kvm_free_stage2_pgd()
828 kfree(kvm->arch.pgd); in kvm_free_stage2_pgd()
830 kvm->arch.pgd = NULL; in kvm_free_stage2_pgd()
836 pgd_t *pgd; in stage2_get_pud() local
839 pgd = kvm->arch.pgd + kvm_pgd_index(addr); in stage2_get_pud()
840 if (WARN_ON(pgd_none(*pgd))) { in stage2_get_pud()
844 pgd_populate(NULL, pgd, pud); in stage2_get_pud()
845 get_page(virt_to_page(pgd)); in stage2_get_pud()
848 return pud_offset(pgd, addr); in stage2_get_pud()
1102 static void stage2_wp_puds(pgd_t *pgd, phys_addr_t addr, phys_addr_t end) in stage2_wp_puds() argument
1107 pud = pud_offset(pgd, addr); in stage2_wp_puds()
1126 pgd_t *pgd; in stage2_wp_range() local
1129 pgd = kvm->arch.pgd + kvm_pgd_index(addr); in stage2_wp_range()
1142 if (pgd_present(*pgd)) in stage2_wp_range()
1143 stage2_wp_puds(pgd, addr, next); in stage2_wp_range()
1144 } while (pgd++, addr = next, addr != end); in stage2_wp_range()
1515 if (!kvm->arch.pgd) in kvm_unmap_hva()
1526 if (!kvm->arch.pgd) in kvm_unmap_hva_range()
1555 if (!kvm->arch.pgd) in kvm_set_spte_hva()
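
What follows is a minimal, self-contained sketch of the first-level walk idiom these call sites share: unmap_range(), stage2_flush_memslot() and stage2_wp_range() all index the table with kvm_pgd_index(), clamp each step to the end of the current pgd entry, and only descend when the entry is populated. The types, constants and helpers below (phys_addr_t as a plain uint64_t, walk_range(), visit_puds(), the toy pgd_t and geometry) are illustrative user-space stand-ins, not the kernel's definitions.

#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>

typedef uint64_t phys_addr_t;

/* Assumed toy geometry: 1 GiB per first-level entry, 4-entry stage-2 pgd. */
#define PGDIR_SHIFT     30
#define PGDIR_SIZE      ((phys_addr_t)1 << PGDIR_SHIFT)
#define PGDIR_MASK      (~(PGDIR_SIZE - 1))
#define PTRS_PER_S2_PGD 4

typedef struct { bool present; } pgd_t;   /* stand-in for the real pgd_t */

static unsigned long kvm_pgd_index(phys_addr_t addr)
{
	return (addr >> PGDIR_SHIFT) & (PTRS_PER_S2_PGD - 1);
}

/* Clamp one step to the end of the current pgd entry, like pgd_addr_end(). */
static phys_addr_t stage2_pgd_addr_end(phys_addr_t addr, phys_addr_t end)
{
	phys_addr_t boundary = (addr + PGDIR_SIZE) & PGDIR_MASK;

	return boundary - 1 < end - 1 ? boundary : end;
}

/* Stand-in for the second-level work: unmap_puds(), stage2_flush_puds(), ... */
static void visit_puds(pgd_t *pgd, phys_addr_t addr, phys_addr_t end)
{
	(void)pgd;
	printf("descend into pud table for [%#llx, %#llx)\n",
	       (unsigned long long)addr, (unsigned long long)end);
}

/* Shared loop shape, cf. lines 294-303, 360-366 and 1126-1144 above. */
static void walk_range(pgd_t *pgdp, phys_addr_t start, phys_addr_t size)
{
	phys_addr_t addr = start, end = start + size, next;
	pgd_t *pgd = pgdp + kvm_pgd_index(addr);

	do {
		next = stage2_pgd_addr_end(addr, end);
		if (pgd->present)            /* stands in for !pgd_none(*pgd) */
			visit_puds(pgd, addr, next);
	} while (pgd++, addr = next, addr != end);
}

int main(void)
{
	pgd_t pgd[PTRS_PER_S2_PGD] = { {true}, {false}, {true}, {true} };

	/* Walk 2.5 GiB of guest address space starting at 512 MiB. */
	walk_range(pgd, 0x20000000ULL, 0xa0000000ULL);
	return 0;
}

The same clamp-and-step pattern repeats one level down in the pud and pmd walkers; the entry-present check before descending is what lets kvm_unmap_hva() and friends bail out early when kvm->arch.pgd has already been freed.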