Lines Matching refs:iterator
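
All of the matches below come from KVM's x86 shadow-MMU code (arch/x86/kvm/mmu.c in 3.x/early-4.x kernels, judging by the signatures shown). The refs search never displays the iterator type itself, so here is its definition as recalled from mmu.c of that era; treat the exact field list as an assumption rather than something this listing proves:

    struct kvm_shadow_walk_iterator {
            u64 addr;          /* guest address being resolved */
            hpa_t shadow_addr; /* HPA of the shadow page table for the current level */
            u64 *sptep;        /* pointer to the current shadow PTE */
            int level;         /* current level, counting down to PT_PAGE_TABLE_LEVEL */
            unsigned index;    /* index of sptep within its table */
    };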

2081 static void shadow_walk_init(struct kvm_shadow_walk_iterator *iterator, in shadow_walk_init() argument
2084 iterator->addr = addr; in shadow_walk_init()
2085 iterator->shadow_addr = vcpu->arch.mmu.root_hpa; in shadow_walk_init()
2086 iterator->level = vcpu->arch.mmu.shadow_root_level; in shadow_walk_init()
2088 if (iterator->level == PT64_ROOT_LEVEL && in shadow_walk_init()
2091 --iterator->level; in shadow_walk_init()
2093 if (iterator->level == PT32E_ROOT_LEVEL) { in shadow_walk_init()
2094 iterator->shadow_addr in shadow_walk_init()
2096 iterator->shadow_addr &= PT64_BASE_ADDR_MASK; in shadow_walk_init()
2097 --iterator->level; in shadow_walk_init()
2098 if (!iterator->shadow_addr) in shadow_walk_init()
2099 iterator->level = 0; in shadow_walk_init()
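
The assignment started on line 2094 continues onto a source line that does not itself mention iterator, so the listing truncates it. Assuming the usual PAE handling in shadow_walk_init() of this era, the full branch reads roughly:

    if (iterator->level == PT32E_ROOT_LEVEL) {
            /* With PAE paging the walk starts from one of the four
             * pae_root entries, selected by bits 31:30 of the address;
             * this is the continuation line the refs listing drops. */
            iterator->shadow_addr
                    = vcpu->arch.mmu.pae_root[(addr >> 30) & 3];
            iterator->shadow_addr &= PT64_BASE_ADDR_MASK;
            --iterator->level;
            if (!iterator->shadow_addr)
                    iterator->level = 0;
    }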

2103 static bool shadow_walk_okay(struct kvm_shadow_walk_iterator *iterator) in shadow_walk_okay() argument
2105 if (iterator->level < PT_PAGE_TABLE_LEVEL) in shadow_walk_okay()
2108 iterator->index = SHADOW_PT_INDEX(iterator->addr, iterator->level); in shadow_walk_okay()
2109 iterator->sptep = ((u64 *)__va(iterator->shadow_addr)) + iterator->index; in shadow_walk_okay()

2113 static void __shadow_walk_next(struct kvm_shadow_walk_iterator *iterator, in __shadow_walk_next() argument
2116 if (is_last_spte(spte, iterator->level)) { in __shadow_walk_next()
2117 iterator->level = 0; in __shadow_walk_next()
2121 iterator->shadow_addr = spte & PT64_BASE_ADDR_MASK; in __shadow_walk_next()
2122 --iterator->level; in __shadow_walk_next()

2125 static void shadow_walk_next(struct kvm_shadow_walk_iterator *iterator) in shadow_walk_next() argument
2127 return __shadow_walk_next(iterator, *iterator->sptep); in shadow_walk_next()
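
The call sites below never invoke shadow_walk_init()/shadow_walk_okay()/shadow_walk_next() directly; they go through the for_each_shadow_entry() helpers, which do not appear in a refs:iterator search because their parameter is named _walker. In mmu.c of this era the two macros look roughly like this; the lockless variant reads the spte once with mmu_spte_get_lockless() and hands it to __shadow_walk_next() so the walk never re-reads a possibly changing entry:

    #define for_each_shadow_entry(_vcpu, _addr, _walker)            \
            for (shadow_walk_init(&(_walker), _vcpu, _addr);        \
                 shadow_walk_okay(&(_walker));                      \
                 shadow_walk_next(&(_walker)))

    #define for_each_shadow_entry_lockless(_vcpu, _addr, _walker, spte)      \
            for (shadow_walk_init(&(_walker), _vcpu, _addr);                 \
                 shadow_walk_okay(&(_walker)) &&                             \
                    ({ spte = mmu_spte_get_lockless(_walker.sptep); 1; });   \
                 __shadow_walk_next(&(_walker), spte))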

2739 struct kvm_shadow_walk_iterator iterator; in __direct_map() local
2747 for_each_shadow_entry(vcpu, (u64)gfn << PAGE_SHIFT, iterator) { in __direct_map()
2748 if (iterator.level == level) { in __direct_map()
2749 mmu_set_spte(vcpu, iterator.sptep, ACC_ALL, in __direct_map()
2752 direct_pte_prefetch(vcpu, iterator.sptep); in __direct_map()
2757 drop_large_spte(vcpu, iterator.sptep); in __direct_map()
2758 if (!is_shadow_present_pte(*iterator.sptep)) { in __direct_map()
2759 u64 base_addr = iterator.addr; in __direct_map()
2761 base_addr &= PT64_LVL_ADDR_MASK(iterator.level); in __direct_map()
2763 sp = kvm_mmu_get_page(vcpu, pseudo_gfn, iterator.addr, in __direct_map()
2764 iterator.level - 1, in __direct_map()
2765 1, ACC_ALL, iterator.sptep); in __direct_map()
2767 link_shadow_page(iterator.sptep, sp, true); in __direct_map()
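
Reassembled, the walk in __direct_map() has the shape sketched below. The mmu_set_spte() argument list and the pseudo_gfn computation are filled in from memory of this era of mmu.c (sp and pseudo_gfn are locals declared earlier in the function), so treat them as an approximation rather than lines proved by this listing:

    /* Approximate reassembly; write/level/gfn/pfn/prefault/map_writable are
     * the function's parameters as recalled, not shown by the listing. */
    for_each_shadow_entry(vcpu, (u64)gfn << PAGE_SHIFT, iterator) {
            if (iterator.level == level) {
                    /* Target level reached: install the leaf spte,
                     * prefetch neighbouring entries, and stop. */
                    mmu_set_spte(vcpu, iterator.sptep, ACC_ALL,
                                 write, level, gfn, pfn, prefault,
                                 map_writable);
                    direct_pte_prefetch(vcpu, iterator.sptep);
                    break;
            }

            /* Intermediate level: make sure a shadow page exists to
             * descend into, allocating and linking one if needed. */
            drop_large_spte(vcpu, iterator.sptep);
            if (!is_shadow_present_pte(*iterator.sptep)) {
                    u64 base_addr = iterator.addr;

                    base_addr &= PT64_LVL_ADDR_MASK(iterator.level);
                    pseudo_gfn = base_addr >> PAGE_SHIFT;
                    sp = kvm_mmu_get_page(vcpu, pseudo_gfn, iterator.addr,
                                          iterator.level - 1,
                                          1, ACC_ALL, iterator.sptep);

                    link_shadow_page(iterator.sptep, sp, true);
            }
    }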

2926 struct kvm_shadow_walk_iterator iterator; in fast_page_fault() local
2938 for_each_shadow_entry_lockless(vcpu, gva, iterator, spte) in fast_page_fault()
2939 if (!is_shadow_present_pte(spte) || iterator.level < level) in fast_page_fault()
2951 sp = page_header(__pa(iterator.sptep)); in fast_page_fault()
2991 ret = fast_pf_fix_direct_spte(vcpu, sp, iterator.sptep, spte); in fast_page_fault()
2993 trace_fast_page_fault(vcpu, gva, error_code, iterator.sptep, in fast_page_fault()
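
fast_page_fault() does this walk without taking mmu_lock; the bracketing calls are not in the listing because they never mention iterator. Assuming the usual pattern around these fragments, the lockless section looks roughly like:

    walk_shadow_page_lockless_begin(vcpu);

    for_each_shadow_entry_lockless(vcpu, gva, iterator, spte)
            if (!is_shadow_present_pte(spte) || iterator.level < level)
                    break;

    /* iterator.sptep points at the last spte examined; page_header() on
     * its physical address recovers the kvm_mmu_page containing it. */
    sp = page_header(__pa(iterator.sptep));

    /* ... checks on sp and spte elided; recalled, not shown by the listing ... */

    ret = fast_pf_fix_direct_spte(vcpu, sp, iterator.sptep, spte);
    trace_fast_page_fault(vcpu, gva, error_code, iterator.sptep,
                          spte, ret);
    walk_shadow_page_lockless_end(vcpu);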

3325 struct kvm_shadow_walk_iterator iterator; in walk_shadow_page_get_mmio_spte() local
3332 for_each_shadow_entry_lockless(vcpu, addr, iterator, spte) in walk_shadow_page_get_mmio_spte()
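
walk_shadow_page_get_mmio_spte() touches the iterator only in its declaration and loop header, so the listing shows little of it. In this era the loop simply descends until the last present entry and hands that spte back for the MMIO-marker check; a hedged sketch (the exact return convention differs between kernel versions):

    u64 spte = 0ull;

    walk_shadow_page_lockless_begin(vcpu);
    for_each_shadow_entry_lockless(vcpu, addr, iterator, spte)
            if (!is_shadow_present_pte(spte))
                    break;
    walk_shadow_page_lockless_end(vcpu);

    /* 'spte' now holds the deepest entry found for 'addr' (or 0);
     * the caller inspects it for the MMIO marker bits. Sketch only. */
    return spte;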

4842 struct kvm_shadow_walk_iterator iterator; in kvm_mmu_get_spte_hierarchy() local
4850 for_each_shadow_entry_lockless(vcpu, addr, iterator, spte) { in kvm_mmu_get_spte_hierarchy()
4851 sptes[iterator.level-1] = spte; in kvm_mmu_get_spte_hierarchy()
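
The kvm_mmu_get_spte_hierarchy() fragments assemble into a small helper that snapshots the spte at every level for a given address (VMX uses it when reporting EPT misconfigurations). A reconstruction with the unmatched lines filled in from memory of this era, so approximate:

    /* Reconstruction; unmatched lines recalled from this era of mmu.c. */
    int kvm_mmu_get_spte_hierarchy(struct kvm_vcpu *vcpu, u64 addr, u64 sptes[4])
    {
            struct kvm_shadow_walk_iterator iterator;
            u64 spte;
            int nr_sptes = 0;

            if (!VALID_PAGE(vcpu->arch.mmu.root_hpa))
                    return nr_sptes;

            walk_shadow_page_lockless_begin(vcpu);
            for_each_shadow_entry_lockless(vcpu, addr, iterator, spte) {
                    /* sptes[0] is the 4K level, sptes[3] the top level. */
                    sptes[iterator.level - 1] = spte;
                    nr_sptes++;
                    if (!is_shadow_present_pte(spte))
                            break;
            }
            walk_shadow_page_lockless_end(vcpu);

            return nr_sptes;
    }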