Lines matching refs:slot (all hits are in virt/kvm/kvm_main.c)
824 struct kvm_memory_slot *slot; in __kvm_set_memory_region() local
835 as_id = mem->slot >> 16; in __kvm_set_memory_region()
836 id = (u16)mem->slot; in __kvm_set_memory_region()
855 slot = id_to_memslot(__kvm_memslots(kvm, as_id), id); in __kvm_set_memory_region()
862 new = old = *slot; in __kvm_set_memory_region()
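
Lines 835-836 above decode the userspace-visible slot field: the address-space id lives in the top 16 bits, the memslot id in the low 16. A minimal user-space sketch of that packing (decode_slot is a hypothetical helper, not a kernel function):

#include <stdint.h>
#include <stdio.h>

static void decode_slot(uint32_t slot, uint16_t *as_id, uint16_t *id)
{
        *as_id = slot >> 16;   /* mirrors: as_id = mem->slot >> 16 */
        *id = (uint16_t)slot;  /* mirrors: id = (u16)mem->slot     */
}

int main(void)
{
        uint16_t as_id, id;

        decode_slot((1u << 16) | 5, &as_id, &id);
        printf("as_id=%u id=%u\n", as_id, id);  /* prints: as_id=1 id=5 */
        return 0;
}

The same split explains the bounds check at line 1015: only the low 16 bits are compared against KVM_USER_MEM_SLOTS.
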
899 kvm_for_each_memslot(slot, __kvm_memslots(kvm, as_id)) { in __kvm_set_memory_region()
900 if ((slot->id >= KVM_USER_MEM_SLOTS) || in __kvm_set_memory_region()
901 (slot->id == id)) in __kvm_set_memory_region()
903 if (!((base_gfn + npages <= slot->base_gfn) || in __kvm_set_memory_region()
904 (base_gfn >= slot->base_gfn + slot->npages))) in __kvm_set_memory_region()
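
Lines 903-904 are the standard half-open interval test: the new range [base_gfn, base_gfn + npages) collides with an existing slot unless it ends at or before the slot begins, or starts at or after the slot ends. Restated as a self-contained predicate (names are illustrative):

#include <stdbool.h>
#include <stdint.h>

typedef uint64_t gfn_t;

static bool gfn_ranges_overlap(gfn_t base_a, uint64_t npages_a,
                               gfn_t base_b, uint64_t npages_b)
{
        /* Overlap iff neither range lies entirely below the other;
         * this is the negated disjunction at lines 903-904. */
        return !(base_a + npages_a <= base_b ||
                 base_a >= base_b + npages_b);
}
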
933 slot = id_to_memslot(slots, id); in __kvm_set_memory_region()
934 slot->flags |= KVM_MEMSLOT_INVALID; in __kvm_set_memory_region()
947 kvm_arch_flush_shadow_memslot(kvm, slot); in __kvm_set_memory_region()
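
Lines 933-947 sketch the delete/move protocol: a copy of the live slot is first marked KVM_MEMSLOT_INVALID, so lookups such as the one at line 1212 start failing, and only then are the architecture's shadow mappings flushed. The flag sits above the 16 bits reserved for userspace flags (value as in kvm_host.h of this era; treat it as illustrative):

#define KVM_MEMSLOT_INVALID (1UL << 16)  /* internal, not a uapi flag */

static int slot_usable(unsigned long flags)
{
        return !(flags & KVM_MEMSLOT_INVALID);  /* the test at line 1212 */
}
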
1015 if ((u16)mem->slot >= KVM_USER_MEM_SLOTS) in kvm_vm_ioctl_set_memory_region()
1031 as_id = log->slot >> 16; in kvm_get_dirty_log()
1032 id = (u16)log->slot; in kvm_get_dirty_log()
1094 as_id = log->slot >> 16; in kvm_get_dirty_log_protect()
1095 id = (u16)log->slot; in kvm_get_dirty_log_protect()
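
Lines 1031-1032 (and 1094-1095 in the protect variant) apply the same 16/16 split to dirty-log requests. A hedged user-space caller, assuming vm_fd is a VM file descriptor and bitmap is sized to hold at least one bit per page of the slot, rounded up to a long:

#include <linux/kvm.h>
#include <stdint.h>
#include <string.h>
#include <sys/ioctl.h>

static int get_dirty_bitmap(int vm_fd, uint16_t as_id, uint16_t slot_id,
                            void *bitmap)
{
        struct kvm_dirty_log log;

        memset(&log, 0, sizeof(log));
        log.slot = ((uint32_t)as_id << 16) | slot_id;  /* same packing */
        log.dirty_bitmap = bitmap;                     /* one bit per page */
        return ioctl(vm_fd, KVM_GET_DIRTY_LOG, &log);
}
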
1204 static bool memslot_is_readonly(struct kvm_memory_slot *slot) in memslot_is_readonly() argument
1206 return slot->flags & KVM_MEM_READONLY; in memslot_is_readonly()
1209 static unsigned long __gfn_to_hva_many(struct kvm_memory_slot *slot, gfn_t gfn, in __gfn_to_hva_many() argument
1212 if (!slot || slot->flags & KVM_MEMSLOT_INVALID) in __gfn_to_hva_many()
1215 if (memslot_is_readonly(slot) && write) in __gfn_to_hva_many()
1219 *nr_pages = slot->npages - (gfn - slot->base_gfn); in __gfn_to_hva_many()
1221 return __gfn_to_hva_memslot(slot, gfn); in __gfn_to_hva_many()
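
After the validity and write-permission checks at lines 1212-1215, the translation itself (done by __gfn_to_hva_memslot(), defined in include/linux/kvm_host.h and therefore absent from this listing) is plain arithmetic; line 1219 additionally reports how many pages remain in the slot from gfn onward, which gfn_to_page_many_atomic() at line 1517 uses to cap its batch. A self-contained restatement:

#include <stdint.h>

#define PAGE_SHIFT 12  /* 4 KiB pages, as on x86 */
typedef uint64_t gfn_t;

static unsigned long gfn_to_hva_sketch(unsigned long userspace_addr,
                                       gfn_t base_gfn, uint64_t npages,
                                       gfn_t gfn, uint64_t *nr_pages)
{
        if (nr_pages)
                *nr_pages = npages - (gfn - base_gfn);  /* line 1219 */
        return userspace_addr + ((gfn - base_gfn) << PAGE_SHIFT);
}
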
1224 static unsigned long gfn_to_hva_many(struct kvm_memory_slot *slot, gfn_t gfn, in gfn_to_hva_many() argument
1227 return __gfn_to_hva_many(slot, gfn, nr_pages, true); in gfn_to_hva_many()
1230 unsigned long gfn_to_hva_memslot(struct kvm_memory_slot *slot, in gfn_to_hva_memslot() argument
1233 return gfn_to_hva_many(slot, gfn, NULL); in gfn_to_hva_memslot()
1253 unsigned long gfn_to_hva_memslot_prot(struct kvm_memory_slot *slot, in gfn_to_hva_memslot_prot() argument
1256 unsigned long hva = __gfn_to_hva_many(slot, gfn, NULL, false); in gfn_to_hva_memslot_prot()
1259 *writable = !memslot_is_readonly(slot); in gfn_to_hva_memslot_prot()
1266 struct kvm_memory_slot *slot = gfn_to_memslot(kvm, gfn); in gfn_to_hva_prot() local
1268 return gfn_to_hva_memslot_prot(slot, gfn, writable); in gfn_to_hva_prot()
1273 struct kvm_memory_slot *slot = kvm_vcpu_gfn_to_memslot(vcpu, gfn); in kvm_vcpu_gfn_to_hva_prot() local
1275 return gfn_to_hva_memslot_prot(slot, gfn, writable); in kvm_vcpu_gfn_to_hva_prot()
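
The _prot variants at lines 1253-1275 add an optional out-parameter so callers learn whether the slot accepts writes; the derivation is just the flag test from line 1206. A sketch, assuming KVM_MEM_READONLY is bit 1 of the uapi flags:

#include <stdbool.h>

#define KVM_MEM_READONLY (1u << 1)

static unsigned long hva_with_prot(unsigned long hva, unsigned int flags,
                                   bool *writable)
{
        if (writable)
                *writable = !(flags & KVM_MEM_READONLY);
        return hva;
}
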
1445 pfn_t __gfn_to_pfn_memslot(struct kvm_memory_slot *slot, gfn_t gfn, bool atomic, in __gfn_to_pfn_memslot() argument
1448 unsigned long addr = __gfn_to_hva_many(slot, gfn, NULL, write_fault); in __gfn_to_pfn_memslot()
1457 if (writable && memslot_is_readonly(slot)) { in __gfn_to_pfn_memslot()
1475 pfn_t gfn_to_pfn_memslot(struct kvm_memory_slot *slot, gfn_t gfn) in gfn_to_pfn_memslot() argument
1477 return __gfn_to_pfn_memslot(slot, gfn, false, NULL, true, NULL); in gfn_to_pfn_memslot()
1481 pfn_t gfn_to_pfn_memslot_atomic(struct kvm_memory_slot *slot, gfn_t gfn) in gfn_to_pfn_memslot_atomic() argument
1483 return __gfn_to_pfn_memslot(slot, gfn, true, NULL, true, NULL); in gfn_to_pfn_memslot_atomic()
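
The wrappers at lines 1477 and 1483 differ only in the atomic flag. For reference, the arguments they forward, with summaries that are this editor's reading of the __gfn_to_pfn_memslot() definition (verify against the source):

/*
 * __gfn_to_pfn_memslot(slot, gfn, atomic, async, write_fault, writable)
 *   atomic      - only attempt a fast, non-sleeping page lookup
 *   async       - out-param; non-NULL allows the fault to complete
 *                 asynchronously instead of blocking
 *   write_fault - resolve the page for writing
 *   writable    - out-param; whether the mapping is writable, forced
 *                 to false at line 1457 for read-only slots
 */
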
1511 int gfn_to_page_many_atomic(struct kvm_memory_slot *slot, gfn_t gfn, in gfn_to_page_many_atomic() argument
1517 addr = gfn_to_hva_many(slot, gfn, &entry); in gfn_to_page_many_atomic()
1623 static int __kvm_read_guest_page(struct kvm_memory_slot *slot, gfn_t gfn, in __kvm_read_guest_page() argument
1629 addr = gfn_to_hva_memslot_prot(slot, gfn, NULL); in __kvm_read_guest_page()
1641 struct kvm_memory_slot *slot = gfn_to_memslot(kvm, gfn); in kvm_read_guest_page() local
1643 return __kvm_read_guest_page(slot, gfn, data, offset, len); in kvm_read_guest_page()
1650 struct kvm_memory_slot *slot = kvm_vcpu_gfn_to_memslot(vcpu, gfn); in kvm_vcpu_read_guest_page() local
1652 return __kvm_read_guest_page(slot, gfn, data, offset, len); in kvm_vcpu_read_guest_page()
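
Lines 1623-1652 show the read path's shape: resolve the gfn to an hva (a read access, hence the NULL writable argument at line 1629), bail out on error, then copy. A user-space analogue, with a null check standing in for kvm_is_error_hva():

#include <errno.h>
#include <string.h>

static int read_guest_page_sketch(unsigned long hva, int offset,
                                  void *data, int len)
{
        if (!hva)  /* stands in for kvm_is_error_hva(addr) */
                return -EFAULT;
        memcpy(data, (const void *)(hva + offset), len);
        return 0;
}
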
1696 static int __kvm_read_guest_atomic(struct kvm_memory_slot *slot, gfn_t gfn, in __kvm_read_guest_atomic() argument
1702 addr = gfn_to_hva_memslot_prot(slot, gfn, NULL); in __kvm_read_guest_atomic()
1717 struct kvm_memory_slot *slot = gfn_to_memslot(kvm, gfn); in kvm_read_guest_atomic() local
1720 return __kvm_read_guest_atomic(slot, gfn, data, offset, len); in kvm_read_guest_atomic()
1728 struct kvm_memory_slot *slot = kvm_vcpu_gfn_to_memslot(vcpu, gfn); in kvm_vcpu_read_guest_atomic() local
1731 return __kvm_read_guest_atomic(slot, gfn, data, offset, len); in kvm_vcpu_read_guest_atomic()
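
The _atomic readers at lines 1696-1731 follow the same shape but must not sleep, so the kernel brackets the copy with pagefault_disable()/pagefault_enable() and uses the non-faulting copy primitive. Sketched from the function body (kernel context only, so shown as a fragment; verify against the source):

/*
 *      pagefault_disable();
 *      r = __copy_from_user_inatomic(data, (void __user *)addr + offset, len);
 *      pagefault_enable();
 *      return r ? -EFAULT : 0;
 */
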
1754 struct kvm_memory_slot *slot = gfn_to_memslot(kvm, gfn); in kvm_write_guest_page() local
1756 return __kvm_write_guest_page(slot, gfn, data, offset, len); in kvm_write_guest_page()
1763 struct kvm_memory_slot *slot = kvm_vcpu_gfn_to_memslot(vcpu, gfn); in kvm_vcpu_write_guest_page() local
1765 return __kvm_write_guest_page(slot, gfn, data, offset, len); in kvm_vcpu_write_guest_page()
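
The write path at lines 1754-1765 mirrors the read path with one step this listing omits: after a successful copy the kernel marks the page dirty (mark_page_dirty_in_slot()), setting the bit that KVM_GET_DIRTY_LOG later harvests at lines 1031-1095. A user-space analogue, with bitmap and rel_gfn (the gfn's offset within the slot) as assumed inputs:

#include <errno.h>
#include <stdint.h>
#include <string.h>

static int write_guest_page_sketch(unsigned long hva, int offset,
                                   const void *data, int len,
                                   uint64_t *bitmap, uint64_t rel_gfn)
{
        if (!hva)
                return -EFAULT;
        memcpy((void *)(hva + offset), data, len);
        if (bitmap)  /* dirty logging enabled for this slot */
                bitmap[rel_gfn / 64] |= (uint64_t)1 << (rel_gfn % 64);
        return 0;
}
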
2914 __u32 slot; member
2939 log.slot = compat_log.slot; in kvm_vm_compat_ioctl()
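
Lines 2914-2939 exist because struct kvm_dirty_log embeds a user pointer, whose width differs for a 32-bit process on a 64-bit kernel; the compat handler copies the fixed-width fields (line 2939) and converts the pointer separately. The 32-bit layout, sketched from the compat definition in the same file:

struct compat_kvm_dirty_log {
        __u32 slot;
        __u32 padding1;
        union {
                compat_uptr_t dirty_bitmap;  /* 32-bit user pointer */
                __u64 padding2;
        };
};
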