Lines matching refs:gpa

2077 gpa_t gpa = data & ~0x3f; in kvm_pv_enable_async_pf() local
2091 if (kvm_gfn_to_hva_cache_init(vcpu->kvm, &vcpu->arch.apf.data, gpa, in kvm_pv_enable_async_pf()
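The two hits above are the async page fault setup path (kvm_pv_enable_async_pf()): the MSR value written by the guest packs the 64-byte-aligned GPA of the shared async-PF data area in the upper bits and enable/feature flags in the low 6 bits, which is why the GPA is recovered with data & ~0x3f before kvm_gfn_to_hva_cache_init() caches its host mapping. A minimal userspace-style sketch of the same unpacking (the example MSR value and flag split are illustrative, not taken from the kernel):

#include <stdint.h>
#include <stdio.h>

typedef uint64_t gpa_t;

int main(void)
{
	/* Illustrative MSR value: a 64-byte-aligned GPA with the low enable
	 * bit set; the real flag layout lives in the KVM ABI headers. */
	uint64_t data  = 0x12345040ULL | 0x1ULL;

	gpa_t    gpa   = data & ~0x3fULL;   /* strip the 6 flag bits, keep the GPA */
	uint64_t flags = data &  0x3fULL;   /* inspected separately by the real code */

	printf("gpa = %#llx, flags = %#llx\n",
	       (unsigned long long)gpa, (unsigned long long)flags);
	return 0;
}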
4198 gpa_t translate_nested_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access, in translate_nested_gpa() argument
4207 t_gpa = vcpu->arch.mmu.gva_to_gpa(vcpu, gpa, access, exception); in translate_nested_gpa()
4250 gpa_t gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, addr, access, in kvm_read_guest_virt_helper() local
4256 if (gpa == UNMAPPED_GVA) in kvm_read_guest_virt_helper()
4258 ret = kvm_read_guest_page(vcpu->kvm, gpa >> PAGE_SHIFT, data, in kvm_read_guest_virt_helper()
4284 gpa_t gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, addr, access|PFERR_FETCH_MASK, in kvm_fetch_guest_virt() local
4286 if (unlikely(gpa == UNMAPPED_GVA)) in kvm_fetch_guest_virt()
4292 ret = kvm_read_guest_page(vcpu->kvm, gpa >> PAGE_SHIFT, val, in kvm_fetch_guest_virt()
4330 gpa_t gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, addr, in kvm_write_guest_virt_system() local
4337 if (gpa == UNMAPPED_GVA) in kvm_write_guest_virt_system()
4339 ret = kvm_write_guest(vcpu->kvm, gpa, data, towrite); in kvm_write_guest_virt_system()
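kvm_read_guest_virt_helper(), kvm_fetch_guest_virt() and kvm_write_guest_virt_system() all follow the same pattern: translate the guest virtual address through walk_mmu->gva_to_gpa(), fail the access if the result is UNMAPPED_GVA, then copy at most up to the next page boundary and loop, since a range that is contiguous in guest virtual space may map to scattered guest pages. A simplified, self-contained sketch of that chunking loop; the translate() and read_page() callbacks stand in for the kernel's MMU hook and kvm_read_guest_page() and are purely illustrative:

#include <stdint.h>

#define PAGE_SHIFT    12
#define PAGE_SIZE     (1UL << PAGE_SHIFT)
#define UNMAPPED_GVA  (~(uint64_t)0)

typedef uint64_t gva_t;
typedef uint64_t gpa_t;

/* Stand-in for walk_mmu->gva_to_gpa(): a real implementation walks the
 * guest page tables and checks the access/permission bits. */
typedef gpa_t (*gva_to_gpa_fn)(gva_t gva, uint32_t access);

/* Stand-in for kvm_read_guest_page(). */
typedef int (*read_page_fn)(uint64_t gfn, void *data,
			    unsigned offset, unsigned len);

static int read_guest_virt(gva_t addr, void *val, unsigned bytes,
			   uint32_t access,
			   gva_to_gpa_fn translate, read_page_fn read_page)
{
	uint8_t *data = val;

	while (bytes) {
		gpa_t gpa = translate(addr, access);
		unsigned offset = addr & (PAGE_SIZE - 1);
		unsigned toread = bytes;

		if (gpa == UNMAPPED_GVA)
			return -1;                       /* raises #PF in the real code */
		if (toread > PAGE_SIZE - offset)
			toread = PAGE_SIZE - offset;     /* never cross a page per step */

		if (read_page(gpa >> PAGE_SHIFT, data, offset, toread))
			return -1;

		bytes -= toread;
		data  += toread;
		addr  += toread;
	}
	return 0;
}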
4355 gpa_t *gpa, struct x86_exception *exception, in vcpu_mmio_gva_to_gpa() argument
4364 *gpa = vcpu->arch.mmio_gfn << PAGE_SHIFT | in vcpu_mmio_gva_to_gpa()
4366 trace_vcpu_match_mmio(gva, *gpa, write, false); in vcpu_mmio_gva_to_gpa()
4370 *gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in vcpu_mmio_gva_to_gpa()
4372 if (*gpa == UNMAPPED_GVA) in vcpu_mmio_gva_to_gpa()
4376 if ((*gpa & PAGE_MASK) == APIC_DEFAULT_PHYS_BASE) in vcpu_mmio_gva_to_gpa()
4379 if (vcpu_match_mmio_gpa(vcpu, *gpa)) { in vcpu_mmio_gva_to_gpa()
4380 trace_vcpu_match_mmio(gva, *gpa, write, true); in vcpu_mmio_gva_to_gpa()
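vcpu_mmio_gva_to_gpa() decides whether an emulated access should be treated as MMIO: it first consults a small per-vCPU cache of the last MMIO GVA/GFN match, otherwise translates through walk_mmu->gva_to_gpa(), fails on UNMAPPED_GVA, and classifies a hit on the local APIC's default page (APIC_DEFAULT_PHYS_BASE) or on a cached MMIO GPA as MMIO. A compact sketch of that classification; the cache is reduced to a single variable and the return convention is simplified, so this is illustrative rather than the kernel's exact logic:

#include <stdint.h>

#define PAGE_SHIFT              12
#define PAGE_MASK               (~((uint64_t)(1 << PAGE_SHIFT) - 1))
#define UNMAPPED_GVA            (~(uint64_t)0)
#define APIC_DEFAULT_PHYS_BASE  0xfee00000ULL

/* Return values mirroring the listing: 1 = MMIO, 0 = normal RAM, -1 = fault. */
static int classify_access(uint64_t gpa, uint64_t cached_mmio_gpa)
{
	if (gpa == UNMAPPED_GVA)
		return -1;                       /* translation failed: inject #PF */

	if ((gpa & PAGE_MASK) == APIC_DEFAULT_PHYS_BASE)
		return 1;                        /* local APIC page is always MMIO */

	if (cached_mmio_gpa &&
	    (gpa & PAGE_MASK) == (cached_mmio_gpa & PAGE_MASK))
		return 1;                        /* matches the last known MMIO page */

	return 0;                                /* ordinary guest RAM */
}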
4387 int emulator_write_phys(struct kvm_vcpu *vcpu, gpa_t gpa, in emulator_write_phys() argument
4392 ret = kvm_write_guest(vcpu->kvm, gpa, val, bytes); in emulator_write_phys()
4395 kvm_mmu_pte_write(vcpu, gpa, val, bytes); in emulator_write_phys()
4402 int (*read_write_emulate)(struct kvm_vcpu *vcpu, gpa_t gpa,
4404 int (*read_write_mmio)(struct kvm_vcpu *vcpu, gpa_t gpa,
4406 int (*read_write_exit_mmio)(struct kvm_vcpu *vcpu, gpa_t gpa,
4415 vcpu->mmio_fragments[0].gpa, *(u64 *)val); in read_prepare()
4423 static int read_emulate(struct kvm_vcpu *vcpu, gpa_t gpa, in read_emulate() argument
4426 return !kvm_read_guest(vcpu->kvm, gpa, val, bytes); in read_emulate()
4429 static int write_emulate(struct kvm_vcpu *vcpu, gpa_t gpa, in write_emulate() argument
4432 return emulator_write_phys(vcpu, gpa, val, bytes); in write_emulate()
4435 static int write_mmio(struct kvm_vcpu *vcpu, gpa_t gpa, int bytes, void *val) in write_mmio() argument
4437 trace_kvm_mmio(KVM_TRACE_MMIO_WRITE, bytes, gpa, *(u64 *)val); in write_mmio()
4438 return vcpu_mmio_write(vcpu, gpa, bytes, val); in write_mmio()
4441 static int read_exit_mmio(struct kvm_vcpu *vcpu, gpa_t gpa, in read_exit_mmio() argument
4444 trace_kvm_mmio(KVM_TRACE_MMIO_READ_UNSATISFIED, bytes, gpa, 0); in read_exit_mmio()
4448 static int write_exit_mmio(struct kvm_vcpu *vcpu, gpa_t gpa, in write_exit_mmio() argument
4477 gpa_t gpa; in emulator_read_write_onepage() local
4482 ret = vcpu_mmio_gva_to_gpa(vcpu, addr, &gpa, exception, write); in emulator_read_write_onepage()
4491 if (ops->read_write_emulate(vcpu, gpa, val, bytes)) in emulator_read_write_onepage()
4498 handled = ops->read_write_mmio(vcpu, gpa, bytes, val); in emulator_read_write_onepage()
4502 gpa += handled; in emulator_read_write_onepage()
4508 frag->gpa = gpa; in emulator_read_write_onepage()
4521 gpa_t gpa; in emulator_read_write() local
4555 gpa = vcpu->mmio_fragments[0].gpa; in emulator_read_write()
4563 vcpu->run->mmio.phys_addr = gpa; in emulator_read_write()
4565 return ops->read_write_exit_mmio(vcpu, gpa, val, bytes); in emulator_read_write()
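The read_write_emulate / read_write_mmio / read_write_exit_mmio hits above form a small strategy table so that emulator_read_write_onepage() can serve both reads and writes with one code path: it translates the GVA, lets read_write_emulate try plain guest memory first, and only falls back to the in-kernel MMIO bus. Whatever the kernel cannot complete is recorded as an mmio_fragment; emulator_read_write() then publishes the first fragment's gpa in vcpu->run->mmio.phys_addr and read_write_exit_mmio hands the access to userspace. A self-contained sketch of that "try RAM, then MMIO, queue the rest for userspace" flow (types and callbacks are stand-ins, not the kernel API):

#include <stdint.h>

typedef uint64_t gpa_t;

struct mmio_fragment {
	gpa_t    gpa;
	void    *data;
	unsigned len;
};

struct rw_ops {
	/* non-zero on success, like read_emulate()/write_emulate() */
	int (*emulate)(gpa_t gpa, void *val, int bytes);
	/* returns how many bytes the in-kernel MMIO bus handled */
	int (*mmio)(gpa_t gpa, int bytes, void *val);
};

/* One page worth of access: returns the number of fragments queued
 * for userspace (0 means the kernel handled everything itself). */
static int rw_onepage(gpa_t gpa, void *val, int bytes,
		      const struct rw_ops *ops,
		      struct mmio_fragment *frag)
{
	int handled;

	if (ops->emulate(gpa, val, bytes))
		return 0;                        /* ordinary guest RAM, done */

	handled = ops->mmio(gpa, bytes, val);    /* in-kernel MMIO devices */
	if (handled == bytes)
		return 0;

	/* Leftover bytes become an MMIO fragment completed in userspace. */
	gpa   += handled;
	bytes -= handled;
	val    = (uint8_t *)val + handled;

	frag->gpa  = gpa;
	frag->data = val;
	frag->len  = bytes;
	return 1;
}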
4606 gpa_t gpa; in emulator_cmpxchg_emulated() local
4615 gpa = kvm_mmu_gva_to_gpa_write(vcpu, addr, NULL); in emulator_cmpxchg_emulated()
4617 if (gpa == UNMAPPED_GVA || in emulator_cmpxchg_emulated()
4618 (gpa & PAGE_MASK) == APIC_DEFAULT_PHYS_BASE) in emulator_cmpxchg_emulated()
4621 if (((gpa + bytes - 1) & PAGE_MASK) != (gpa & PAGE_MASK)) in emulator_cmpxchg_emulated()
4624 page = gfn_to_page(vcpu->kvm, gpa >> PAGE_SHIFT); in emulator_cmpxchg_emulated()
4629 kaddr += offset_in_page(gpa); in emulator_cmpxchg_emulated()
4652 mark_page_dirty(vcpu->kvm, gpa >> PAGE_SHIFT); in emulator_cmpxchg_emulated()
4653 kvm_mmu_pte_write(vcpu, gpa, new, bytes); in emulator_cmpxchg_emulated()
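emulator_cmpxchg_emulated() only takes this fast path when the operand lies entirely within one guest page: it translates the GVA for write access, refuses MMIO targets such as the APIC page, and the ((gpa + bytes - 1) & PAGE_MASK) != (gpa & PAGE_MASK) test rejects operands that straddle a page boundary before the page is mapped with gfn_to_page() and the atomic compare-exchange runs on the host mapping. A small runnable check of that boundary condition (the example addresses are illustrative):

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

#define PAGE_SHIFT 12
#define PAGE_MASK  (~((uint64_t)(1 << PAGE_SHIFT) - 1))

/* Mirrors the page-crossing test from emulator_cmpxchg_emulated(). */
static bool crosses_page(uint64_t gpa, unsigned bytes)
{
	return ((gpa + bytes - 1) & PAGE_MASK) != (gpa & PAGE_MASK);
}

int main(void)
{
	printf("%d\n", crosses_page(0x1ffc, 4));  /* 0: ends exactly at 0x1fff */
	printf("%d\n", crosses_page(0x1ffd, 4));  /* 1: spills into the next page */
	return 0;
}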
5166 gpa_t gpa = cr2; in reexecute_instruction() local
5177 gpa = kvm_mmu_gva_to_gpa_write(vcpu, cr2, NULL); in reexecute_instruction()
5183 if (gpa == UNMAPPED_GVA) in reexecute_instruction()
5193 pfn = gfn_to_pfn(vcpu->kvm, gpa_to_gfn(gpa)); in reexecute_instruction()
5213 kvm_mmu_unprotect_page(vcpu->kvm, gpa_to_gfn(gpa)); in reexecute_instruction()
5223 kvm_mmu_unprotect_page(vcpu->kvm, gpa_to_gfn(gpa)); in reexecute_instruction()
5237 unsigned long last_retry_eip, last_retry_addr, gpa = cr2; in retry_instruction() local
5270 gpa = kvm_mmu_gva_to_gpa_write(vcpu, cr2, NULL); in retry_instruction()
5272 kvm_mmu_unprotect_page(vcpu->kvm, gpa_to_gfn(gpa)); in retry_instruction()
6577 frag->gpa += len; in complete_emulated_mmio()
6592 run->mmio.phys_addr = frag->gpa; in complete_emulated_mmio()
6962 gpa_t gpa; in kvm_arch_vcpu_ioctl_translate() local
6966 gpa = kvm_mmu_gva_to_gpa_system(vcpu, vaddr, NULL); in kvm_arch_vcpu_ioctl_translate()
6968 tr->physical_address = gpa; in kvm_arch_vcpu_ioctl_translate()
6969 tr->valid = gpa != UNMAPPED_GVA; in kvm_arch_vcpu_ioctl_translate()
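The last hit, kvm_arch_vcpu_ioctl_translate(), backs the KVM_TRANSLATE vCPU ioctl: userspace passes a guest linear address and gets back the physical address plus a valid flag, which is cleared exactly when kvm_mmu_gva_to_gpa_system() returned UNMAPPED_GVA. A hedged userspace sketch of calling it (assumes a vCPU fd already obtained via KVM_CREATE_VCPU; error handling trimmed):

#include <sys/ioctl.h>
#include <linux/kvm.h>

/* Ask KVM to translate a guest-virtual address on a given vCPU.
 * Returns 0 and fills *gpa on success, -1 if the GVA is unmapped
 * or the ioctl itself failed. */
static int translate_gva(int vcpu_fd, __u64 gva, __u64 *gpa)
{
	struct kvm_translation tr = { .linear_address = gva };

	if (ioctl(vcpu_fd, KVM_TRANSLATE, &tr) < 0)
		return -1;
	if (!tr.valid)
		return -1;          /* gva_to_gpa() returned UNMAPPED_GVA */

	*gpa = tr.physical_address;
	return 0;
}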