hv                296 arch/powerpc/kernel/security.c 	bool enable, hv;
hv                298 arch/powerpc/kernel/security.c 	hv = cpu_has_feature(CPU_FTR_HVMODE);
hv                312 arch/powerpc/kernel/security.c 		 (security_ftr_enabled(SEC_FTR_L1D_FLUSH_HV) && hv));
hv               1719 arch/powerpc/kernel/traps.c 	bool hv;
hv               1721 arch/powerpc/kernel/traps.c 	hv = (TRAP(regs) == 0xf80);
hv               1722 arch/powerpc/kernel/traps.c 	if (hv)
hv               1728 arch/powerpc/kernel/traps.c 	if ((hv || status >= 2) &&
hv               1811 arch/powerpc/kernel/traps.c 		hv ? "Hypervisor " : "", facility, status, regs->nip, regs->msr);
hv               1939 arch/powerpc/xmon/xmon.c 	bool hv = mfmsr() & MSR_HV;
hv               1947 arch/powerpc/xmon/xmon.c 		hv ? mfspr(SPRN_PSSCR) : mfspr(SPRN_PSSCR_PR));
hv               1949 arch/powerpc/xmon/xmon.c 	if (!hv)
hv               2585 arch/powerpc/xmon/xmon.c 	bool hv = cpu_has_feature(CPU_FTR_HVMODE);
hv               2587 arch/powerpc/xmon/xmon.c 	if (hv) {
hv                398 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &kvm->arch.hyperv;
hv                406 arch/x86/kvm/hyperv.c 	if (!hv->tsc_ref.tsc_sequence)
hv                411 arch/x86/kvm/hyperv.c 	return mul_u64_u64_shr(tsc, hv->tsc_ref.tsc_scale, 64)
hv                412 arch/x86/kvm/hyperv.c 		+ hv->tsc_ref.tsc_offset;
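The two hyperv.c lines above are the Hyper-V reference TSC page conversion: reference time = ((guest TSC * tsc_scale) >> 64) + tsc_offset. A minimal userspace sketch of that arithmetic, assuming a GCC/clang unsigned __int128 in place of the kernel's mul_u64_u64_shr(); the scale value in main() is illustrative only:

	#include <stdint.h>
	#include <stdio.h>

	/* 64x64 -> 128-bit multiply, keeping the high 64 bits, i.e. what
	 * mul_u64_u64_shr(tsc, scale, 64) computes in the kernel. */
	static uint64_t mul_u64_u64_shr64(uint64_t a, uint64_t b)
	{
		return (uint64_t)(((unsigned __int128)a * b) >> 64);
	}

	/* Hyper-V reference TSC page read:
	 * reference time = ((guest TSC * tsc_scale) >> 64) + tsc_offset */
	static uint64_t hv_read_reference_time(uint64_t tsc, uint64_t tsc_scale,
					       int64_t tsc_offset)
	{
		return mul_u64_u64_shr64(tsc, tsc_scale) + tsc_offset;
	}

	int main(void)
	{
		/* Illustrative numbers: a 1 GHz TSC and a scale that converts
		 * ticks to 100 ns units; the hypervisor precomputes the real scale. */
		uint64_t tsc_hz = 1000000000ull;
		uint64_t scale = (uint64_t)((((unsigned __int128)10000000ull) << 64) / tsc_hz);
		uint64_t ref = hv_read_reference_time(tsc_hz, scale, 0);

		printf("one second of ticks -> %llu x 100ns\n", (unsigned long long)ref);
		return 0;
	}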
hv                811 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &vcpu->kvm->arch.hyperv;
hv                812 arch/x86/kvm/hyperv.c 	size_t size = ARRAY_SIZE(hv->hv_crash_param);
hv                817 arch/x86/kvm/hyperv.c 	*pdata = hv->hv_crash_param[array_index_nospec(index, size)];
hv                823 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &vcpu->kvm->arch.hyperv;
hv                825 arch/x86/kvm/hyperv.c 	*pdata = hv->hv_crash_ctl;
hv                831 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &vcpu->kvm->arch.hyperv;
hv                834 arch/x86/kvm/hyperv.c 		hv->hv_crash_ctl = data & HV_CRASH_CTL_CRASH_NOTIFY;
hv                839 arch/x86/kvm/hyperv.c 			  hv->hv_crash_param[0],
hv                840 arch/x86/kvm/hyperv.c 			  hv->hv_crash_param[1],
hv                841 arch/x86/kvm/hyperv.c 			  hv->hv_crash_param[2],
hv                842 arch/x86/kvm/hyperv.c 			  hv->hv_crash_param[3],
hv                843 arch/x86/kvm/hyperv.c 			  hv->hv_crash_param[4]);
hv                855 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &vcpu->kvm->arch.hyperv;
hv                856 arch/x86/kvm/hyperv.c 	size_t size = ARRAY_SIZE(hv->hv_crash_param);
hv                861 arch/x86/kvm/hyperv.c 	hv->hv_crash_param[array_index_nospec(index, size)] = data;
hv                937 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &kvm->arch.hyperv;
hv                941 arch/x86/kvm/hyperv.c 	BUILD_BUG_ON(sizeof(tsc_seq) != sizeof(hv->tsc_ref.tsc_sequence));
hv                944 arch/x86/kvm/hyperv.c 	if (!(hv->hv_tsc_page & HV_X64_MSR_TSC_REFERENCE_ENABLE))
hv                948 arch/x86/kvm/hyperv.c 	if (!(hv->hv_tsc_page & HV_X64_MSR_TSC_REFERENCE_ENABLE))
hv                951 arch/x86/kvm/hyperv.c 	gfn = hv->hv_tsc_page >> HV_X64_MSR_TSC_REFERENCE_ADDRESS_SHIFT;
hv                964 arch/x86/kvm/hyperv.c 	hv->tsc_ref.tsc_sequence = 0;
hv                966 arch/x86/kvm/hyperv.c 			    &hv->tsc_ref, sizeof(hv->tsc_ref.tsc_sequence)))
hv                969 arch/x86/kvm/hyperv.c 	if (!compute_tsc_page_parameters(hv_clock, &hv->tsc_ref))
hv                974 arch/x86/kvm/hyperv.c 	if (kvm_write_guest(kvm, gfn_to_gpa(gfn), &hv->tsc_ref, sizeof(hv->tsc_ref)))
hv                987 arch/x86/kvm/hyperv.c 	hv->tsc_ref.tsc_sequence = tsc_seq;
hv                989 arch/x86/kvm/hyperv.c 			&hv->tsc_ref, sizeof(hv->tsc_ref.tsc_sequence));
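The lines from 964 to 989 show the seqcount-like publication protocol for the guest-visible TSC page: the sequence is zeroed first, the new parameters are written while the page is invalid, and only then is a non-zero sequence stored so a guest never computes time from a half-updated page. A minimal sketch of that ordering, assuming ordinary shared memory and GCC/clang fences (the kernel writes through kvm_write_guest() and uses smp_wmb(); the struct and names below are illustrative, not the kernel's):

	#include <stdint.h>

	/* Illustrative layout of a guest-visible reference TSC page. */
	struct ref_tsc_page {
		volatile uint32_t tsc_sequence;	/* 0 means "invalid, do not use" */
		uint32_t reserved;
		uint64_t tsc_scale;
		int64_t  tsc_offset;
	};

	static void publish_tsc_page(struct ref_tsc_page *p, uint64_t scale,
				     int64_t offset, uint32_t new_seq)
	{
		/* 1. Invalidate: a guest seeing sequence == 0 falls back to a
		 *    slower clocksource instead of using stale parameters. */
		p->tsc_sequence = 0;
		__atomic_thread_fence(__ATOMIC_RELEASE);

		/* 2. Update the parameters while the page is marked invalid. */
		p->tsc_scale = scale;
		p->tsc_offset = offset;

		/* 3. Order the contents before the new sequence (smp_wmb() in the
		 *    kernel), then publish a non-zero sequence. */
		__atomic_thread_fence(__ATOMIC_RELEASE);
		p->tsc_sequence = new_seq ? new_seq : 1;
	}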
hv                998 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &kvm->arch.hyperv;
hv               1002 arch/x86/kvm/hyperv.c 		hv->hv_guest_os_id = data;
hv               1004 arch/x86/kvm/hyperv.c 		if (!hv->hv_guest_os_id)
hv               1005 arch/x86/kvm/hyperv.c 			hv->hv_hypercall &= ~HV_X64_MSR_HYPERCALL_ENABLE;
hv               1013 arch/x86/kvm/hyperv.c 		if (!hv->hv_guest_os_id)
hv               1016 arch/x86/kvm/hyperv.c 			hv->hv_hypercall = data;
hv               1027 arch/x86/kvm/hyperv.c 		hv->hv_hypercall = data;
hv               1032 arch/x86/kvm/hyperv.c 		hv->hv_tsc_page = data;
hv               1033 arch/x86/kvm/hyperv.c 		if (hv->hv_tsc_page & HV_X64_MSR_TSC_REFERENCE_ENABLE)
hv               1049 arch/x86/kvm/hyperv.c 		hv->hv_reenlightenment_control = data;
hv               1052 arch/x86/kvm/hyperv.c 		hv->hv_tsc_emulation_control = data;
hv               1055 arch/x86/kvm/hyperv.c 		hv->hv_tsc_emulation_status = data;
hv               1086 arch/x86/kvm/hyperv.c 		struct kvm_hv *hv = &vcpu->kvm->arch.hyperv;
hv               1103 arch/x86/kvm/hyperv.c 			atomic_inc(&hv->num_mismatched_vp_indexes);
hv               1105 arch/x86/kvm/hyperv.c 			atomic_dec(&hv->num_mismatched_vp_indexes);
hv               1195 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &kvm->arch.hyperv;
hv               1199 arch/x86/kvm/hyperv.c 		data = hv->hv_guest_os_id;
hv               1202 arch/x86/kvm/hyperv.c 		data = hv->hv_hypercall;
hv               1208 arch/x86/kvm/hyperv.c 		data = hv->hv_tsc_page;
hv               1220 arch/x86/kvm/hyperv.c 		data = hv->hv_reenlightenment_control;
hv               1223 arch/x86/kvm/hyperv.c 		data = hv->hv_tsc_emulation_control;
hv               1226 arch/x86/kvm/hyperv.c 		data = hv->hv_tsc_emulation_status;
hv               1328 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &kvm->arch.hyperv;
hv               1338 arch/x86/kvm/hyperv.c 	if (likely(!atomic_read(&hv->num_mismatched_vp_indexes))) {
hv               1736 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &kvm->arch.hyperv;
hv               1744 arch/x86/kvm/hyperv.c 	mutex_lock(&hv->hv_lock);
hv               1745 arch/x86/kvm/hyperv.c 	ret = idr_alloc(&hv->conn_to_evt, eventfd, conn_id, conn_id + 1,
hv               1747 arch/x86/kvm/hyperv.c 	mutex_unlock(&hv->hv_lock);
hv               1760 arch/x86/kvm/hyperv.c 	struct kvm_hv *hv = &kvm->arch.hyperv;
hv               1763 arch/x86/kvm/hyperv.c 	mutex_lock(&hv->hv_lock);
hv               1764 arch/x86/kvm/hyperv.c 	eventfd = idr_remove(&hv->conn_to_evt, conn_id);
hv               1765 arch/x86/kvm/hyperv.c 	mutex_unlock(&hv->hv_lock);
hv                827 drivers/gpu/drm/omapdrm/dss/dispc.c 		u32 h, hv;
hv                833 drivers/gpu/drm/omapdrm/dss/dispc.c 		hv = FLD_VAL(h_coef[i].hc4_vc22, 7, 0)
hv                840 drivers/gpu/drm/omapdrm/dss/dispc.c 			dispc_ovl_write_firhv_reg(dispc, plane, i, hv);
hv                843 drivers/gpu/drm/omapdrm/dss/dispc.c 			dispc_ovl_write_firhv2_reg(dispc, plane, i, hv);
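The DISPC lines above pack several 8-bit horizontal/vertical FIR scaler coefficients into one 32-bit value before writing it to a FIRHV/FIRHV2 register. A minimal sketch of that shift-and-mask packing, written in the spirit of the driver's FLD_MASK()/FLD_VAL() helpers; the coefficient names and bit positions are illustrative:

	#include <stdint.h>

	/* Build a mask covering bits end..start and place a value into it. */
	#define FLD_MASK(start, end)	 (((1u << ((start) - (end) + 1)) - 1) << (end))
	#define FLD_VAL(val, start, end) (((uint32_t)(val) << (end)) & FLD_MASK(start, end))

	/* Pack four 8-bit scaler coefficients into one FIRHV-style register word. */
	static uint32_t pack_firhv(uint8_t hc0, uint8_t hc1, uint8_t vc0, uint8_t vc1)
	{
		return FLD_VAL(hc0,  7,  0) |
		       FLD_VAL(hc1, 15,  8) |
		       FLD_VAL(vc0, 23, 16) |
		       FLD_VAL(vc1, 31, 24);
	}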
hv               1257 drivers/infiniband/ulp/ipoib/ipoib_main.c 	u32 hv;
hv               1259 drivers/infiniband/ulp/ipoib/ipoib_main.c 	hv = jhash_3words(d32[3], d32[4], IPOIB_QPN_MASK & d32[0], 0);
hv               1260 drivers/infiniband/ulp/ipoib/ipoib_main.c 	return hv & htbl->mask;
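The ipoib_main.c lines hash three 32-bit words of the hardware address into hv and mask it down to a bucket index, which only works because the neighbour table size is a power of two. A self-contained sketch of the same bucket selection, with a placeholder mixing function standing in for the kernel's jhash_3words():

	#include <stdint.h>
	#include <stddef.h>

	/* Illustrative stand-in for jhash_3words(): mix three 32-bit words
	 * into one hash value. Not Bob Jenkins' hash, just a placeholder. */
	static uint32_t mix3(uint32_t a, uint32_t b, uint32_t c)
	{
		uint32_t h = a * 0x9e3779b1u;

		h ^= b + 0x85ebca6bu + (h << 6) + (h >> 2);
		h ^= c + 0xc2b2ae35u + (h << 6) + (h >> 2);
		return h;
	}

	/* Map a hash value to a bucket, as in "return hv & htbl->mask";
	 * mask = nbuckets - 1 is valid only when nbuckets is a power of two. */
	static size_t hash_bucket(const uint32_t d32[5], uint32_t qpn_mask,
				  size_t nbuckets)
	{
		uint32_t hv = mix3(d32[3], d32[4], qpn_mask & d32[0]);

		return hv & (nbuckets - 1);
	}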
hv               1897 drivers/staging/media/ipu3/include/intel-ipu3.h 	__u32 hv:8;
hv                654 drivers/video/fbdev/omap2/omapfb/dss/dispc.c 		u32 h, hv;
hv                660 drivers/video/fbdev/omap2/omapfb/dss/dispc.c 		hv = FLD_VAL(h_coef[i].hc4_vc22, 7, 0)
hv                667 drivers/video/fbdev/omap2/omapfb/dss/dispc.c 			dispc_ovl_write_firhv_reg(plane, i, hv);
hv                670 drivers/video/fbdev/omap2/omapfb/dss/dispc.c 			dispc_ovl_write_firhv2_reg(plane, i, hv);
hv                 84 lib/zstd/huf.h 	void *name##hv = &(name##hb);                  \
hv                 85 lib/zstd/huf.h 	HUF_CElt *name = (HUF_CElt *)(name##hv) /* no final ; */
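The huf.h lines are the tail of a table-declaration macro: it declares a raw backing buffer, takes its address through a void * named name##hv, and exposes it as a typed pointer named name, deliberately omitting the final semicolon so the invocation supplies it. A minimal sketch of the same token-pasting pattern for an on-stack table; the types and names are illustrative, not zstd's:

	#include <stdint.h>
	#include <stddef.h>

	typedef struct { uint16_t val; uint8_t nbBits; } elt_t;

	/* Declare an on-stack backing buffer plus a typed alias pointer. The
	 * void * hop mirrors the name##hv line in lib/zstd/huf.h and avoids a
	 * direct cast between the buffer's type and the element type. No
	 * trailing ';' so the caller writes one at the invocation site. */
	#define CREATE_STATIC_TABLE(name, nelts)                            \
		uint32_t name##hb[((nelts) * sizeof(elt_t) + 3) / 4];       \
		void *name##hv = &(name##hb);                               \
		elt_t *name = (elt_t *)(name##hv) /* no final ; */

	static void example(void)
	{
		CREATE_STATIC_TABLE(tbl, 256);	/* 256-entry table on the stack */

		tbl[0].val = 1;
		tbl[0].nbBits = 4;
		(void)tbl;
		(void)tblhv;	/* the pasted helper name is literally "tblhv" */
	}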
hv                411 tools/perf/util/evsel.c 		MOD_PRINT(hv, 'h');