Lines Matching refs:idx

69 int idx = hwc->idx; in x86_perf_event_update() local
72 if (idx == INTEL_PMC_IDX_FIXED_BTS) in x86_perf_event_update()
129 reg->idx = er->idx; in x86_pmu_extra_regs()
562 event->hw.idx = -1; in __x86_pmu_event_init()
567 event->hw.extra_reg.idx = EXTRA_REG_NONE; in __x86_pmu_event_init()
568 event->hw.branch_reg.idx = EXTRA_REG_NONE; in __x86_pmu_event_init()
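
The init lines above (562, 567, 568) seed the hardware-event state with sentinels: idx = -1 means "no counter assigned yet", and EXTRA_REG_NONE marks unused extra/branch registers, which is what the later hwc->idx == -1 checks (lines 843, 1027) rely on. A minimal sketch of that sentinel pattern, using a simplified stand-in struct rather than the kernel's struct hw_perf_event:

#include <stdbool.h>

#define EXTRA_REG_NONE	-1	/* same value the kernel uses for this define */

struct hw_event_sketch {
	int idx;		/* -1 until a hardware counter is assigned */
	int extra_reg_idx;
	int branch_reg_idx;
};

static void event_init_sketch(struct hw_event_sketch *hwc)
{
	hwc->idx = -1;				/* no counter yet */
	hwc->extra_reg_idx = EXTRA_REG_NONE;
	hwc->branch_reg_idx = EXTRA_REG_NONE;
}

static bool event_is_assigned(const struct hw_event_sketch *hwc)
{
	return hwc->idx != -1;			/* mirrors the later hwc->idx == -1 checks */
}
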
576 int idx; in x86_pmu_disable_all() local
578 for (idx = 0; idx < x86_pmu.num_counters; idx++) { in x86_pmu_disable_all()
581 if (!test_bit(idx, cpuc->active_mask)) in x86_pmu_disable_all()
583 rdmsrl(x86_pmu_config_addr(idx), val); in x86_pmu_disable_all()
587 wrmsrl(x86_pmu_config_addr(idx), val); in x86_pmu_disable_all()
611 int idx; in x86_pmu_enable_all() local
613 for (idx = 0; idx < x86_pmu.num_counters; idx++) { in x86_pmu_enable_all()
614 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_enable_all()
616 if (!test_bit(idx, cpuc->active_mask)) in x86_pmu_enable_all()
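
Both loops above (x86_pmu_disable_all at 576-587 and x86_pmu_enable_all at 611-616) walk the generic counters 0..num_counters-1, skip any idx not set in cpuc->active_mask, and flip the enable bit in that counter's event-select MSR. A user-space model of the disable side; rdmsr/wrmsr are stubbed and the counter count is illustrative, while ARCH_PERFMON_EVENTSEL_ENABLE (bit 22) and the 0x186 event-select base are the architectural values:

#include <stdint.h>
#include <stdbool.h>

#define NUM_COUNTERS			4		/* illustrative */
#define MSR_ARCH_PERFMON_PERFEVTSEL0	0x186		/* first event-select MSR */
#define ARCH_PERFMON_EVENTSEL_ENABLE	(1ULL << 22)

static uint64_t fake_msr[NUM_COUNTERS];		/* stands in for the real MSRs */
static bool     active_mask[NUM_COUNTERS];	/* stands in for cpuc->active_mask */

static uint64_t rdmsrl_model(unsigned int addr)
{
	return fake_msr[addr - MSR_ARCH_PERFMON_PERFEVTSEL0];
}

static void wrmsrl_model(unsigned int addr, uint64_t val)
{
	fake_msr[addr - MSR_ARCH_PERFMON_PERFEVTSEL0] = val;
}

static void pmu_disable_all_model(void)
{
	for (int idx = 0; idx < NUM_COUNTERS; idx++) {
		uint64_t val;

		if (!active_mask[idx])			/* counter not in use: skip */
			continue;
		val = rdmsrl_model(MSR_ARCH_PERFMON_PERFEVTSEL0 + idx);
		val &= ~ARCH_PERFMON_EVENTSEL_ENABLE;	/* clear only the enable bit, */
		wrmsrl_model(MSR_ARCH_PERFMON_PERFEVTSEL0 + idx, val);
	}						/* keep the event selection intact */
}

The enable path is the mirror image: set the enable bit instead of clearing it.
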
665 int idx; in perf_sched_init() local
673 for (idx = 0; idx < num; idx++) { in perf_sched_init()
674 if (constraints[idx]->weight == wmin) in perf_sched_init()
678 sched->state.event = idx; /* start with min weight */ in perf_sched_init()
713 int idx; in __perf_sched_find_counter() local
724 idx = INTEL_PMC_IDX_FIXED; in __perf_sched_find_counter()
725 for_each_set_bit_from(idx, c->idxmsk, X86_PMC_IDX_MAX) { in __perf_sched_find_counter()
726 if (!__test_and_set_bit(idx, sched->state.used)) in __perf_sched_find_counter()
732 idx = sched->state.counter; in __perf_sched_find_counter()
733 for_each_set_bit_from(idx, c->idxmsk, INTEL_PMC_IDX_FIXED) { in __perf_sched_find_counter()
734 if (!__test_and_set_bit(idx, sched->state.used)) { in __perf_sched_find_counter()
745 sched->state.counter = idx; in __perf_sched_find_counter()
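
The __perf_sched_find_counter() lines above (724-745) show the two-pass search: the fixed-counter range starting at INTEL_PMC_IDX_FIXED is tried first, then the generic range below it, and the chosen idx is claimed in the "used" bitmap. A plain-C model under simplifying assumptions (64-bit masks instead of kernel bitmaps, and the generic scan restarts at 0 rather than resuming from the saved position):

#include <stdint.h>

#define IDX_FIXED	32	/* stand-in for INTEL_PMC_IDX_FIXED */
#define IDX_MAX		64	/* stand-in for X86_PMC_IDX_MAX */

/* Find a free counter allowed by @idxmsk, marking it in *used.
 * Returns the counter index, or -1 if none is free. */
static int find_counter_model(uint64_t idxmsk, uint64_t *used)
{
	/* Pass 1: fixed counters (bits IDX_FIXED..IDX_MAX-1). */
	for (int idx = IDX_FIXED; idx < IDX_MAX; idx++) {
		if ((idxmsk & (1ULL << idx)) && !(*used & (1ULL << idx))) {
			*used |= 1ULL << idx;
			return idx;
		}
	}
	/* Pass 2: generic counters (bits 0..IDX_FIXED-1). */
	for (int idx = 0; idx < IDX_FIXED; idx++) {
		if ((idxmsk & (1ULL << idx)) && !(*used & (1ULL << idx))) {
			*used |= 1ULL << idx;
			return idx;
		}
	}
	return -1;
}
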
843 if (hwc->idx == -1) in x86_schedule_events()
847 if (!test_bit(hwc->idx, c->idxmsk)) in x86_schedule_events()
851 if (test_bit(hwc->idx, used_mask)) in x86_schedule_events()
854 __set_bit(hwc->idx, used_mask); in x86_schedule_events()
856 assign[i] = hwc->idx; in x86_schedule_events()
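
Lines 843-856 are the scheduling fast path: an event keeps its previous hwc->idx if that counter is still permitted by its constraint mask and nobody has claimed it in this pass. A small model of that check, with illustrative names and 64-bit masks standing in for the kernel bitmaps:

#include <stdint.h>
#include <stdbool.h>

static bool try_reuse_prev_counter(int prev_idx, uint64_t constraint_msk,
				   uint64_t *used_mask, int *assign)
{
	if (prev_idx == -1)				/* never scheduled before */
		return false;
	if (!(constraint_msk & (1ULL << prev_idx)))	/* constraint no longer allows it */
		return false;
	if (*used_mask & (1ULL << prev_idx))		/* already taken this round */
		return false;

	*used_mask |= 1ULL << prev_idx;			/* claim it and reuse the slot */
	*assign = prev_idx;
	return true;
}
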
967 hwc->idx = cpuc->assign[i]; in x86_assign_hw_event()
971 if (hwc->idx == INTEL_PMC_IDX_FIXED_BTS) { in x86_assign_hw_event()
974 } else if (hwc->idx >= INTEL_PMC_IDX_FIXED) { in x86_assign_hw_event()
976 hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - INTEL_PMC_IDX_FIXED); in x86_assign_hw_event()
977 hwc->event_base_rdpmc = (hwc->idx - INTEL_PMC_IDX_FIXED) | 1<<30; in x86_assign_hw_event()
979 hwc->config_base = x86_pmu_config_addr(hwc->idx); in x86_assign_hw_event()
980 hwc->event_base = x86_pmu_event_addr(hwc->idx); in x86_assign_hw_event()
981 hwc->event_base_rdpmc = x86_pmu_rdpmc_index(hwc->idx); in x86_assign_hw_event()
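
Lines 967-981 (x86_assign_hw_event) turn the assigned idx into three per-event values: the event-select MSR, the counter MSR, and the value to load into ECX for RDPMC. Fixed-function counters live at MSR_ARCH_PERFMON_FIXED_CTR0 + n and are selected by RDPMC with bit 30 set; the BTS pseudo-counter has no MSRs at all and is omitted below. A sketch of that mapping; the MSR numbers are the architectural Intel values, the struct is a simplified stand-in for struct hw_perf_event:

#include <stdint.h>

#define INTEL_PMC_IDX_FIXED		32
#define MSR_ARCH_PERFMON_PERFEVTSEL0	0x186
#define MSR_ARCH_PERFMON_PERFCTR0	0x0c1
#define MSR_ARCH_PERFMON_FIXED_CTR0	0x309
#define MSR_ARCH_PERFMON_FIXED_CTR_CTRL	0x38d

struct hw_event_bases {
	unsigned int config_base;	/* event-select / control MSR */
	unsigned int event_base;	/* counter MSR */
	unsigned int event_base_rdpmc;	/* ECX value for the RDPMC instruction */
};

static void assign_bases(struct hw_event_bases *hwc, int idx)
{
	if (idx >= INTEL_PMC_IDX_FIXED) {
		/* Fixed-function counters share one control MSR; RDPMC
		 * selects them with bit 30 set plus the fixed index. */
		hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL;
		hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (idx - INTEL_PMC_IDX_FIXED);
		hwc->event_base_rdpmc = (idx - INTEL_PMC_IDX_FIXED) | (1U << 30);
	} else {
		/* Generic counters: consecutive event-select / counter MSRs,
		 * and RDPMC takes the raw counter number. */
		hwc->config_base = MSR_ARCH_PERFMON_PERFEVTSEL0 + idx;
		hwc->event_base = MSR_ARCH_PERFMON_PERFCTR0 + idx;
		hwc->event_base_rdpmc = idx;
	}
}
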
989 return hwc->idx == cpuc->assign[i] && in match_prev_assignment()
1027 if (hwc->idx == -1 || in x86_pmu_enable()
1079 int ret = 0, idx = hwc->idx; in x86_perf_event_set_period() local
1081 if (idx == INTEL_PMC_IDX_FIXED_BTS) in x86_perf_event_set_period()
1112 per_cpu(pmc_prev_left[idx], smp_processor_id()) = left; in x86_perf_event_set_period()
1202 int idx = event->hw.idx; in x86_pmu_start() local
1207 if (WARN_ON_ONCE(idx == -1)) in x86_pmu_start()
1217 cpuc->events[idx] = event; in x86_pmu_start()
1218 __set_bit(idx, cpuc->active_mask); in x86_pmu_start()
1219 __set_bit(idx, cpuc->running); in x86_pmu_start()
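
Lines 1202-1219 show the bookkeeping x86_pmu_start() does with idx: refuse to start an event that was never assigned a counter, record the event in cpuc->events[idx] so the overflow handler can find it, and set both the "active" and "running" bits. A model with a simplified stand-in for struct cpu_hw_events:

#include <stdint.h>
#include <stdio.h>

#define NUM_COUNTERS 4				/* illustrative */

struct cpu_state_model {
	void	 *events[NUM_COUNTERS];		/* which event owns each counter */
	uint64_t  active_mask;			/* counters currently programmed */
	uint64_t  running;			/* counters that ran (NMI accounting) */
};

static int pmu_start_model(struct cpu_state_model *cpuc, void *event, int idx)
{
	if (idx == -1) {			/* mirrors WARN_ON_ONCE(idx == -1) */
		fprintf(stderr, "event was never assigned a counter\n");
		return -1;
	}
	cpuc->events[idx] = event;		/* overflow handler looks this up by idx */
	cpuc->active_mask |= 1ULL << idx;
	cpuc->running	  |= 1ULL << idx;	/* stays set so a late NMI isn't "unknown" */
	return 0;
}
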
1230 int cpu, idx; in perf_event_print_debug() local
1262 for (idx = 0; idx < x86_pmu.num_counters; idx++) { in perf_event_print_debug()
1263 rdmsrl(x86_pmu_config_addr(idx), pmc_ctrl); in perf_event_print_debug()
1264 rdmsrl(x86_pmu_event_addr(idx), pmc_count); in perf_event_print_debug()
1266 prev_left = per_cpu(pmc_prev_left[idx], cpu); in perf_event_print_debug()
1269 cpu, idx, pmc_ctrl); in perf_event_print_debug()
1271 cpu, idx, pmc_count); in perf_event_print_debug()
1273 cpu, idx, prev_left); in perf_event_print_debug()
1275 for (idx = 0; idx < x86_pmu.num_counters_fixed; idx++) { in perf_event_print_debug()
1276 rdmsrl(MSR_ARCH_PERFMON_FIXED_CTR0 + idx, pmc_count); in perf_event_print_debug()
1279 cpu, idx, pmc_count); in perf_event_print_debug()
1289 if (__test_and_clear_bit(hwc->idx, cpuc->active_mask)) { in x86_pmu_stop()
1291 cpuc->events[hwc->idx] = NULL; in x86_pmu_stop()
1362 int idx, handled = 0; in x86_pmu_handle_irq() local
1377 for (idx = 0; idx < x86_pmu.num_counters; idx++) { in x86_pmu_handle_irq()
1378 if (!test_bit(idx, cpuc->active_mask)) { in x86_pmu_handle_irq()
1384 if (__test_and_clear_bit(idx, cpuc->running)) in x86_pmu_handle_irq()
1389 event = cpuc->events[idx]; in x86_pmu_handle_irq()
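
The handler loop above (1377-1389) skips counters not in active_mask, but a counter whose "running" bit is still set is treated as a handled, late overflow rather than an unknown NMI, and the bit is cleared so this only happens once. For active counters the owning event is fetched from cpuc->events[idx]. A model with overflow detection stubbed out and illustrative names:

#include <stdint.h>
#include <stdbool.h>

#define NUM_COUNTERS 4				/* illustrative */

struct cpu_state_model {
	void	 *events[NUM_COUNTERS];
	uint64_t  active_mask;
	uint64_t  running;
};

static bool counter_overflowed(int idx) { (void)idx; return false; }	/* stub */

static int handle_irq_model(struct cpu_state_model *cpuc)
{
	int handled = 0;

	for (int idx = 0; idx < NUM_COUNTERS; idx++) {
		if (!(cpuc->active_mask & (1ULL << idx))) {
			/* The counter was stopped, but an overflow NMI may
			 * still be in flight: swallow it instead of reporting
			 * an unknown NMI, and clear "running" so it only
			 * counts once. */
			if (cpuc->running & (1ULL << idx)) {
				cpuc->running &= ~(1ULL << idx);
				handled++;
			}
			continue;
		}
		if (!counter_overflowed(idx))
			continue;
		/* cpuc->events[idx] is the owning event; the real handler
		 * updates its count and may reprogram or stop it here. */
		handled++;
	}
	return handled;
}
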
1977 int idx = event->hw.idx; in x86_pmu_event_idx() local
1982 if (x86_pmu.num_counters_fixed && idx >= INTEL_PMC_IDX_FIXED) { in x86_pmu_event_idx()
1983 idx -= INTEL_PMC_IDX_FIXED; in x86_pmu_event_idx()
1984 idx |= 1 << 30; in x86_pmu_event_idx()
1987 return idx + 1; in x86_pmu_event_idx()
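
Lines 1977-1987 compute the index published to user space for self-monitoring with RDPMC: a fixed counter is re-encoded as bit 30 plus its fixed number, and the result is shifted up by one so that 0 can mean "no counter / rdpmc unavailable"; a reader then executes RDPMC with (index - 1). A sketch of both sides (the rdpmc wrapper is x86-only and purely illustrative):

#include <stdint.h>

#define INTEL_PMC_IDX_FIXED 32

static int event_idx_model(int hw_idx, int have_fixed_counters)
{
	int idx = hw_idx;

	if (have_fixed_counters && idx >= INTEL_PMC_IDX_FIXED) {
		idx -= INTEL_PMC_IDX_FIXED;	/* fixed counter number ... */
		idx |= 1 << 30;			/* ... in RDPMC encoding */
	}
	return idx + 1;				/* 0 is reserved for "not available" */
}

/* What a self-monitoring reader would do with the published value: */
static inline uint64_t rdpmc_model(uint32_t ecx)
{
	uint32_t lo, hi;

	__asm__ __volatile__("rdpmc" : "=a" (lo), "=d" (hi) : "c" (ecx));
	return lo | ((uint64_t)hi << 32);
}

/* if (index != 0) count = rdpmc_model(index - 1); */
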
2170 int idx = segment >> 3; in get_segment_base() local
2175 if (idx > LDT_ENTRIES) in get_segment_base()
2180 if (!ldt || idx > ldt->size) in get_segment_base()
2183 desc = &ldt->entries[idx]; in get_segment_base()
2185 if (idx > GDT_ENTRIES) in get_segment_base()
2188 desc = raw_cpu_ptr(gdt_page.gdt) + idx; in get_segment_base()
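
The get_segment_base() lines above (2170-2188) rely on the x86 segment selector layout: bits 0-1 are the RPL, bit 2 is the table indicator (0 = GDT, 1 = LDT), and bits 3-15 are the descriptor index, hence "idx = segment >> 3" and the separate LDT/GDT bounds checks. A sketch of that decoding; the descriptor lookup itself is kernel-internal and omitted:

#include <stdint.h>
#include <stdbool.h>

#define SEGMENT_RPL_MASK 0x3	/* requested privilege level */
#define SEGMENT_TI_MASK  0x4	/* table indicator: set => LDT */

struct selector_parts {
	unsigned int idx;	/* descriptor index into the GDT or LDT */
	bool	     ldt;	/* true if the index refers to the LDT */
	unsigned int rpl;
};

static struct selector_parts decode_selector(uint16_t segment)
{
	struct selector_parts p = {
		.idx = segment >> 3,
		.ldt = (segment & SEGMENT_TI_MASK) != 0,
		.rpl = segment & SEGMENT_RPL_MASK,
	};
	return p;
}
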