Lines Matching refs:idx
69 int idx = hwc->idx; in x86_perf_event_update() local
72 if (idx == INTEL_PMC_IDX_FIXED_BTS) in x86_perf_event_update()
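The two references above are from the counter read-back path: x86_perf_event_update() re-reads the hardware counter and folds the signed difference against the last observed raw value into the event count, using a shift pair to sign-extend values from counters narrower than 64 bits. A minimal user-space model of that arithmetic is sketched below; the function name, the 48-bit width and the sample values are assumptions for illustration, not taken from the listing.

    #include <stdint.h>
    #include <stdio.h>

    /*
     * Model of the wraparound-safe delta computation used when re-reading a
     * hardware counter that is only cntval_bits wide (48 on recent Intel PMUs).
     * Relies on two's-complement conversion, as the kernel's s64 arithmetic does.
     */
    static int64_t counter_delta(uint64_t prev_raw, uint64_t new_raw, int cntval_bits)
    {
        int shift = 64 - cntval_bits;
        int64_t delta = (int64_t)(new_raw << shift) - (int64_t)(prev_raw << shift);
        return delta >> shift;              /* arithmetic shift restores the magnitude */
    }

    int main(void)
    {
        /* A 48-bit counter that wrapped from near its maximum to a small value. */
        uint64_t prev = 0xFFFFFFFFFFF0ULL, now = 0x10ULL;
        printf("delta = %lld\n", (long long)counter_delta(prev, now, 48));   /* 32 */
        return 0;
    }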
129 reg->idx = er->idx; in x86_pmu_extra_regs()
567 event->hw.idx = -1; in __x86_pmu_event_init()
572 event->hw.extra_reg.idx = EXTRA_REG_NONE; in __x86_pmu_event_init()
573 event->hw.branch_reg.idx = EXTRA_REG_NONE; in __x86_pmu_event_init()
581 int idx; in x86_pmu_disable_all() local
583 for (idx = 0; idx < x86_pmu.num_counters; idx++) { in x86_pmu_disable_all()
586 if (!test_bit(idx, cpuc->active_mask)) in x86_pmu_disable_all()
588 rdmsrl(x86_pmu_config_addr(idx), val); in x86_pmu_disable_all()
592 wrmsrl(x86_pmu_config_addr(idx), val); in x86_pmu_disable_all()
629 int idx; in x86_pmu_enable_all() local
631 for (idx = 0; idx < x86_pmu.num_counters; idx++) { in x86_pmu_enable_all()
632 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_enable_all()
634 if (!test_bit(idx, cpuc->active_mask)) in x86_pmu_enable_all()
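x86_pmu_disable_all() and x86_pmu_enable_all() walk the general-purpose counters by idx, skip slots not set in active_mask, and clear or set the enable bit in each counter's event-select MSR. The sketch below models that bit manipulation in plain C; the arrays stand in for the per-CPU cpu_hw_events state and for rdmsrl()/wrmsrl(), and only the enable-bit position (bit 22, ARCH_PERFMON_EVENTSEL_ENABLE) is taken from the architectural definition.

    #include <stdbool.h>
    #include <stdint.h>

    #define NUM_COUNTERS         4
    #define EVENTSEL_ENABLE_BIT  (1ULL << 22)   /* ARCH_PERFMON_EVENTSEL_ENABLE */

    /* Stand-ins for the per-CPU state and the MSR accessors. */
    static uint64_t eventsel_msr[NUM_COUNTERS];
    static bool     active_mask[NUM_COUNTERS];

    static void pmu_disable_all(void)
    {
        for (int idx = 0; idx < NUM_COUNTERS; idx++) {
            if (!active_mask[idx])
                continue;                        /* nothing scheduled on this counter */
            uint64_t val = eventsel_msr[idx];    /* rdmsrl(x86_pmu_config_addr(idx), val) */
            val &= ~EVENTSEL_ENABLE_BIT;         /* stop counting, keep the configuration */
            eventsel_msr[idx] = val;             /* wrmsrl(x86_pmu_config_addr(idx), val) */
        }
    }

    static void pmu_enable_all(void)
    {
        for (int idx = 0; idx < NUM_COUNTERS; idx++) {
            if (!active_mask[idx])
                continue;
            eventsel_msr[idx] |= EVENTSEL_ENABLE_BIT;
        }
    }

    int main(void)
    {
        active_mask[0]  = true;
        eventsel_msr[0] = 0xC0ULL | EVENTSEL_ENABLE_BIT;   /* a configured, enabled event */
        pmu_disable_all();
        pmu_enable_all();
        return 0;
    }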
683 int idx; in perf_sched_init() local
691 for (idx = 0; idx < num; idx++) { in perf_sched_init()
692 if (constraints[idx]->weight == wmin) in perf_sched_init()
696 sched->state.event = idx; /* start with min weight */ in perf_sched_init()
731 int idx; in __perf_sched_find_counter() local
742 idx = INTEL_PMC_IDX_FIXED; in __perf_sched_find_counter()
743 for_each_set_bit_from(idx, c->idxmsk, X86_PMC_IDX_MAX) { in __perf_sched_find_counter()
744 if (!__test_and_set_bit(idx, sched->state.used)) in __perf_sched_find_counter()
750 idx = sched->state.counter; in __perf_sched_find_counter()
751 for_each_set_bit_from(idx, c->idxmsk, INTEL_PMC_IDX_FIXED) { in __perf_sched_find_counter()
752 if (!__test_and_set_bit(idx, sched->state.used)) { in __perf_sched_find_counter()
763 sched->state.counter = idx; in __perf_sched_find_counter()
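perf_sched_init() starts the search with a minimum-weight event (the constraint that allows the fewest counters), and __perf_sched_find_counter() then claims a counter for it: fixed-purpose indices (at and above INTEL_PMC_IDX_FIXED) are tried first, the general-purpose range second, taking the first index that is set in the constraint's idxmsk and not yet marked in the used bitmap. The simplified standalone model below uses plain 64-bit masks instead of the kernel bitmap helpers and omits the backtracking state; names and the example constraint are illustrative.

    #include <stdint.h>
    #include <stdio.h>

    #define PMC_IDX_FIXED 32   /* mirrors INTEL_PMC_IDX_FIXED */
    #define PMC_IDX_MAX   64

    /*
     * Return the first counter index that the constraint mask allows and that
     * is still free in *used, preferring fixed counters; -1 if none fits.
     */
    static int find_counter(uint64_t idxmsk, uint64_t *used)
    {
        /* Pass 1: fixed counters. */
        for (int idx = PMC_IDX_FIXED; idx < PMC_IDX_MAX; idx++) {
            if ((idxmsk & (1ULL << idx)) && !(*used & (1ULL << idx))) {
                *used |= 1ULL << idx;
                return idx;
            }
        }
        /* Pass 2: general-purpose counters. */
        for (int idx = 0; idx < PMC_IDX_FIXED; idx++) {
            if ((idxmsk & (1ULL << idx)) && !(*used & (1ULL << idx))) {
                *used |= 1ULL << idx;
                return idx;
            }
        }
        return -1;
    }

    int main(void)
    {
        uint64_t used = 0;
        uint64_t constraint = 0xFULL;   /* event may run on general counters 0-3 only */
        printf("first event  -> counter %d\n", find_counter(constraint, &used));
        printf("second event -> counter %d\n", find_counter(constraint, &used));
        return 0;
    }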
861 if (hwc->idx == -1) in x86_schedule_events()
865 if (!test_bit(hwc->idx, c->idxmsk)) in x86_schedule_events()
869 if (test_bit(hwc->idx, used_mask)) in x86_schedule_events()
872 __set_bit(hwc->idx, used_mask); in x86_schedule_events()
874 assign[i] = hwc->idx; in x86_schedule_events()
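Before running the full scheduler, x86_schedule_events() tries a fast path: if every event kept a valid index from the previous round (hwc->idx != -1), that index is still permitted by the event's current constraint, and no two events collide on the same counter, the existing assignment is copied as-is. A compact model of that check follows; the structure and function names are stand-ins, not the kernel's.

    #include <stdbool.h>
    #include <stdint.h>

    struct hw_event {
        int      idx;      /* previously assigned counter, -1 if never scheduled */
        uint64_t idxmsk;   /* counters the event's current constraint allows */
    };

    /*
     * Try to reuse last round's assignment; returns false to fall back to the
     * full constraint scheduler.
     */
    static bool reuse_previous_assignment(const struct hw_event *ev, int n, int *assign)
    {
        uint64_t used = 0;

        for (int i = 0; i < n; i++) {
            if (ev[i].idx == -1)                        /* no previous assignment */
                return false;
            if (!(ev[i].idxmsk & (1ULL << ev[i].idx)))  /* constraint no longer allows it */
                return false;
            if (used & (1ULL << ev[i].idx))             /* counter already claimed */
                return false;
            used |= 1ULL << ev[i].idx;
            assign[i] = ev[i].idx;
        }
        return true;
    }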
982 hwc->idx = cpuc->assign[i]; in x86_assign_hw_event()
986 if (hwc->idx == INTEL_PMC_IDX_FIXED_BTS) { in x86_assign_hw_event()
989 } else if (hwc->idx >= INTEL_PMC_IDX_FIXED) { in x86_assign_hw_event()
991 hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - INTEL_PMC_IDX_FIXED); in x86_assign_hw_event()
992 hwc->event_base_rdpmc = (hwc->idx - INTEL_PMC_IDX_FIXED) | 1<<30; in x86_assign_hw_event()
994 hwc->config_base = x86_pmu_config_addr(hwc->idx); in x86_assign_hw_event()
995 hwc->event_base = x86_pmu_event_addr(hwc->idx); in x86_assign_hw_event()
996 hwc->event_base_rdpmc = x86_pmu_rdpmc_index(hwc->idx); in x86_assign_hw_event()
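x86_assign_hw_event() caches the MSR addresses that the later fast paths use, and the choice hinges on which range idx falls in: fixed counters share one control MSR and have their own counter bank (with bit 30 set in the rdpmc index), while general counters get per-index event-select and counter MSRs. The sketch below spells that out with the architectural MSR numbers; the BTS pseudo-counter special case is omitted, the struct is a stand-in, and the general-purpose addresses shown are the architectural ones that x86_pmu_config_addr()/x86_pmu_event_addr() resolve to on Intel, not on every PMU.

    #include <stdint.h>

    #define PMC_IDX_FIXED       32       /* INTEL_PMC_IDX_FIXED */
    #define MSR_PERFCTR0        0x0C1    /* MSR_ARCH_PERFMON_PERFCTR0 */
    #define MSR_EVENTSEL0       0x186    /* MSR_ARCH_PERFMON_EVENTSEL0 */
    #define MSR_FIXED_CTR0      0x309    /* MSR_ARCH_PERFMON_FIXED_CTR0 */
    #define MSR_FIXED_CTR_CTRL  0x38D    /* MSR_ARCH_PERFMON_FIXED_CTR_CTRL */

    struct hw_event_bases {
        uint32_t config_base;       /* MSR holding the event selection / enable bits */
        uint32_t event_base;        /* MSR holding the counter value */
        uint32_t event_base_rdpmc;  /* index to feed to the rdpmc instruction */
    };

    static struct hw_event_bases assign_bases(int idx)
    {
        struct hw_event_bases b;

        if (idx >= PMC_IDX_FIXED) {
            int fixed = idx - PMC_IDX_FIXED;
            b.config_base      = MSR_FIXED_CTR_CTRL;     /* one shared control MSR */
            b.event_base       = MSR_FIXED_CTR0 + fixed;
            b.event_base_rdpmc = fixed | (1u << 30);     /* bit 30 selects the fixed bank */
        } else {
            b.config_base      = MSR_EVENTSEL0 + idx;
            b.event_base       = MSR_PERFCTR0 + idx;
            b.event_base_rdpmc = idx;
        }
        return b;
    }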
1004 return hwc->idx == cpuc->assign[i] && in match_prev_assignment()
1042 if (hwc->idx == -1 || in x86_pmu_enable()
1094 int ret = 0, idx = hwc->idx; in x86_perf_event_set_period() local
1096 if (idx == INTEL_PMC_IDX_FIXED_BTS) in x86_perf_event_set_period()
1127 per_cpu(pmc_prev_left[idx], smp_processor_id()) = left; in x86_perf_event_set_period()
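x86_perf_event_set_period() arms the counter so that it overflows after the remaining period: the value written is the two's-complement negative of left, masked to the counter width, and left is also remembered in the per-CPU pmc_prev_left array that the debug dump further down in this listing prints. A minimal model of that computation (the clamping against max_period and the adjustment of a negative period_left in the real function are left out):

    #include <stdint.h>
    #include <stdio.h>

    /* Value to program into an n-bit counter so it overflows after 'left' events. */
    static uint64_t period_to_counter_value(int64_t left, int cntval_bits)
    {
        uint64_t cntval_mask = (1ULL << cntval_bits) - 1;
        return (uint64_t)(-left) & cntval_mask;
    }

    int main(void)
    {
        /* Ask a 48-bit counter to overflow after 100000 events. */
        printf("program 0x%llx\n",
               (unsigned long long)period_to_counter_value(100000, 48));
        return 0;
    }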
1220 int idx = event->hw.idx; in x86_pmu_start() local
1225 if (WARN_ON_ONCE(idx == -1)) in x86_pmu_start()
1235 cpuc->events[idx] = event; in x86_pmu_start()
1236 __set_bit(idx, cpuc->active_mask); in x86_pmu_start()
1237 __set_bit(idx, cpuc->running); in x86_pmu_start()
1248 int cpu, idx; in perf_event_print_debug() local
1280 for (idx = 0; idx < x86_pmu.num_counters; idx++) { in perf_event_print_debug()
1281 rdmsrl(x86_pmu_config_addr(idx), pmc_ctrl); in perf_event_print_debug()
1282 rdmsrl(x86_pmu_event_addr(idx), pmc_count); in perf_event_print_debug()
1284 prev_left = per_cpu(pmc_prev_left[idx], cpu); in perf_event_print_debug()
1287 cpu, idx, pmc_ctrl); in perf_event_print_debug()
1289 cpu, idx, pmc_count); in perf_event_print_debug()
1291 cpu, idx, prev_left); in perf_event_print_debug()
1293 for (idx = 0; idx < x86_pmu.num_counters_fixed; idx++) { in perf_event_print_debug()
1294 rdmsrl(MSR_ARCH_PERFMON_FIXED_CTR0 + idx, pmc_count); in perf_event_print_debug()
1297 cpu, idx, pmc_count); in perf_event_print_debug()
1307 if (__test_and_clear_bit(hwc->idx, cpuc->active_mask)) { in x86_pmu_stop()
1309 cpuc->events[hwc->idx] = NULL; in x86_pmu_stop()
1380 int idx, handled = 0; in x86_pmu_handle_irq() local
1395 for (idx = 0; idx < x86_pmu.num_counters; idx++) { in x86_pmu_handle_irq()
1396 if (!test_bit(idx, cpuc->active_mask)) { in x86_pmu_handle_irq()
1402 if (__test_and_clear_bit(idx, cpuc->running)) in x86_pmu_handle_irq()
1407 event = cpuc->events[idx]; in x86_pmu_handle_irq()
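In the overflow handler, the active_mask / running pair covers a race: a counter can deliver its overflow NMI after the event was already stopped, so an idx that is no longer active but still has its running bit set is swallowed once and counted as handled instead of surfacing as an unknown NMI. A standalone model of that decision (the actual overflow detection and sample delivery are elided, and the names are illustrative):

    #include <stdbool.h>

    #define NUM_COUNTERS 4

    static bool active[NUM_COUNTERS];    /* counter currently backs a live event */
    static bool running[NUM_COUNTERS];   /* counter was started since the last check */

    /* Returns how many overflow sources this invocation accounts for. */
    static int handle_overflow_nmi(void)
    {
        int handled = 0;

        for (int idx = 0; idx < NUM_COUNTERS; idx++) {
            if (!active[idx]) {
                if (running[idx]) {      /* late NMI from a counter we just stopped */
                    running[idx] = false;
                    handled++;
                }
                continue;
            }
            /*
             * Active counter: the real handler checks whether it actually
             * overflowed, updates the count, re-arms the period and emits a
             * sample; all elided in this sketch.
             */
        }
        return handled;
    }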
2026 int idx = event->hw.idx; in x86_pmu_event_idx() local
2031 if (x86_pmu.num_counters_fixed && idx >= INTEL_PMC_IDX_FIXED) { in x86_pmu_event_idx()
2032 idx -= INTEL_PMC_IDX_FIXED; in x86_pmu_event_idx()
2033 idx |= 1 << 30; in x86_pmu_event_idx()
2036 return idx + 1; in x86_pmu_event_idx()
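x86_pmu_event_idx() produces the value user space sees as index in the mmap'd perf_event_mmap_page: fixed counters are rebased to zero and tagged with bit 30 (the fixed-bank selector in ECX), and 1 is added because an index of 0 means rdpmc cannot be used for this event; the reader is expected to execute rdpmc with index - 1. Both sides are modelled below; the helper names are hypothetical and the num_counters_fixed guard is omitted.

    #include <stdint.h>
    #include <stdio.h>

    #define PMC_IDX_FIXED 32   /* INTEL_PMC_IDX_FIXED */

    /* Kernel side: turn an internal counter index into the mmap page's 'index'. */
    static uint32_t event_idx_for_userspace(int hw_idx)
    {
        uint32_t idx = (uint32_t)hw_idx;

        if (hw_idx >= PMC_IDX_FIXED)
            idx = (uint32_t)(hw_idx - PMC_IDX_FIXED) | (1u << 30);
        return idx + 1;            /* 0 is reserved for "rdpmc not available" */
    }

    /* User side: what a reader of the mmap page would feed to rdpmc. */
    static void decode(uint32_t index)
    {
        if (index == 0) {
            puts("rdpmc not available for this event");
            return;
        }
        printf("rdpmc ecx = 0x%x\n", index - 1);
    }

    int main(void)
    {
        decode(event_idx_for_userspace(2));                  /* general counter 2 */
        decode(event_idx_for_userspace(PMC_IDX_FIXED + 1));  /* fixed counter 1 */
        return 0;
    }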
2219 int idx = segment >> 3; in get_segment_base() local
2225 if (idx > LDT_ENTRIES) in get_segment_base()
2230 if (!ldt || idx > ldt->size) in get_segment_base()
2233 desc = &ldt->entries[idx]; in get_segment_base()
2238 if (idx > GDT_ENTRIES) in get_segment_base()
2241 desc = raw_cpu_ptr(gdt_page.gdt) + idx; in get_segment_base()
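get_segment_base() derives the descriptor index directly from the x86 segment selector layout: bits 0-1 hold the RPL, bit 2 is the table indicator (GDT vs. LDT), and the index occupies bits 3-15, hence idx = segment >> 3. The decoder below is purely illustrative and does not touch real descriptor tables; the mask names match the kernel's SEGMENT_RPL_MASK / SEGMENT_TI_MASK values.

    #include <stdint.h>
    #include <stdio.h>

    #define SEGMENT_RPL_MASK 0x3   /* requested privilege level, bits 0-1 */
    #define SEGMENT_TI_MASK  0x4   /* table indicator: 0 = GDT, 1 = LDT */

    static void decode_selector(uint16_t segment)
    {
        unsigned int rpl = segment & SEGMENT_RPL_MASK;
        int          ldt = (segment & SEGMENT_TI_MASK) != 0;
        unsigned int idx = segment >> 3;   /* descriptor slot in the chosen table */

        printf("selector 0x%04x: %s entry %u, RPL %u\n",
               segment, ldt ? "LDT" : "GDT", idx, rpl);
    }

    int main(void)
    {
        decode_selector(0x33);   /* 64-bit user code segment: GDT entry 6, RPL 3 */
        decode_selector(0x07);   /* LDT entry 0, RPL 3 */
        return 0;
    }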