Lines Matching refs:pt
114 struct intel_pt *pt; member
142 static void intel_pt_dump(struct intel_pt *pt __maybe_unused, in intel_pt_dump()
181 static void intel_pt_dump_event(struct intel_pt *pt, unsigned char *buf, in intel_pt_dump_event() argument
185 intel_pt_dump(pt, buf, len); in intel_pt_dump_event()
188 static int intel_pt_do_fix_overlap(struct intel_pt *pt, struct auxtrace_buffer *a, in intel_pt_do_fix_overlap() argument
194 pt->have_tsc); in intel_pt_do_fix_overlap()
219 ptq->thread = machine__findnew_thread(ptq->pt->machine, in intel_pt_use_buffer_pid_tid()
223 ptq->thread = machine__find_thread(ptq->pt->machine, -1, in intel_pt_use_buffer_pid_tid()
240 queue = &ptq->pt->queues.queue_array[ptq->queue_nr]; in intel_pt_get_trace()
253 int fd = perf_data_file__fd(ptq->pt->session->file); in intel_pt_get_trace()
260 if (ptq->pt->snapshot_mode && !buffer->consecutive && old_buffer && in intel_pt_get_trace()
261 intel_pt_do_fix_overlap(ptq->pt, old_buffer, buffer)) in intel_pt_get_trace()
276 if (!old_buffer || ptq->pt->sampling_mode || (ptq->pt->snapshot_mode && in intel_pt_get_trace()
415 struct machine *machine = ptq->pt->machine; in intel_pt_walk_next_insn()
432 if (*ip >= ptq->pt->kernel_start) in intel_pt_walk_next_insn()
441 thread = ptq->pt->unknown_thread; in intel_pt_walk_next_insn()
541 static bool intel_pt_get_config(struct intel_pt *pt, in intel_pt_get_config() argument
544 if (attr->type == pt->pmu_type) { in intel_pt_get_config()
553 static bool intel_pt_exclude_kernel(struct intel_pt *pt) in intel_pt_exclude_kernel() argument
557 evlist__for_each(pt->session->evlist, evsel) { in intel_pt_exclude_kernel()
558 if (intel_pt_get_config(pt, &evsel->attr, NULL) && in intel_pt_exclude_kernel()
565 static bool intel_pt_return_compression(struct intel_pt *pt) in intel_pt_return_compression() argument
570 if (!pt->noretcomp_bit) in intel_pt_return_compression()
573 evlist__for_each(pt->session->evlist, evsel) { in intel_pt_return_compression()
574 if (intel_pt_get_config(pt, &evsel->attr, &config) && in intel_pt_return_compression()
575 (config & pt->noretcomp_bit)) in intel_pt_return_compression()
581 static unsigned int intel_pt_mtc_period(struct intel_pt *pt) in intel_pt_mtc_period() argument
587 if (!pt->mtc_freq_bits) in intel_pt_mtc_period()
590 for (shift = 0, config = pt->mtc_freq_bits; !(config & 1); shift++) in intel_pt_mtc_period()
593 evlist__for_each(pt->session->evlist, evsel) { in intel_pt_mtc_period()
594 if (intel_pt_get_config(pt, &evsel->attr, &config)) in intel_pt_mtc_period()
595 return (config & pt->mtc_freq_bits) >> shift; in intel_pt_mtc_period()
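The intel_pt_mtc_period() fragments above (lines 581-595) show the bitfield-extraction idiom used here: count the trailing zero bits of the mtc_freq_bits mask into shift, then read the field as (config & mask) >> shift. A minimal standalone sketch of that idiom, with illustrative names and values that are not from the source:

    #include <stdint.h>
    #include <stdio.h>

    /* Extract a bitfield from config given its mask: shift a copy of
     * the mask down to find its lowest set bit, then mask and shift
     * the value, as intel_pt_mtc_period() does with pt->mtc_freq_bits. */
    static unsigned int extract_field(uint64_t config, uint64_t mask)
    {
            unsigned int shift;
            uint64_t m = mask;

            if (!mask)
                    return 0;

            for (shift = 0; !(m & 1); shift++)
                    m >>= 1;

            return (config & mask) >> shift;
    }

    int main(void)
    {
            /* Illustrative: a 4-bit field at bits 14..17 holding 3. */
            printf("%u\n", extract_field(3ULL << 14, 0xfULL << 14));
            return 0;
    }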
600 static bool intel_pt_timeless_decoding(struct intel_pt *pt) in intel_pt_timeless_decoding() argument
606 if (!pt->tsc_bit || !pt->cap_user_time_zero) in intel_pt_timeless_decoding()
609 evlist__for_each(pt->session->evlist, evsel) { in intel_pt_timeless_decoding()
612 if (intel_pt_get_config(pt, &evsel->attr, &config)) { in intel_pt_timeless_decoding()
613 if (config & pt->tsc_bit) in intel_pt_timeless_decoding()
622 static bool intel_pt_tracing_kernel(struct intel_pt *pt) in intel_pt_tracing_kernel() argument
626 evlist__for_each(pt->session->evlist, evsel) { in intel_pt_tracing_kernel()
627 if (intel_pt_get_config(pt, &evsel->attr, NULL) && in intel_pt_tracing_kernel()
634 static bool intel_pt_have_tsc(struct intel_pt *pt) in intel_pt_have_tsc() argument
640 if (!pt->tsc_bit) in intel_pt_have_tsc()
643 evlist__for_each(pt->session->evlist, evsel) { in intel_pt_have_tsc()
644 if (intel_pt_get_config(pt, &evsel->attr, &config)) { in intel_pt_have_tsc()
645 if (config & pt->tsc_bit) in intel_pt_have_tsc()
654 static u64 intel_pt_ns_to_ticks(const struct intel_pt *pt, u64 ns) in intel_pt_ns_to_ticks() argument
658 quot = ns / pt->tc.time_mult; in intel_pt_ns_to_ticks()
659 rem = ns % pt->tc.time_mult; in intel_pt_ns_to_ticks()
660 return (quot << pt->tc.time_shift) + (rem << pt->tc.time_shift) / in intel_pt_ns_to_ticks()
661 pt->tc.time_mult; in intel_pt_ns_to_ticks()
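Lines 654-661 invert perf's tick-to-nanosecond conversion, ns = (ticks * time_mult) >> time_shift. Shifting ns left by time_shift directly could overflow 64 bits, so the value is split into a quotient and remainder by time_mult first. A standalone sketch of the same arithmetic, with an illustrative struct and values:

    #include <stdint.h>
    #include <stdio.h>

    /* Mirror of perf's time-conversion parameters. */
    struct time_conv {
            uint16_t time_shift;
            uint32_t time_mult;
    };

    /* ns -> ticks. Computing (ns << shift) / mult directly could
     * overflow, so split ns into quotient and remainder by mult first,
     * exactly as intel_pt_ns_to_ticks() does. */
    static uint64_t ns_to_ticks(const struct time_conv *tc, uint64_t ns)
    {
            uint64_t quot = ns / tc->time_mult;
            uint64_t rem  = ns % tc->time_mult;

            return (quot << tc->time_shift) +
                   (rem << tc->time_shift) / tc->time_mult;
    }

    int main(void)
    {
            /* Illustrative values only. */
            struct time_conv tc = { .time_shift = 10, .time_mult = 642 };

            printf("%llu\n",
                   (unsigned long long)ns_to_ticks(&tc, 1000000000ULL));
            return 0;
    }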
664 static struct intel_pt_queue *intel_pt_alloc_queue(struct intel_pt *pt, in intel_pt_alloc_queue() argument
674 if (pt->synth_opts.callchain) { in intel_pt_alloc_queue()
677 sz += pt->synth_opts.callchain_sz * sizeof(u64); in intel_pt_alloc_queue()
683 if (pt->synth_opts.last_branch) { in intel_pt_alloc_queue()
686 sz += pt->synth_opts.last_branch_sz * in intel_pt_alloc_queue()
700 ptq->pt = pt; in intel_pt_alloc_queue()
702 ptq->exclude_kernel = intel_pt_exclude_kernel(pt); in intel_pt_alloc_queue()
711 params.return_compression = intel_pt_return_compression(pt); in intel_pt_alloc_queue()
712 params.max_non_turbo_ratio = pt->max_non_turbo_ratio; in intel_pt_alloc_queue()
713 params.mtc_period = intel_pt_mtc_period(pt); in intel_pt_alloc_queue()
714 params.tsc_ctc_ratio_n = pt->tsc_ctc_ratio_n; in intel_pt_alloc_queue()
715 params.tsc_ctc_ratio_d = pt->tsc_ctc_ratio_d; in intel_pt_alloc_queue()
717 if (pt->synth_opts.instructions) { in intel_pt_alloc_queue()
718 if (pt->synth_opts.period) { in intel_pt_alloc_queue()
719 switch (pt->synth_opts.period_type) { in intel_pt_alloc_queue()
723 params.period = pt->synth_opts.period; in intel_pt_alloc_queue()
727 params.period = pt->synth_opts.period; in intel_pt_alloc_queue()
731 params.period = intel_pt_ns_to_ticks(pt, in intel_pt_alloc_queue()
732 pt->synth_opts.period); in intel_pt_alloc_queue()
775 static void intel_pt_set_pid_tid_cpu(struct intel_pt *pt, in intel_pt_set_pid_tid_cpu() argument
780 if (queue->tid == -1 || pt->have_sched_switch) { in intel_pt_set_pid_tid_cpu()
781 ptq->tid = machine__get_current_tid(pt->machine, ptq->cpu); in intel_pt_set_pid_tid_cpu()
786 ptq->thread = machine__find_thread(pt->machine, -1, ptq->tid); in intel_pt_set_pid_tid_cpu()
820 static int intel_pt_setup_queue(struct intel_pt *pt, in intel_pt_setup_queue() argument
830 ptq = intel_pt_alloc_queue(pt, queue_nr); in intel_pt_setup_queue()
839 if (pt->sampling_mode) { in intel_pt_setup_queue()
840 if (pt->timeless_decoding) in intel_pt_setup_queue()
842 if (pt->timeless_decoding || !pt->have_sched_switch) in intel_pt_setup_queue()
848 (!pt->sync_switch || in intel_pt_setup_queue()
853 if (pt->timeless_decoding) in intel_pt_setup_queue()
879 ret = auxtrace_heap__add(&pt->heap, queue_nr, ptq->timestamp); in intel_pt_setup_queue()
888 static int intel_pt_setup_queues(struct intel_pt *pt) in intel_pt_setup_queues() argument
893 for (i = 0; i < pt->queues.nr_queues; i++) { in intel_pt_setup_queues()
894 ret = intel_pt_setup_queue(pt, &pt->queues.queue_array[i], i); in intel_pt_setup_queues()
912 nr = ptq->pt->synth_opts.last_branch_sz - ptq->last_branch_pos; in intel_pt_copy_last_branch_rb()
917 if (bs_src->nr >= ptq->pt->synth_opts.last_branch_sz) { in intel_pt_copy_last_branch_rb()
937 ptq->last_branch_pos = ptq->pt->synth_opts.last_branch_sz; in intel_pt_update_last_branch_rb()
947 be->flags.mispred = ptq->pt->mispred_all; in intel_pt_update_last_branch_rb()
949 if (bs->nr < ptq->pt->synth_opts.last_branch_sz) in intel_pt_update_last_branch_rb()
964 struct intel_pt *pt = ptq->pt; in intel_pt_synth_branch_sample() local
972 if (pt->branches_filter && !(pt->branches_filter & ptq->flags)) in intel_pt_synth_branch_sample()
979 if (!pt->timeless_decoding) in intel_pt_synth_branch_sample()
980 sample.time = tsc_to_perf_time(ptq->timestamp, &pt->tc); in intel_pt_synth_branch_sample()
986 sample.id = ptq->pt->branches_id; in intel_pt_synth_branch_sample()
987 sample.stream_id = ptq->pt->branches_id; in intel_pt_synth_branch_sample()
997 if (pt->synth_opts.last_branch && sort__mode == SORT_MODE__BRANCH) { in intel_pt_synth_branch_sample()
1008 if (pt->synth_opts.inject) { in intel_pt_synth_branch_sample()
1010 pt->branches_sample_type, in intel_pt_synth_branch_sample()
1011 pt->synth_needs_swap); in intel_pt_synth_branch_sample()
1016 ret = perf_session__deliver_synth_event(pt->session, event, &sample); in intel_pt_synth_branch_sample()
1027 struct intel_pt *pt = ptq->pt; in intel_pt_synth_instruction_sample() local
1035 if (!pt->timeless_decoding) in intel_pt_synth_instruction_sample()
1036 sample.time = tsc_to_perf_time(ptq->timestamp, &pt->tc); in intel_pt_synth_instruction_sample()
1042 sample.id = ptq->pt->instructions_id; in intel_pt_synth_instruction_sample()
1043 sample.stream_id = ptq->pt->instructions_id; in intel_pt_synth_instruction_sample()
1051 if (pt->synth_opts.callchain) { in intel_pt_synth_instruction_sample()
1053 pt->synth_opts.callchain_sz, sample.ip); in intel_pt_synth_instruction_sample()
1057 if (pt->synth_opts.last_branch) { in intel_pt_synth_instruction_sample()
1062 if (pt->synth_opts.inject) { in intel_pt_synth_instruction_sample()
1064 pt->instructions_sample_type, in intel_pt_synth_instruction_sample()
1065 pt->synth_needs_swap); in intel_pt_synth_instruction_sample()
1070 ret = perf_session__deliver_synth_event(pt->session, event, &sample); in intel_pt_synth_instruction_sample()
1075 if (pt->synth_opts.last_branch) in intel_pt_synth_instruction_sample()
1084 struct intel_pt *pt = ptq->pt; in intel_pt_synth_transaction_sample() local
1092 if (!pt->timeless_decoding) in intel_pt_synth_transaction_sample()
1093 sample.time = tsc_to_perf_time(ptq->timestamp, &pt->tc); in intel_pt_synth_transaction_sample()
1099 sample.id = ptq->pt->transactions_id; in intel_pt_synth_transaction_sample()
1100 sample.stream_id = ptq->pt->transactions_id; in intel_pt_synth_transaction_sample()
1106 if (pt->synth_opts.callchain) { in intel_pt_synth_transaction_sample()
1108 pt->synth_opts.callchain_sz, sample.ip); in intel_pt_synth_transaction_sample()
1112 if (pt->synth_opts.last_branch) { in intel_pt_synth_transaction_sample()
1117 if (pt->synth_opts.inject) { in intel_pt_synth_transaction_sample()
1119 pt->transactions_sample_type, in intel_pt_synth_transaction_sample()
1120 pt->synth_needs_swap); in intel_pt_synth_transaction_sample()
1125 ret = perf_session__deliver_synth_event(pt->session, event, &sample); in intel_pt_synth_transaction_sample()
1130 if (pt->synth_opts.last_branch) in intel_pt_synth_transaction_sample()
1136 static int intel_pt_synth_error(struct intel_pt *pt, int code, int cpu, in intel_pt_synth_error() argument
1148 err = perf_session__deliver_synth_event(pt->session, &event, NULL); in intel_pt_synth_error()
1156 static int intel_pt_next_tid(struct intel_pt *pt, struct intel_pt_queue *ptq) in intel_pt_next_tid() argument
1167 err = machine__set_current_tid(pt->machine, ptq->cpu, -1, tid); in intel_pt_next_tid()
1169 queue = &pt->queues.queue_array[ptq->queue_nr]; in intel_pt_next_tid()
1170 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_next_tid()
1179 struct intel_pt *pt = ptq->pt; in intel_pt_is_switch_ip() local
1181 return ip == pt->switch_ip && in intel_pt_is_switch_ip()
1190 struct intel_pt *pt = ptq->pt; in intel_pt_sample() local
1198 if (pt->sample_instructions && in intel_pt_sample()
1205 if (pt->sample_transactions && in intel_pt_sample()
1215 if (pt->synth_opts.callchain) in intel_pt_sample()
1222 if (pt->sample_branches) { in intel_pt_sample()
1228 if (pt->synth_opts.last_branch) in intel_pt_sample()
1231 if (!pt->sync_switch) in intel_pt_sample()
1238 err = intel_pt_next_tid(pt, ptq); in intel_pt_sample()
1252 state->to_ip == pt->ptss_ip && in intel_pt_sample()
1260 static u64 intel_pt_switch_ip(struct intel_pt *pt, u64 *ptss_ip) in intel_pt_switch_ip() argument
1262 struct machine *machine = pt->machine; in intel_pt_switch_ip()
1294 if (pt->have_sched_switch == 1) in intel_pt_switch_ip()
1315 struct intel_pt *pt = ptq->pt; in intel_pt_run_decoder() local
1318 if (!pt->kernel_start) { in intel_pt_run_decoder()
1319 pt->kernel_start = machine__kernel_start(pt->machine); in intel_pt_run_decoder()
1320 if (pt->per_cpu_mmaps && in intel_pt_run_decoder()
1321 (pt->have_sched_switch == 1 || pt->have_sched_switch == 3) && in intel_pt_run_decoder()
1322 !pt->timeless_decoding && intel_pt_tracing_kernel(pt) && in intel_pt_run_decoder()
1323 !pt->sampling_mode) { in intel_pt_run_decoder()
1324 pt->switch_ip = intel_pt_switch_ip(pt, &pt->ptss_ip); in intel_pt_run_decoder()
1325 if (pt->switch_ip) { in intel_pt_run_decoder()
1327 pt->switch_ip, pt->ptss_ip); in intel_pt_run_decoder()
1328 pt->sync_switch = true; in intel_pt_run_decoder()
1344 if (pt->sync_switch && in intel_pt_run_decoder()
1345 state->from_ip >= pt->kernel_start) { in intel_pt_run_decoder()
1346 pt->sync_switch = false; in intel_pt_run_decoder()
1347 intel_pt_next_tid(pt, ptq); in intel_pt_run_decoder()
1349 if (pt->synth_opts.errors) { in intel_pt_run_decoder()
1350 err = intel_pt_synth_error(pt, state->err, in intel_pt_run_decoder()
1365 if (pt->est_tsc && in intel_pt_run_decoder()
1366 (state->from_ip >= pt->kernel_start || !state->from_ip) && in intel_pt_run_decoder()
1367 state->to_ip && state->to_ip < pt->kernel_start) { in intel_pt_run_decoder()
1372 } else if (pt->sync_switch && in intel_pt_run_decoder()
1383 if (!pt->timeless_decoding && ptq->timestamp >= *timestamp) { in intel_pt_run_decoder()
1391 static inline int intel_pt_update_queues(struct intel_pt *pt) in intel_pt_update_queues() argument
1393 if (pt->queues.new_data) { in intel_pt_update_queues()
1394 pt->queues.new_data = false; in intel_pt_update_queues()
1395 return intel_pt_setup_queues(pt); in intel_pt_update_queues()
1400 static int intel_pt_process_queues(struct intel_pt *pt, u64 timestamp) in intel_pt_process_queues() argument
1410 if (!pt->heap.heap_cnt) in intel_pt_process_queues()
1413 if (pt->heap.heap_array[0].ordinal >= timestamp) in intel_pt_process_queues()
1416 queue_nr = pt->heap.heap_array[0].queue_nr; in intel_pt_process_queues()
1417 queue = &pt->queues.queue_array[queue_nr]; in intel_pt_process_queues()
1421 queue_nr, pt->heap.heap_array[0].ordinal, in intel_pt_process_queues()
1424 auxtrace_heap__pop(&pt->heap); in intel_pt_process_queues()
1426 if (pt->heap.heap_cnt) { in intel_pt_process_queues()
1427 ts = pt->heap.heap_array[0].ordinal + 1; in intel_pt_process_queues()
1434 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_process_queues()
1439 auxtrace_heap__add(&pt->heap, queue_nr, ts); in intel_pt_process_queues()
1444 ret = auxtrace_heap__add(&pt->heap, queue_nr, ts); in intel_pt_process_queues()
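Lines 1400-1444 are the timestamp-ordered scheduling loop: pop the queue with the smallest timestamp ordinal off the auxtrace min-heap, run its decoder up to the runner-up's timestamp plus one, then re-add it with its new timestamp. A self-contained sketch of that merge pattern, using made-up event streams and a linear scan standing in for the heap:

    #include <stdint.h>
    #include <stdio.h>

    #define NR   3
    #define DONE UINT64_MAX

    /* Hypothetical per-queue event timestamps, ascending, DONE-terminated. */
    static uint64_t ev[NR][4] = {
            { 100, 300, 520, DONE },
            { 250, 260, DONE, DONE },
            {  40, 700, DONE, DONE },
    };
    static int pos[NR];

    /* Stand-in for intel_pt_run_decoder(): emit events until the limit,
     * leaving the queue positioned at its next pending timestamp. */
    static void run_until(int q, uint64_t limit)
    {
            while (ev[q][pos[q]] < limit)
                    printf("queue %d: event @%llu\n", q,
                           (unsigned long long)ev[q][pos[q]++]);
    }

    int main(void)
    {
            /* Merge loop in the spirit of intel_pt_process_queues():
             * pick the queue with the smallest pending timestamp,
             * decode it up to the runner-up's timestamp + 1, then
             * "re-add" it with its new timestamp. */
            for (;;) {
                    uint64_t best = DONE, second = DONE;
                    int q = -1;

                    for (int i = 0; i < NR; i++) {
                            uint64_t ts = ev[i][pos[i]];

                            if (ts < best) {
                                    second = best;
                                    best = ts;
                                    q = i;
                            } else if (ts < second) {
                                    second = ts;
                            }
                    }
                    if (q < 0)
                            break;
                    run_until(q, second == DONE ? DONE : second + 1);
            }
            return 0;
    }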
1455 static int intel_pt_process_timeless_queues(struct intel_pt *pt, pid_t tid, in intel_pt_process_timeless_queues() argument
1458 struct auxtrace_queues *queues = &pt->queues; in intel_pt_process_timeless_queues()
1463 struct auxtrace_queue *queue = &pt->queues.queue_array[i]; in intel_pt_process_timeless_queues()
1468 intel_pt_set_pid_tid_cpu(pt, queue); in intel_pt_process_timeless_queues()
1475 static int intel_pt_lost(struct intel_pt *pt, struct perf_sample *sample) in intel_pt_lost() argument
1477 return intel_pt_synth_error(pt, INTEL_PT_ERR_LOST, sample->cpu, in intel_pt_lost()
1481 static struct intel_pt_queue *intel_pt_cpu_to_ptq(struct intel_pt *pt, int cpu) in intel_pt_cpu_to_ptq() argument
1485 if (cpu < 0 || !pt->queues.nr_queues) in intel_pt_cpu_to_ptq()
1488 if ((unsigned)cpu >= pt->queues.nr_queues) in intel_pt_cpu_to_ptq()
1489 i = pt->queues.nr_queues - 1; in intel_pt_cpu_to_ptq()
1493 if (pt->queues.queue_array[i].cpu == cpu) in intel_pt_cpu_to_ptq()
1494 return pt->queues.queue_array[i].priv; in intel_pt_cpu_to_ptq()
1497 if (pt->queues.queue_array[--i].cpu == cpu) in intel_pt_cpu_to_ptq()
1498 return pt->queues.queue_array[i].priv; in intel_pt_cpu_to_ptq()
1501 for (; j < pt->queues.nr_queues; j++) { in intel_pt_cpu_to_ptq()
1502 if (pt->queues.queue_array[j].cpu == cpu) in intel_pt_cpu_to_ptq()
1503 return pt->queues.queue_array[j].priv; in intel_pt_cpu_to_ptq()
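intel_pt_cpu_to_ptq() (lines 1481-1503) assumes queue i usually serves cpu i: it probes index cpu first (clamped to the last queue), then scans backwards, then forwards. A standalone sketch of that search order, with an illustrative queue type:

    #include <stddef.h>
    #include <stdio.h>

    struct queue { int cpu; void *priv; };

    /* Find the queue for a cpu, assuming queue i usually serves cpu i:
     * probe index cpu first (clamped), then scan backwards, then
     * forwards, mirroring the order of intel_pt_cpu_to_ptq(). */
    static void *cpu_to_priv(struct queue *qs, unsigned int nr, int cpu)
    {
            unsigned int i, j;

            if (cpu < 0 || !nr)
                    return NULL;

            i = (unsigned int)cpu >= nr ? nr - 1 : (unsigned int)cpu;

            if (qs[i].cpu == cpu)
                    return qs[i].priv;

            for (j = i; j > 0; )            /* backwards from i */
                    if (qs[--j].cpu == cpu)
                            return qs[j].priv;

            for (j = i + 1; j < nr; j++)    /* forwards past i */
                    if (qs[j].cpu == cpu)
                            return qs[j].priv;

            return NULL;
    }

    int main(void)
    {
            struct queue qs[] = { { 0, "q0" }, { 2, "q2" }, { 1, "q1" } };

            printf("%s\n", (char *)cpu_to_priv(qs, 3, 2)); /* "q2" */
            return 0;
    }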
1509 static int intel_pt_sync_switch(struct intel_pt *pt, int cpu, pid_t tid, in intel_pt_sync_switch() argument
1515 if (!pt->sync_switch) in intel_pt_sync_switch()
1518 ptq = intel_pt_cpu_to_ptq(pt, cpu); in intel_pt_sync_switch()
1534 &pt->tc); in intel_pt_sync_switch()
1535 err = auxtrace_heap__add(&pt->heap, ptq->queue_nr, in intel_pt_sync_switch()
1554 static int intel_pt_process_switch(struct intel_pt *pt, in intel_pt_process_switch() argument
1561 evsel = perf_evlist__id2evsel(pt->session->evlist, sample->id); in intel_pt_process_switch()
1562 if (evsel != pt->switch_evsel) in intel_pt_process_switch()
1570 &pt->tc)); in intel_pt_process_switch()
1572 ret = intel_pt_sync_switch(pt, cpu, tid, sample->time); in intel_pt_process_switch()
1576 return machine__set_current_tid(pt->machine, cpu, -1, tid); in intel_pt_process_switch()
1579 static int intel_pt_context_switch(struct intel_pt *pt, union perf_event *event, in intel_pt_context_switch() argument
1588 if (pt->have_sched_switch == 3) { in intel_pt_context_switch()
1611 &pt->tc)); in intel_pt_context_switch()
1613 ret = intel_pt_sync_switch(pt, cpu, tid, sample->time); in intel_pt_context_switch()
1617 return machine__set_current_tid(pt->machine, cpu, pid, tid); in intel_pt_context_switch()
1620 static int intel_pt_process_itrace_start(struct intel_pt *pt, in intel_pt_process_itrace_start() argument
1624 if (!pt->per_cpu_mmaps) in intel_pt_process_itrace_start()
1630 perf_time_to_tsc(sample->time, &pt->tc)); in intel_pt_process_itrace_start()
1632 return machine__set_current_tid(pt->machine, sample->cpu, in intel_pt_process_itrace_start()
1642 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_process_event() local
1656 timestamp = perf_time_to_tsc(sample->time, &pt->tc); in intel_pt_process_event()
1660 if (timestamp || pt->timeless_decoding) { in intel_pt_process_event()
1661 err = intel_pt_update_queues(pt); in intel_pt_process_event()
1666 if (pt->timeless_decoding) { in intel_pt_process_event()
1668 err = intel_pt_process_timeless_queues(pt, in intel_pt_process_event()
1673 err = intel_pt_process_queues(pt, timestamp); in intel_pt_process_event()
1680 pt->synth_opts.errors) { in intel_pt_process_event()
1681 err = intel_pt_lost(pt, sample); in intel_pt_process_event()
1686 if (pt->switch_evsel && event->header.type == PERF_RECORD_SAMPLE) in intel_pt_process_event()
1687 err = intel_pt_process_switch(pt, sample); in intel_pt_process_event()
1689 err = intel_pt_process_itrace_start(pt, event, sample); in intel_pt_process_event()
1692 err = intel_pt_context_switch(pt, event, sample); in intel_pt_process_event()
1703 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_flush() local
1713 ret = intel_pt_update_queues(pt); in intel_pt_flush()
1717 if (pt->timeless_decoding) in intel_pt_flush()
1718 return intel_pt_process_timeless_queues(pt, -1, in intel_pt_flush()
1721 return intel_pt_process_queues(pt, MAX_TIMESTAMP); in intel_pt_flush()
1726 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_free_events() local
1728 struct auxtrace_queues *queues = &pt->queues; in intel_pt_free_events()
1741 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_free() local
1744 auxtrace_heap__free(&pt->heap); in intel_pt_free()
1747 thread__delete(pt->unknown_thread); in intel_pt_free()
1748 free(pt); in intel_pt_free()
1755 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_process_auxtrace_event() local
1758 if (pt->sampling_mode) in intel_pt_process_auxtrace_event()
1761 if (!pt->data_queued) { in intel_pt_process_auxtrace_event()
1775 err = auxtrace_queues__add_event(&pt->queues, session, event, in intel_pt_process_auxtrace_event()
1783 intel_pt_dump_event(pt, buffer->data, in intel_pt_process_auxtrace_event()
1822 static int intel_pt_synth_events(struct intel_pt *pt, in intel_pt_synth_events() argument
1833 if (evsel->attr.type == pt->pmu_type && evsel->ids) { in intel_pt_synth_events()
1850 if (pt->timeless_decoding) in intel_pt_synth_events()
1854 if (!pt->per_cpu_mmaps) in intel_pt_synth_events()
1868 if (pt->synth_opts.instructions) { in intel_pt_synth_events()
1870 if (pt->synth_opts.period_type == PERF_ITRACE_PERIOD_NANOSECS) in intel_pt_synth_events()
1872 intel_pt_ns_to_ticks(pt, pt->synth_opts.period); in intel_pt_synth_events()
1874 attr.sample_period = pt->synth_opts.period; in intel_pt_synth_events()
1875 pt->instructions_sample_period = attr.sample_period; in intel_pt_synth_events()
1876 if (pt->synth_opts.callchain) in intel_pt_synth_events()
1878 if (pt->synth_opts.last_branch) in intel_pt_synth_events()
1888 pt->sample_instructions = true; in intel_pt_synth_events()
1889 pt->instructions_sample_type = attr.sample_type; in intel_pt_synth_events()
1890 pt->instructions_id = id; in intel_pt_synth_events()
1894 if (pt->synth_opts.transactions) { in intel_pt_synth_events()
1897 if (pt->synth_opts.callchain) in intel_pt_synth_events()
1899 if (pt->synth_opts.last_branch) in intel_pt_synth_events()
1909 pt->sample_transactions = true; in intel_pt_synth_events()
1910 pt->transactions_id = id; in intel_pt_synth_events()
1913 if (evsel->id && evsel->id[0] == pt->transactions_id) { in intel_pt_synth_events()
1922 if (pt->synth_opts.branches) { in intel_pt_synth_events()
1936 pt->sample_branches = true; in intel_pt_synth_events()
1937 pt->branches_sample_type = attr.sample_type; in intel_pt_synth_events()
1938 pt->branches_id = id; in intel_pt_synth_events()
1941 pt->synth_needs_swap = evsel->needs_swap; in intel_pt_synth_events()
1974 struct intel_pt *pt = data; in intel_pt_perf_config() local
1977 pt->mispred_all = perf_config_bool(var, value); in intel_pt_perf_config()
2015 struct intel_pt *pt; in intel_pt_process_auxtrace_info() local
2022 pt = zalloc(sizeof(struct intel_pt)); in intel_pt_process_auxtrace_info()
2023 if (!pt) in intel_pt_process_auxtrace_info()
2026 perf_config(intel_pt_perf_config, pt); in intel_pt_process_auxtrace_info()
2028 err = auxtrace_queues__init(&pt->queues); in intel_pt_process_auxtrace_info()
2034 pt->session = session; in intel_pt_process_auxtrace_info()
2035 pt->machine = &session->machines.host; /* No kvm support */ in intel_pt_process_auxtrace_info()
2036 pt->auxtrace_type = auxtrace_info->type; in intel_pt_process_auxtrace_info()
2037 pt->pmu_type = auxtrace_info->priv[INTEL_PT_PMU_TYPE]; in intel_pt_process_auxtrace_info()
2038 pt->tc.time_shift = auxtrace_info->priv[INTEL_PT_TIME_SHIFT]; in intel_pt_process_auxtrace_info()
2039 pt->tc.time_mult = auxtrace_info->priv[INTEL_PT_TIME_MULT]; in intel_pt_process_auxtrace_info()
2040 pt->tc.time_zero = auxtrace_info->priv[INTEL_PT_TIME_ZERO]; in intel_pt_process_auxtrace_info()
2041 pt->cap_user_time_zero = auxtrace_info->priv[INTEL_PT_CAP_USER_TIME_ZERO]; in intel_pt_process_auxtrace_info()
2042 pt->tsc_bit = auxtrace_info->priv[INTEL_PT_TSC_BIT]; in intel_pt_process_auxtrace_info()
2043 pt->noretcomp_bit = auxtrace_info->priv[INTEL_PT_NORETCOMP_BIT]; in intel_pt_process_auxtrace_info()
2044 pt->have_sched_switch = auxtrace_info->priv[INTEL_PT_HAVE_SCHED_SWITCH]; in intel_pt_process_auxtrace_info()
2045 pt->snapshot_mode = auxtrace_info->priv[INTEL_PT_SNAPSHOT_MODE]; in intel_pt_process_auxtrace_info()
2046 pt->per_cpu_mmaps = auxtrace_info->priv[INTEL_PT_PER_CPU_MMAPS]; in intel_pt_process_auxtrace_info()
2052 pt->mtc_bit = auxtrace_info->priv[INTEL_PT_MTC_BIT]; in intel_pt_process_auxtrace_info()
2053 pt->mtc_freq_bits = auxtrace_info->priv[INTEL_PT_MTC_FREQ_BITS]; in intel_pt_process_auxtrace_info()
2054 pt->tsc_ctc_ratio_n = auxtrace_info->priv[INTEL_PT_TSC_CTC_N]; in intel_pt_process_auxtrace_info()
2055 pt->tsc_ctc_ratio_d = auxtrace_info->priv[INTEL_PT_TSC_CTC_D]; in intel_pt_process_auxtrace_info()
2056 pt->cyc_bit = auxtrace_info->priv[INTEL_PT_CYC_BIT]; in intel_pt_process_auxtrace_info()
2061 pt->timeless_decoding = intel_pt_timeless_decoding(pt); in intel_pt_process_auxtrace_info()
2062 pt->have_tsc = intel_pt_have_tsc(pt); in intel_pt_process_auxtrace_info()
2063 pt->sampling_mode = false; in intel_pt_process_auxtrace_info()
2064 pt->est_tsc = !pt->timeless_decoding; in intel_pt_process_auxtrace_info()
2066 pt->unknown_thread = thread__new(999999999, 999999999); in intel_pt_process_auxtrace_info()
2067 if (!pt->unknown_thread) { in intel_pt_process_auxtrace_info()
2071 err = thread__set_comm(pt->unknown_thread, "unknown", 0); in intel_pt_process_auxtrace_info()
2074 if (thread__init_map_groups(pt->unknown_thread, pt->machine)) { in intel_pt_process_auxtrace_info()
2079 pt->auxtrace.process_event = intel_pt_process_event; in intel_pt_process_auxtrace_info()
2080 pt->auxtrace.process_auxtrace_event = intel_pt_process_auxtrace_event; in intel_pt_process_auxtrace_info()
2081 pt->auxtrace.flush_events = intel_pt_flush; in intel_pt_process_auxtrace_info()
2082 pt->auxtrace.free_events = intel_pt_free_events; in intel_pt_process_auxtrace_info()
2083 pt->auxtrace.free = intel_pt_free; in intel_pt_process_auxtrace_info()
2084 session->auxtrace = &pt->auxtrace; in intel_pt_process_auxtrace_info()
2089 if (pt->have_sched_switch == 1) { in intel_pt_process_auxtrace_info()
2090 pt->switch_evsel = intel_pt_find_sched_switch(session->evlist); in intel_pt_process_auxtrace_info()
2091 if (!pt->switch_evsel) { in intel_pt_process_auxtrace_info()
2095 } else if (pt->have_sched_switch == 2 && in intel_pt_process_auxtrace_info()
2102 pt->synth_opts = *session->itrace_synth_opts; in intel_pt_process_auxtrace_info()
2104 itrace_synth_opts__set_default(&pt->synth_opts); in intel_pt_process_auxtrace_info()
2106 pt->synth_opts.branches = false; in intel_pt_process_auxtrace_info()
2107 pt->synth_opts.callchain = true; in intel_pt_process_auxtrace_info()
2111 if (pt->synth_opts.log) in intel_pt_process_auxtrace_info()
2115 if (pt->tc.time_mult) { in intel_pt_process_auxtrace_info()
2116 u64 tsc_freq = intel_pt_ns_to_ticks(pt, 1000000000); in intel_pt_process_auxtrace_info()
2118 pt->max_non_turbo_ratio = (tsc_freq + 50000000) / 100000000; in intel_pt_process_auxtrace_info()
2121 pt->max_non_turbo_ratio); in intel_pt_process_auxtrace_info()
2124 if (pt->synth_opts.calls) in intel_pt_process_auxtrace_info()
2125 pt->branches_filter |= PERF_IP_FLAG_CALL | PERF_IP_FLAG_ASYNC | in intel_pt_process_auxtrace_info()
2127 if (pt->synth_opts.returns) in intel_pt_process_auxtrace_info()
2128 pt->branches_filter |= PERF_IP_FLAG_RETURN | in intel_pt_process_auxtrace_info()
2131 if (pt->synth_opts.callchain && !symbol_conf.use_callchain) { in intel_pt_process_auxtrace_info()
2135 pt->synth_opts.callchain = false; in intel_pt_process_auxtrace_info()
2139 err = intel_pt_synth_events(pt, session); in intel_pt_process_auxtrace_info()
2143 err = auxtrace_queues__process_index(&pt->queues, session); in intel_pt_process_auxtrace_info()
2147 if (pt->queues.populated) in intel_pt_process_auxtrace_info()
2148 pt->data_queued = true; in intel_pt_process_auxtrace_info()
2150 if (pt->timeless_decoding) in intel_pt_process_auxtrace_info()
2156 thread__delete(pt->unknown_thread); in intel_pt_process_auxtrace_info()
2159 auxtrace_queues__free(&pt->queues); in intel_pt_process_auxtrace_info()
2162 free(pt); in intel_pt_process_auxtrace_info()
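Finally, lines 2115-2121 derive the max non-turbo ratio from the time-conversion parameters: one second's worth of ticks is the TSC frequency, and the ratio is that frequency rounded to the nearest multiple of the 100 MHz bus clock. A worked example with an illustrative frequency:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            /* Illustrative: intel_pt_ns_to_ticks(pt, 1000000000) yields
             * the TSC frequency; assume ~2.3 GHz here. */
            uint64_t tsc_freq = 2294686000ULL;

            /* Round to the nearest multiple of the 100 MHz bus clock,
             * as at line 2118. */
            unsigned int ratio = (tsc_freq + 50000000) / 100000000;

            printf("max non-turbo ratio: %u\n", ratio); /* 23 */
            return 0;
    }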