Lines Matching refs:pc

  102  __kernel_text_address(p->pc) &&  in valid_fault_handler()
  114  p->pc, p->sp, p->ex1);  in valid_fault_handler()
  123  static int is_sigreturn(unsigned long pc)  in is_sigreturn()  (argument)
  125  return current->mm && (pc == VDSO_SYM(&__vdso_rt_sigreturn));  in is_sigreturn()
  134  if (is_sigreturn(b->pc) && b->sp < PAGE_OFFSET &&  in valid_sigframe()
  156  return is_sigreturn(kbt->it.pc);  in KBacktraceIterator_is_sigreturn()
  170  p->pc, p->lr, p->sp, p->regs[52]);  in KBacktraceIterator_restart()
  208  cpu, ksp0_base, ksp0, sp, regs->sp, regs->pc, regs->lr);  in validate_stack()
  214  cpu, ksp0_base, ksp0, sp, regs->sp, regs->pc, regs->lr);  in validate_stack()
  221  unsigned long pc, lr, sp, r52;  in KBacktraceIterator_init()  (local)
  246  pc = get_switch_to_pc();  in KBacktraceIterator_init()
  247  lr = t->thread.pc;  in KBacktraceIterator_init()
  251  pc = regs->pc;  in KBacktraceIterator_init()
  257  backtrace_init(&kbt->it, read_memory_func, kbt, pc, lr, sp, r52);  in KBacktraceIterator_init()
  270  unsigned long old_pc = kbt->it.pc, old_sp = kbt->it.sp;  in KBacktraceIterator_next()
  277  if (old_pc == kbt->it.pc && old_sp == kbt->it.sp) {  in KBacktraceIterator_next()
  399  unsigned long address = kbt->it.pc;  in tile_show_stack()
  438  ulong pc, ulong lr, ulong sp, ulong r52)  in regs_to_pt_regs()  (argument)
  441  regs->pc = pc;  in regs_to_pt_regs()
  449  void _dump_stack(int dummy, ulong pc, ulong lr, ulong sp, ulong r52)  in _dump_stack()  (argument)
  452  dump_stack_regs(regs_to_pt_regs(&regs, pc, lr, sp, r52));  in _dump_stack()
  456  void _KBacktraceIterator_init_current(struct KBacktraceIterator *kbt, ulong pc,  in _KBacktraceIterator_init_current()  (argument)
  461  regs_to_pt_regs(&regs, pc, lr, sp, r52));  in _KBacktraceIterator_init_current()
  496  if (i >= trace->max_entries || kbt.it.pc < PAGE_OFFSET)  in save_stack_trace_tsk()
  498  trace->entries[i++] = kbt.it.pc;  in save_stack_trace_tsk()
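
Read together, these references trace one flow: KBacktraceIterator_init() seeds the iterator's pc, either from regs->pc or from get_switch_to_pc()/t->thread.pc for a sleeping task (lines 246-251), and hands it to backtrace_init() (line 257); KBacktraceIterator_next() compares old_pc/old_sp against the new frame to detect a stuck unwind (lines 270, 277); consumers such as tile_show_stack() and save_stack_trace_tsk() then read each frame's kbt.it.pc (lines 399, 496-498). The fragment below is a minimal, illustrative consumer in that style, not code from this file; record_pcs() is a made-up name, and the KBacktraceIterator_end() test is assumed from the usual tile backtrace API alongside the _init()/_next() helpers seen in the listing.

#include <linux/sched.h>
#include <asm/page.h>	/* PAGE_OFFSET */
#include <asm/stack.h>	/* struct KBacktraceIterator (assumed header) */

/*
 * Walk a task's stack and record each frame's program counter, stopping
 * once pc drops below PAGE_OFFSET (a userspace address) or the buffer is
 * full, the same cutoff save_stack_trace_tsk() applies at lines 496-498.
 */
static int record_pcs(struct task_struct *t, struct pt_regs *regs,
		      unsigned long *entries, int max_entries)
{
	struct KBacktraceIterator kbt;
	int i = 0;

	/* Seeds kbt.it.pc from regs->pc or the switch_to pc (lines 246-251). */
	KBacktraceIterator_init(&kbt, t, regs);

	for (; !KBacktraceIterator_end(&kbt); KBacktraceIterator_next(&kbt)) {
		if (i >= max_entries || kbt.it.pc < PAGE_OFFSET)
			break;
		entries[i++] = kbt.it.pc;
	}
	return i;
}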