runnable_load_avg  403 include/linux/sched.h 	unsigned long			runnable_load_avg;
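For context, this declaration sits in struct sched_avg in include/linux/sched.h. A sketch of the surrounding structure, based on mainline around v4.19 (field order and kernel line numbers vary by version):

	struct sched_avg {
		u64				last_update_time;
		u64				load_sum;
		u64				runnable_load_sum;
		u32				util_sum;
		u32				period_contrib;
		unsigned long			load_avg;
		unsigned long			runnable_load_avg;	/* this hit */
		unsigned long			util_avg;
		struct util_est			util_est;
	} ____cacheline_aligned;

Unlike load_avg, which also carries blocked contributions, runnable_load_avg only accumulates while the entity is actually queued on a runqueue, so it drops immediately when tasks migrate away.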
runnable_load_avg  409 kernel/sched/debug.c 	P(se->avg.runnable_load_avg);
runnable_load_avg  531 kernel/sched/debug.c 			cfs_rq->avg.runnable_load_avg);
runnable_load_avg  949 kernel/sched/debug.c 	P(se.avg.runnable_load_avg);
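The three debug.c hits are read-only reporting: print_cfs_group_stats() and proc_sched_show_task() print the per-entity value through the file's local P() macro, and print_cfs_rq() prints the per-cfs_rq aggregate; the values surface in /proc/sched_debug and /proc/<pid>/sched. The cfs_rq site looks roughly like this in v4.19 (the exact format string may differ by version):

	SEQ_printf(m, "  .%-30s: %lu\n", "runnable_load_avg",
			cfs_rq->avg.runnable_load_avg);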
runnable_load_avg  745 kernel/sched/fair.c 		sa->runnable_load_avg = sa->load_avg = scale_load_down(se->load.weight);
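This hit is in init_entity_runnable_average(), which seeds a new task at full weight so it is treated as a heavy task until PELT converges on its real load. A sketch, assuming the v4.19 shape of the function:

	void init_entity_runnable_average(struct sched_entity *se)
	{
		struct sched_avg *sa = &se->avg;

		memset(sa, 0, sizeof(*sa));

		/*
		 * Tasks start at full load; group entities start at zero,
		 * since nothing has been attached to the task group yet.
		 */
		if (entity_is_task(se))
			sa->runnable_load_avg = sa->load_avg = scale_load_down(se->load.weight);

		se->runnable_weight = se->load.weight;
	}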
runnable_load_avg 2838 kernel/sched/fair.c 	cfs_rq->avg.runnable_load_avg += se->avg.runnable_load_avg;
runnable_load_avg 2847 kernel/sched/fair.c 	sub_positive(&cfs_rq->avg.runnable_load_avg, se->avg.runnable_load_avg);
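These two hits are the enqueue/dequeue pair that folds an entity's runnable load into (and out of) its cfs_rq as the entity goes on and off the runqueue; sub_positive() clamps at zero so accumulated rounding error cannot underflow the aggregate. Sketch from around v4.19:

	static inline void
	enqueue_runnable_load_avg(struct cfs_rq *cfs_rq, struct sched_entity *se)
	{
		cfs_rq->runnable_weight += se->runnable_weight;

		cfs_rq->avg.runnable_load_avg += se->avg.runnable_load_avg;
		cfs_rq->avg.runnable_load_sum += se_runnable(se) * se->avg.runnable_load_sum;
	}

	static inline void
	dequeue_runnable_load_avg(struct cfs_rq *cfs_rq, struct sched_entity *se)
	{
		cfs_rq->runnable_weight -= se->runnable_weight;

		sub_positive(&cfs_rq->avg.runnable_load_avg, se->avg.runnable_load_avg);
		sub_positive(&cfs_rq->avg.runnable_load_sum,
			     se_runnable(se) * se->avg.runnable_load_sum);
	}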
runnable_load_avg 2896 kernel/sched/fair.c 		se->avg.runnable_load_avg =
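This continuation line is in reweight_entity(): when an entity's weight or runnable weight changes, the averages are recomputed from the raw *_sum values so they stay consistent with the new weight. Roughly, per the v4.19 code:

	u32 divider = LOAD_AVG_MAX - 1024 + se->avg.period_contrib;

	se->avg.load_avg = div_u64(se_weight(se) * se->avg.load_sum, divider);
	se->avg.runnable_load_avg =
		div_u64(se_runnable(se) * se->avg.runnable_load_sum, divider);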
runnable_load_avg 3062 kernel/sched/fair.c 	runnable = max(cfs_rq->avg.runnable_load_avg,
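This hit is in calc_group_runnable(), which derives a group entity's runnable weight by scaling the group's shares by the ratio of runnable load to total load; the max() against the instantaneous weights guards against a stale, too-small average. Sketch (v4.19):

	static long calc_group_runnable(struct cfs_rq *cfs_rq, long shares)
	{
		long runnable, load_avg;

		load_avg = max(cfs_rq->avg.load_avg,
			       scale_load_down(cfs_rq->load.weight));

		runnable = max(cfs_rq->avg.runnable_load_avg,
			       scale_load_down(cfs_rq->runnable_weight));

		runnable *= shares;
		if (load_avg)
			runnable /= load_avg;

		return clamp_t(long, runnable, MIN_SHARES, shares);
	}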
runnable_load_avg 3313 kernel/sched/fair.c 	unsigned long runnable_load_avg, load_avg;
runnable_load_avg 3364 kernel/sched/fair.c 	runnable_load_avg = div_s64(runnable_load_sum, LOAD_AVG_MAX);
runnable_load_avg 3366 kernel/sched/fair.c 	delta_avg = runnable_load_avg - se->avg.runnable_load_avg;
runnable_load_avg 3369 kernel/sched/fair.c 	se->avg.runnable_load_avg = runnable_load_avg;
runnable_load_avg 3372 kernel/sched/fair.c 		add_positive(&cfs_rq->avg.runnable_load_avg, delta_avg);
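These five hits all fall inside update_tg_cfs_runnable(), the propagation step that pushes a child cfs_rq's runnable sum up into its group entity and, if that entity is enqueued, into the parent cfs_rq. An abridged sketch, assuming the v4.19 code (the clamping/estimation of runnable_sum and the parallel load_avg update are elided):

	static inline void
	update_tg_cfs_runnable(struct cfs_rq *cfs_rq, struct sched_entity *se,
			       struct cfs_rq *gcfs_rq)
	{
		long runnable_sum = gcfs_rq->prop_runnable_sum;
		unsigned long runnable_load_avg, load_avg;
		u64 runnable_load_sum, load_sum = 0;
		s64 delta_sum;
		long delta_avg;

		if (!runnable_sum)
			return;

		gcfs_rq->prop_runnable_sum = 0;

		/* ... clamp/estimate runnable_sum, update se->avg.load_avg ... */

		runnable_load_sum = (s64)se_runnable(se) * runnable_sum;
		runnable_load_avg = div_s64(runnable_load_sum, LOAD_AVG_MAX);
		delta_sum = runnable_load_sum -
			    se_runnable(se) * se->avg.runnable_load_sum;
		delta_avg = runnable_load_avg - se->avg.runnable_load_avg;

		se->avg.runnable_load_sum = runnable_sum;
		se->avg.runnable_load_avg = runnable_load_avg;

		/* only entities on the runqueue contribute to the parent */
		if (se->on_rq) {
			add_positive(&cfs_rq->avg.runnable_load_avg, delta_avg);
			add_positive(&cfs_rq->avg.runnable_load_sum, delta_sum);
		}
	}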
runnable_load_avg 3685 kernel/sched/fair.c 	return cfs_rq->avg.runnable_load_avg;
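This hit is the accessor that exposes the aggregate to the load balancer, which prefers runnable_load_avg over load_avg here because it excludes blocked load. The v4.19 definition, with a typical caller:

	static inline unsigned long cfs_rq_runnable_load_avg(struct cfs_rq *cfs_rq)
	{
		return cfs_rq->avg.runnable_load_avg;
	}

	static unsigned long weighted_cpuload(struct rq *rq)
	{
		return cfs_rq_runnable_load_avg(&rq->cfs);
	}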
runnable_load_avg  235 kernel/sched/pelt.c 	sa->runnable_load_avg = div_u64(runnable * sa->runnable_load_sum, divider);
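The pelt.c hit is the final step of ___update_load_avg(): once the decayed geometric-series sums are up to date, each *_avg is derived by dividing the corresponding *_sum by the maximum sum attainable in the current period. Sketch (v4.19):

	static __always_inline void
	___update_load_avg(struct sched_avg *sa, unsigned long load, unsigned long runnable)
	{
		u32 divider = LOAD_AVG_MAX - 1024 + sa->period_contrib;

		/*
		 * Step 2: update *_avg from the decayed *_sum.
		 */
		sa->load_avg = div_u64(load * sa->load_sum, divider);
		sa->runnable_load_avg = div_u64(runnable * sa->runnable_load_sum, divider);
		WRITE_ONCE(sa->util_avg, sa->util_sum / divider);
	}

The divider is LOAD_AVG_MAX adjusted by period_contrib for the partially elapsed current 1024us period, so a continuously runnable entity converges to its full weight.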