avg_idle 2234 kernel/sched/core.c update_avg(&rq->avg_idle, delta);
avg_idle 2236 kernel/sched/core.c if (rq->avg_idle > max)
avg_idle 2237 kernel/sched/core.c rq->avg_idle = max;
avg_idle 6672 kernel/sched/core.c rq->avg_idle = 2*sysctl_sched_migration_cost;
avg_idle 654 kernel/sched/debug.c P64(avg_idle);
avg_idle 5962 kernel/sched/fair.c u64 avg_cost, avg_idle;
avg_idle 5976 kernel/sched/fair.c avg_idle = this_rq()->avg_idle / 512;
avg_idle 5979 kernel/sched/fair.c if (sched_feat(SIS_AVG_CPU) && avg_idle < avg_cost)
avg_idle 5983 kernel/sched/fair.c u64 span_avg = sd->span_weight * avg_idle;
avg_idle 9764 kernel/sched/fair.c if (this_rq->avg_idle < sysctl_sched_migration_cost)
avg_idle 9828 kernel/sched/fair.c if (this_rq->avg_idle < sysctl_sched_migration_cost ||
avg_idle 9853 kernel/sched/fair.c if (this_rq->avg_idle < curr_cost + sd->max_newidle_lb_cost) {
avg_idle 954 kernel/sched/sched.h u64 avg_idle;