load_avg — references in the kernel sources:

include/linux/sched.h
    402  unsigned long load_avg;

kernel/sched/debug.c
    407  P(se->avg.load_avg);
    529  cfs_rq->avg.load_avg);
    537  cfs_rq->removed.load_avg);
    546  atomic_long_read(&cfs_rq->tg->load_avg));
    948  P(se.avg.load_avg);

kernel/sched/fair.c
    745  sa->runnable_load_avg = sa->load_avg = scale_load_down(se->load.weight);
    791  sa->util_avg /= (cfs_rq->avg.load_avg + 1);
   2855  cfs_rq->avg.load_avg += se->avg.load_avg;
   2862  sub_positive(&cfs_rq->avg.load_avg, se->avg.load_avg);
   2895  se->avg.load_avg = div_u64(se_weight(se) * se->avg.load_sum, divider);
   3001  load = max(scale_load_down(cfs_rq->load.weight), cfs_rq->avg.load_avg);
   3003  tg_weight = atomic_long_read(&tg->load_avg);
   3057  long runnable, load_avg;
   3059  load_avg = max(cfs_rq->avg.load_avg,
   3066  if (load_avg)
   3067  runnable /= load_avg;
   3151  long delta = cfs_rq->avg.load_avg - cfs_rq->tg_load_avg_contrib;
   3160  atomic_long_add(delta, &cfs_rq->tg->load_avg);
   3161  cfs_rq->tg_load_avg_contrib = cfs_rq->avg.load_avg;
   3313  unsigned long runnable_load_avg, load_avg;
   3353  load_avg = div_s64(load_sum, LOAD_AVG_MAX);
   3356  delta_avg = load_avg - se->avg.load_avg;
   3359  se->avg.load_avg = load_avg;
   3360  add_positive(&cfs_rq->avg.load_avg, delta_avg);
   3422  if (se->avg.load_avg || se->avg.util_avg)
   3482  swap(cfs_rq->removed.load_avg, removed_load);
   3488  sub_positive(&sa->load_avg, r);
   3544  div_u64(se->avg.load_avg * se->avg.load_sum, se_weight(se));
   3678  cfs_rq->removed.load_avg += se->avg.load_avg;
   3690  return cfs_rq->avg.load_avg;
   5412  unsigned long load_avg = cpu_runnable_load(rq);
   5415  return load_avg / nr_running;
   7508  if (cfs_rq->avg.load_avg)
   7657  load = div64_ul(load * se->avg.load_avg,
   7670  return div64_ul(p->se.avg.load_avg * cfs_rq->h_load,
   7688  return p->se.avg.load_avg;

kernel/sched/pelt.c
    234  sa->load_avg = div_u64(load * sa->load_sum, divider);

kernel/sched/sched.h
    377  atomic_long_t load_avg ____cacheline_aligned;
    535  unsigned long load_avg;
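Several of the references above (kernel/sched/pelt.c:234 and kernel/sched/fair.c:2895) show the same relation: load_avg is derived from the decayed running sum load_sum, scaled by the entity's load weight and divided by the maximum value the PELT series can reach. The sketch below is a minimal userspace illustration of that relation, not the kernel implementation: the struct sched_avg_sketch type, the update_load_avg_sketch() helper, and the simplified div_u64() wrapper are invented for this example, and LOAD_AVG_MAX is treated as a plain constant (47742, the commonly cited maximum of the PELT geometric series) rather than the kernel's derived value.

/*
 * Userspace sketch of the load_avg/load_sum relation, assuming a fixed
 * LOAD_AVG_MAX and a simplified div_u64(). Names ending in _sketch are
 * hypothetical and do not exist in the kernel.
 */
#include <stdint.h>
#include <stdio.h>

#define LOAD_AVG_MAX 47742ULL	/* assumed max of the decayed geometric series */

struct sched_avg_sketch {
	uint64_t load_sum;	/* decayed sum of runnable time */
	unsigned long load_avg;	/* weight-scaled average derived from load_sum */
};

/* Simplified stand-in for the kernel's div_u64(): plain 64-bit division. */
static inline uint64_t div_u64(uint64_t dividend, uint32_t divisor)
{
	return dividend / divisor;
}

/* Recompute load_avg from load_sum for an entity with the given load weight. */
static void update_load_avg_sketch(struct sched_avg_sketch *sa, unsigned long load)
{
	uint32_t divider = LOAD_AVG_MAX;

	sa->load_avg = div_u64(load * sa->load_sum, divider);
}

int main(void)
{
	/* An entity that has been runnable roughly half of the time. */
	struct sched_avg_sketch sa = { .load_sum = LOAD_AVG_MAX / 2 };

	update_load_avg_sketch(&sa, 1024);	/* default nice-0 weight */
	printf("load_avg = %lu\n", sa.load_avg);	/* prints 512 */
	return 0;
}

Under these assumptions an entity with the default weight of 1024 that is runnable about half the time settles at a load_avg of roughly 512, which matches the intuition behind the additions and subtractions of se->avg.load_avg into cfs_rq->avg.load_avg seen at fair.c:2855 and fair.c:2862.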