Lines matching refs:sgc (a sketch of the referenced structure follows the listing)
4770 avg_load = (avg_load * SCHED_CAPACITY_SCALE) / group->sgc->capacity; in find_idlest_group()
6119 sdg->sgc->capacity = capacity; in update_cpu_capacity()
6131 sdg->sgc->next_update = jiffies + interval; in update_group_capacity()
6147 struct sched_group_capacity *sgc; in update_group_capacity() local
6166 sgc = rq->sd->groups->sgc; in update_group_capacity()
6167 capacity += sgc->capacity; in update_group_capacity()
6177 capacity += group->sgc->capacity; in update_group_capacity()
6182 sdg->sgc->capacity = capacity; in update_group_capacity()
6228 return group->sgc->imbalance; in sg_imbalanced()
6335 sgs->group_capacity = group->sgc->capacity; in update_sg_lb_stats()
6454 time_after_eq(jiffies, sg->sgc->next_update)) in update_sd_lb_stats()
7091 int *group_imbalance = &sd_parent->groups->sgc->imbalance; in load_balance()
7184 int *group_imbalance = &sd_parent->groups->sgc->imbalance; in load_balance()
7495 atomic_inc(&sd->groups->sgc->nr_busy_cpus); in set_cpu_sd_state_busy()
7512 atomic_dec(&sd->groups->sgc->nr_busy_cpus); in set_cpu_sd_state_idle()
7752 struct sched_group_capacity *sgc; in nohz_kick_needed() local
7782 sgc = sd->groups->sgc; in nohz_kick_needed()
7783 nr_busy = atomic_read(&sgc->nr_busy_cpus); in nohz_kick_needed()
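
All of the fields touched above (capacity, next_update, imbalance, nr_busy_cpus) belong to struct sched_group_capacity, reached from each struct sched_group through its sgc pointer; the listed functions are the load-balancing paths in kernel/sched/fair.c. The sketch below is an approximate reconstruction of that structure as declared in kernel/sched/sched.h for kernels of this vintage: only the fields that actually appear in the hits are taken from this listing, while the extra members (ref, the trailing cpumask[]) and the exact field types are recalled from memory and may differ between versions. Treat it as orientation, not as the authoritative definition.

/*
 * Approximate sketch of struct sched_group_capacity (kernel/sched/sched.h).
 * Only the fields referenced in the listing above are guaranteed; ref and
 * the trailing cpumask[] are included from memory and may vary by version.
 * Builds only inside the kernel tree (atomic_t comes from <linux/atomic.h>).
 */
struct sched_group_capacity {
	atomic_t	ref;		/* shared by sched_groups spanning the same CPUs */

	/*
	 * CPU capacity of the group, SCHED_CAPACITY_SCALE (1024) being one
	 * fully available CPU.  Written per CPU by update_cpu_capacity()
	 * (line 6119), summed over children in update_group_capacity()
	 * (lines 6147-6182), read when scaling avg_load in
	 * find_idlest_group() (line 4770) and cached into
	 * sgs->group_capacity in update_sg_lb_stats() (line 6335).
	 */
	unsigned long	capacity;

	/*
	 * jiffies deadline set in update_group_capacity() (line 6131) and
	 * checked in update_sd_lb_stats() (line 6454) to rate-limit
	 * capacity recomputation.
	 */
	unsigned long	next_update;

	/*
	 * Set from load_balance() (lines 7091, 7184) when pinned tasks keep
	 * the group from being balanced; read back via sg_imbalanced()
	 * (line 6228) when group statistics are classified.
	 */
	int		imbalance;

	/*
	 * Count of non-idle CPUs in the group, maintained by
	 * set_cpu_sd_state_busy()/set_cpu_sd_state_idle() (lines 7495, 7512)
	 * and polled by nohz_kick_needed() (lines 7782-7783) to decide
	 * whether to kick the nohz idle balancer.
	 */
	atomic_t	nr_busy_cpus;

	unsigned long	cpumask[0];	/* per-group CPU mask trailing the struct (from memory) */
};

The SCHED_CAPACITY_SCALE division at line 4770 is why capacity matters to group selection: with SCHED_CAPACITY_SCALE = 1024, a group carrying a summed load of 2048 on 2048 units of capacity ends up with avg_load 1024, i.e. exactly one fully busy CPU's worth of normalized load.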