Lines matching refs:sgc (lines that reference the sgc pointer, listed with their source line number and enclosing function)
4680 avg_load = (avg_load * SCHED_CAPACITY_SCALE) / group->sgc->capacity; in find_idlest_group()
6081 sdg->sgc->capacity = capacity; in update_cpu_capacity()
6093 sdg->sgc->next_update = jiffies + interval; in update_group_capacity()
6109 struct sched_group_capacity *sgc; in update_group_capacity() local
6128 sgc = rq->sd->groups->sgc; in update_group_capacity()
6129 capacity += sgc->capacity; in update_group_capacity()
6139 capacity += group->sgc->capacity; in update_group_capacity()
6144 sdg->sgc->capacity = capacity; in update_group_capacity()
6190 return group->sgc->imbalance; in sg_imbalanced()
6297 sgs->group_capacity = group->sgc->capacity; in update_sg_lb_stats()
6416 time_after_eq(jiffies, sg->sgc->next_update)) in update_sd_lb_stats()
7053 int *group_imbalance = &sd_parent->groups->sgc->imbalance; in load_balance()
7146 int *group_imbalance = &sd_parent->groups->sgc->imbalance; in load_balance()
7460 atomic_inc(&sd->groups->sgc->nr_busy_cpus); in set_cpu_sd_state_busy()
7477 atomic_dec(&sd->groups->sgc->nr_busy_cpus); in set_cpu_sd_state_idle()
7691 struct sched_group_capacity *sgc; in nohz_kick_needed() local
7721 sgc = sd->groups->sgc; in nohz_kick_needed()
7722 nr_busy = atomic_read(&sgc->nr_busy_cpus); in nohz_kick_needed()
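Every reference above goes through struct sched_group_capacity, the per-group state shared by all CPUs spanning a scheduling group; the four fields touched are capacity, next_update, imbalance and nr_busy_cpus, and all of the listed functions live in kernel/sched/fair.c. Below is a rough sketch of the structure for the kernel era these line numbers correspond to; the real definition is in kernel/sched/sched.h, and field types and ordering are approximate and have changed across releases.

/* Kernel-context sketch (atomic_t etc. come from <linux/atomic.h>);
 * approximate layout, paraphrased from kernel/sched/sched.h of this era. */
struct sched_group_capacity {
	atomic_t ref;              /* shared by every CPU spanning the group */
	unsigned int capacity;     /* group capacity; SCHED_CAPACITY_SCALE == one full-strength CPU.
	                            * Written in update_cpu_capacity()/update_group_capacity()
	                            * (lines 6081, 6144), read in find_idlest_group() (4680)
	                            * and update_sg_lb_stats() (6297). */
	unsigned long next_update; /* jiffies deadline for the next capacity refresh (6093, 6416) */
	int imbalance;             /* group-imbalance hint: set/cleared in load_balance()
	                            * (7053, 7146), read via sg_imbalanced() (6190) */
	atomic_t nr_busy_cpus;     /* maintained by set_cpu_sd_state_busy()/_idle() (7460, 7477),
	                            * read in nohz_kick_needed() (7691-7722) */
	unsigned long cpumask[0];  /* iteration mask used during load balancing */
};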
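The normalization at line 4680 is what keeps group loads comparable when groups differ in capacity: accumulated load is multiplied by SCHED_CAPACITY_SCALE (1 << 10 in the kernel) and divided by the group's capacity, so a group with fewer or weaker CPUs reports a proportionally higher average load. A minimal user-space sketch of that arithmetic follows, with made-up numbers rather than anything taken from the kernel.

#include <stdio.h>

#define SCHED_CAPACITY_SCALE (1UL << 10)  /* 1024, the per-CPU reference capacity */

int main(void)
{
	/* Hypothetical groups: same raw load, different capacity
	 * (e.g. a big-core group vs. a little-core group). */
	unsigned long load = 2048;
	unsigned long cap_big    = 2 * SCHED_CAPACITY_SCALE;      /* two full-strength CPUs */
	unsigned long cap_little = SCHED_CAPACITY_SCALE + 512;    /* one full + one reduced CPU */

	/* Same normalization as line 4680:
	 * avg_load = (avg_load * SCHED_CAPACITY_SCALE) / group->sgc->capacity */
	unsigned long avg_big    = load * SCHED_CAPACITY_SCALE / cap_big;
	unsigned long avg_little = load * SCHED_CAPACITY_SCALE / cap_little;

	printf("normalized load: big=%lu little=%lu\n", avg_big, avg_little);
	return 0;
}

With the same raw load, the lower-capacity group comes out with the larger normalized figure, which is why find_idlest_group() can compare groups of different sizes and strengths directly.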