Lines Matching refs:clk
49 read_vco(struct gf100_clk *clk, u32 dsrc) in read_vco() argument
51 struct nvkm_device *device = clk->base.subdev.device; in read_vco()
54 return nvkm_clk_read(&clk->base, nv_clk_src_sppll0); in read_vco()
55 return nvkm_clk_read(&clk->base, nv_clk_src_sppll1); in read_vco()
59 read_pll(struct gf100_clk *clk, u32 pll) in read_pll() argument
61 struct nvkm_device *device = clk->base.subdev.device; in read_pll()
79 sclk = nvkm_clk_read(&clk->base, nv_clk_src_mpllsrc); in read_pll()
82 sclk = nvkm_clk_read(&clk->base, nv_clk_src_mpllsrcref); in read_pll()
88 sclk = read_div(clk, (pll & 0xff) / 0x20, 0x137120, 0x137140); in read_pll()
98 read_div(struct gf100_clk *clk, int doff, u32 dsrc, u32 dctl) in read_div() argument
100 struct nvkm_device *device = clk->base.subdev.device; in read_div()
113 u32 sclk = read_vco(clk, dsrc + (doff * 4)); in read_div()
118 return read_vco(clk, dsrc + (doff * 4)); in read_div()
125 read_clk(struct gf100_clk *clk, int idx) in read_clk() argument
127 struct nvkm_device *device = clk->base.subdev.device; in read_clk()
134 sclk = read_pll(clk, 0x137000 + (idx * 0x20)); in read_clk()
136 sclk = read_pll(clk, 0x1370e0); in read_clk()
139 sclk = read_div(clk, idx, 0x137160, 0x1371d0); in read_clk()
152 struct gf100_clk *clk = gf100_clk(base); in gf100_clk_read() local
153 struct nvkm_subdev *subdev = &clk->base.subdev; in gf100_clk_read()
162 return read_pll(clk, 0x00e800); in gf100_clk_read()
164 return read_pll(clk, 0x00e820); in gf100_clk_read()
167 return read_div(clk, 0, 0x137320, 0x137330); in gf100_clk_read()
169 return read_pll(clk, 0x132020); in gf100_clk_read()
171 return read_pll(clk, 0x132000); in gf100_clk_read()
173 return read_div(clk, 0, 0x137300, 0x137310); in gf100_clk_read()
176 return nvkm_clk_read(&clk->base, nv_clk_src_mpll); in gf100_clk_read()
177 return nvkm_clk_read(&clk->base, nv_clk_src_mdiv); in gf100_clk_read()
180 return read_clk(clk, 0x00); in gf100_clk_read()
182 return read_clk(clk, 0x01); in gf100_clk_read()
184 return read_clk(clk, 0x02); in gf100_clk_read()
186 return read_clk(clk, 0x07); in gf100_clk_read()
188 return read_clk(clk, 0x08); in gf100_clk_read()
190 return read_clk(clk, 0x09); in gf100_clk_read()
192 return read_clk(clk, 0x0c); in gf100_clk_read()
194 return read_clk(clk, 0x0e); in gf100_clk_read()
202 calc_div(struct gf100_clk *clk, int idx, u32 ref, u32 freq, u32 *ddiv) in calc_div() argument
213 calc_src(struct gf100_clk *clk, int idx, u32 freq, u32 *dsrc, u32 *ddiv) in calc_src() argument
235 sclk = read_vco(clk, 0x137160 + (idx * 4)); in calc_src()
237 sclk = calc_div(clk, idx, sclk, freq, ddiv); in calc_src()
242 calc_pll(struct gf100_clk *clk, int idx, u32 freq, u32 *coef) in calc_pll() argument
244 struct nvkm_subdev *subdev = &clk->base.subdev; in calc_pll()
253 limits.refclk = read_div(clk, idx, 0x137120, 0x137140); in calc_pll()
266 calc_clk(struct gf100_clk *clk, struct nvkm_cstate *cstate, int idx, int dom) in calc_clk() argument
268 struct gf100_clk_info *info = &clk->eng[idx]; in calc_clk()
278 clk0 = calc_src(clk, idx, freq, &src0, &div0); in calc_clk()
279 clk0 = calc_div(clk, idx, clk0, freq, &div1D); in calc_clk()
284 clk1 = calc_pll(clk, idx, freq, &info->coef); in calc_clk()
287 clk1 = calc_div(clk, idx, clk1, freq, &div1P); in calc_clk()
319 struct gf100_clk *clk = gf100_clk(base); in gf100_clk_calc() local
322 if ((ret = calc_clk(clk, cstate, 0x00, nv_clk_src_gpc)) || in gf100_clk_calc()
323 (ret = calc_clk(clk, cstate, 0x01, nv_clk_src_rop)) || in gf100_clk_calc()
324 (ret = calc_clk(clk, cstate, 0x02, nv_clk_src_hubk07)) || in gf100_clk_calc()
325 (ret = calc_clk(clk, cstate, 0x07, nv_clk_src_hubk06)) || in gf100_clk_calc()
326 (ret = calc_clk(clk, cstate, 0x08, nv_clk_src_hubk01)) || in gf100_clk_calc()
327 (ret = calc_clk(clk, cstate, 0x09, nv_clk_src_copy)) || in gf100_clk_calc()
328 (ret = calc_clk(clk, cstate, 0x0c, nv_clk_src_daemon)) || in gf100_clk_calc()
329 (ret = calc_clk(clk, cstate, 0x0e, nv_clk_src_vdec))) in gf100_clk_calc()
336 gf100_clk_prog_0(struct gf100_clk *clk, int idx) in gf100_clk_prog_0() argument
338 struct gf100_clk_info *info = &clk->eng[idx]; in gf100_clk_prog_0()
339 struct nvkm_device *device = clk->base.subdev.device; in gf100_clk_prog_0()
347 gf100_clk_prog_1(struct gf100_clk *clk, int idx) in gf100_clk_prog_1() argument
349 struct nvkm_device *device = clk->base.subdev.device; in gf100_clk_prog_1()
358 gf100_clk_prog_2(struct gf100_clk *clk, int idx) in gf100_clk_prog_2() argument
360 struct gf100_clk_info *info = &clk->eng[idx]; in gf100_clk_prog_2()
361 struct nvkm_device *device = clk->base.subdev.device; in gf100_clk_prog_2()
379 gf100_clk_prog_3(struct gf100_clk *clk, int idx) in gf100_clk_prog_3() argument
381 struct gf100_clk_info *info = &clk->eng[idx]; in gf100_clk_prog_3()
382 struct nvkm_device *device = clk->base.subdev.device; in gf100_clk_prog_3()
394 gf100_clk_prog_4(struct gf100_clk *clk, int idx) in gf100_clk_prog_4() argument
396 struct gf100_clk_info *info = &clk->eng[idx]; in gf100_clk_prog_4()
397 struct nvkm_device *device = clk->base.subdev.device; in gf100_clk_prog_4()
404 struct gf100_clk *clk = gf100_clk(base); in gf100_clk_prog() local
417 for (j = 0; j < ARRAY_SIZE(clk->eng); j++) { in gf100_clk_prog()
418 if (!clk->eng[j].freq) in gf100_clk_prog()
420 stage[i].exec(clk, j); in gf100_clk_prog()
430 struct gf100_clk *clk = gf100_clk(base); in gf100_clk_tidy() local
431 memset(clk->eng, 0x00, sizeof(clk->eng)); in gf100_clk_tidy()
459 struct gf100_clk *clk; in gf100_clk_new() local
461 if (!(clk = kzalloc(sizeof(*clk), GFP_KERNEL))) in gf100_clk_new()
463 *pclk = &clk->base; in gf100_clk_new()
465 return nvkm_clk_ctor(&gf100_clk, device, index, false, &clk->base); in gf100_clk_new()
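
The matched lines above trace the gf100 clock subdev from readback (read_vco(), read_pll(), read_div(), read_clk(), gf100_clk_read()) through coefficient calculation (calc_src(), calc_pll(), calc_div(), calc_clk(), gf100_clk_calc()) to programming (the gf100_clk_prog_*() stages walked by gf100_clk_prog()), state reset (gf100_clk_tidy()) and construction (gf100_clk_new()). The standalone C sketch below only restates the per-domain register map implied by those lines; it is not the driver code. The domain-to-index pairs come from gf100_clk_read()/gf100_clk_calc(), the PLL and source offsets from read_clk()/calc_src(), and the 4-byte stride on the divider-control block at 0x1371d0 is an assumption mirroring the visible stride on the 0x137160 source block.

    /*
     * Illustrative sketch only: prints the register addresses that read_clk(),
     * read_pll() and read_div() would touch for each domain index dispatched
     * by gf100_clk_read().  No nvkm plumbing, no MMIO access.
     */
    #include <stdio.h>

    struct dom {
    	const char *name;  /* nv_clk_src_* domain handled in gf100_clk_read() */
    	unsigned idx;      /* index passed to read_clk()/calc_clk() */
    };

    static const struct dom doms[] = {
    	{ "gpc",    0x00 }, { "rop",    0x01 }, { "hubk07", 0x02 },
    	{ "hubk06", 0x07 }, { "hubk01", 0x08 }, { "copy",   0x09 },
    	{ "daemon", 0x0c }, { "vdec",   0x0e },
    };

    int main(void)
    {
    	for (size_t i = 0; i < sizeof(doms) / sizeof(doms[0]); i++) {
    		unsigned idx = doms[i].idx;
    		printf("%-6s idx %#04x: pll %#08x  dsrc %#08x  dctl %#08x\n",
    		       doms[i].name, idx,
    		       0x137000 + idx * 0x20,  /* per-domain PLL, read_pll() */
    		       0x137160 + idx * 0x04,  /* clock source, read_div()/calc_src() */
    		       0x1371d0 + idx * 0x04); /* divider control (stride assumed) */
    	}
    	return 0;
    }

The same indices reappear in calc_clk() and in the clk->eng[] array walked by gf100_clk_prog(), so the table printed above also shows which clk->eng[j] slot each nv_clk_src_* domain occupies during programming.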