Lines Matching refs:gr
150 gk20a_gr_wait_mem_scrubbing(struct gf100_gr *gr) in gk20a_gr_wait_mem_scrubbing() argument
152 struct nvkm_subdev *subdev = &gr->base.engine.subdev; in gk20a_gr_wait_mem_scrubbing()
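These two references are the gk20a-specific helper that blocks until the FECS and GPCCS falcon memories report that power-on scrubbing has finished, which must happen before any context-switch microcode is loaded. A minimal sketch of the polling pattern, built around nouveau's nvkm_msec() timeout loop; the status register offsets (0x40910c, 0x41a10c) and the busy bits are illustrative assumptions, not taken from the listing:

static int
gk20a_gr_wait_mem_scrubbing(struct gf100_gr *gr)
{
        struct nvkm_subdev *subdev = &gr->base.engine.subdev;
        struct nvkm_device *device = subdev->device;

        /* FECS instruction/data memory scrubbing (offset assumed). */
        if (nvkm_msec(device, 2000,
                if (!(nvkm_rd32(device, 0x40910c) & 0x00000006))
                        break;
        ) < 0) {
                nvkm_error(subdev, "FECS mem scrubbing timeout\n");
                return -ETIMEDOUT;
        }

        /* Same wait for GPCCS (offset assumed). */
        if (nvkm_msec(device, 2000,
                if (!(nvkm_rd32(device, 0x41a10c) & 0x00000006))
                        break;
        ) < 0) {
                nvkm_error(subdev, "GPCCS mem scrubbing timeout\n");
                return -ETIMEDOUT;
        }

        return 0;
}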
175 gk20a_gr_set_hww_esr_report_mask(struct gf100_gr *gr) in gk20a_gr_set_hww_esr_report_mask() argument
177 struct nvkm_device *device = gr->base.engine.subdev.device; in gk20a_gr_set_hww_esr_report_mask()
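The set_hww_esr_report_mask hook lets gk20a choose which SM hardware-warning (HWW) exceptions are reported back to the driver. As the single reference to device suggests, the whole hook amounts to a couple of priv register writes; a sketch, where the 0x419e44/0x419e4c offsets and the mask values are assumptions for illustration:

static void
gk20a_gr_set_hww_esr_report_mask(struct gf100_gr *gr)
{
        struct nvkm_device *device = gr->base.engine.subdev.device;

        /* Warp and global ESR report masks (offsets/values assumed). */
        nvkm_wr32(device, 0x419e44, 0x001ffffe);
        nvkm_wr32(device, 0x419e4c, 0x0000007f);
}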
183 gk20a_gr_init(struct gf100_gr *gr) in gk20a_gr_init() argument
185 struct nvkm_device *device = gr->base.engine.subdev.device; in gk20a_gr_init()
186 const u32 magicgpc918 = DIV_ROUND_UP(0x00800000, gr->tpc_total); in gk20a_gr_init()
195 gf100_gr_mmio(gr, gr->fuc_sw_nonctx); in gk20a_gr_init()
197 ret = gk20a_gr_wait_mem_scrubbing(gr); in gk20a_gr_init()
201 ret = gf100_gr_wait_idle(gr); in gk20a_gr_init()
206 nvkm_wr32(device, 0x100cc8, nvkm_memory_addr(gr->unk4188b4) >> 8); in gk20a_gr_init()
207 nvkm_wr32(device, 0x100ccc, nvkm_memory_addr(gr->unk4188b8) >> 8); in gk20a_gr_init()
209 if (gr->func->init_gpc_mmu) in gk20a_gr_init()
210 gr->func->init_gpc_mmu(gr); in gk20a_gr_init()
217 memcpy(tpcnr, gr->tpc_nr, sizeof(gr->tpc_nr)); in gk20a_gr_init()
218 for (i = 0, gpc = -1; i < gr->tpc_total; i++) { in gk20a_gr_init()
220 gpc = (gpc + 1) % gr->gpc_nr; in gk20a_gr_init()
222 tpc = gr->tpc_nr[gpc] - tpcnr[gpc]--; in gk20a_gr_init()
232 for (gpc = 0; gpc < gr->gpc_nr; gpc++) { in gk20a_gr_init()
234 gr->magic_not_rop_nr << 8 | gr->tpc_nr[gpc]); in gk20a_gr_init()
236 gr->tpc_total); in gk20a_gr_init()
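Lines 217-236 are the usual gf100-family TPC bookkeeping: a scratch copy of tpc_nr[] is consumed round-robin so every TPC receives an index within its GPC, and each GPC is then told its own TPC count plus the chip-wide total. A condensed sketch of that loop, using only the gf100_gr fields visible above (tpc_nr, gpc_nr, tpc_total, magic_not_rop_nr); the GPC_MAX/GPC_UNIT() macros and the 0x0914 offset are assumptions borrowed from the common gf100 code:

        u8 tpcnr[GPC_MAX];
        int i, gpc, tpc;

        /* Hand out TPCs round-robin across GPCs that still have spares. */
        memcpy(tpcnr, gr->tpc_nr, sizeof(gr->tpc_nr));
        for (i = 0, gpc = -1; i < gr->tpc_total; i++) {
                do {
                        gpc = (gpc + 1) % gr->gpc_nr;
                } while (!tpcnr[gpc]);
                tpc = gr->tpc_nr[gpc] - tpcnr[gpc]--;   /* index inside this GPC */

                /* ... per-TPC mapping tables built from (gpc, tpc) elided ... */
        }

        /* Tell each GPC how many TPCs it owns and how many exist overall. */
        for (gpc = 0; gpc < gr->gpc_nr; gpc++) {
                nvkm_wr32(device, GPC_UNIT(gpc, 0x0914),
                          gr->magic_not_rop_nr << 8 | gr->tpc_nr[gpc]);
                /* second write programs gr->tpc_total (offset elided) */
        }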
256 if (gr->func->set_hww_esr_report_mask) in gk20a_gr_init()
257 gr->func->set_hww_esr_report_mask(gr); in gk20a_gr_init()
261 nvkm_wr32(device, 0x41ac94, (((1 << gr->tpc_total) - 1) & 0xff) << 16); in gk20a_gr_init()
271 gf100_gr_zbc_init(gr); in gk20a_gr_init()
273 return gf100_gr_init_ctxctl(gr); in gk20a_gr_init()
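Read top to bottom, the gk20a_gr_init() references trace the GK20A (Tegra K1) bring-up: unlike discrete gf100-class parts, the register lists come from firmware rather than built-in tables. A skeleton reconstructed from the lines above, with error handling compressed and the omitted register programming reduced to comments; anything not shown in the listing (ordering of the elided steps, where magicgpc918 ends up) should be treated as an assumption:

static int
gk20a_gr_init(struct gf100_gr *gr)
{
        struct nvkm_device *device = gr->base.engine.subdev.device;
        int ret;

        /* Non-context state comes from the "sw_nonctx" firmware list. */
        gf100_gr_mmio(gr, gr->fuc_sw_nonctx);

        /* Falcon memories must finish scrubbing before ucode is loaded. */
        ret = gk20a_gr_wait_mem_scrubbing(gr);
        if (ret)
                return ret;
        ret = gf100_gr_wait_idle(gr);
        if (ret)
                return ret;

        /* Backing buffers whose exact purpose is unknown (hence unk4188xx). */
        nvkm_wr32(device, 0x100cc8, nvkm_memory_addr(gr->unk4188b4) >> 8);
        nvkm_wr32(device, 0x100ccc, nvkm_memory_addr(gr->unk4188b8) >> 8);

        if (gr->func->init_gpc_mmu)
                gr->func->init_gpc_mmu(gr);

        /*
         * ... TPC distribution and per-GPC setup (see sketch above), which
         * also programs magicgpc918 = DIV_ROUND_UP(0x00800000, gr->tpc_total)
         * into per-GPC registers ...
         */

        if (gr->func->set_hww_esr_report_mask)
                gr->func->set_hww_esr_report_mask(gr);

        /* Enable exceptions only for the TPCs that actually exist. */
        nvkm_wr32(device, 0x41ac94, (((1 << gr->tpc_total) - 1) & 0xff) << 16);

        gf100_gr_zbc_init(gr);

        /* Hand off to the common context-switch ucode bring-up. */
        return gf100_gr_init_ctxctl(gr);
}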
277 gk20a_gr_dtor(struct gf100_gr *gr) in gk20a_gr_dtor() argument
279 gk20a_gr_init_dtor(gr->fuc_method); in gk20a_gr_dtor()
280 gk20a_gr_init_dtor(gr->fuc_bundle); in gk20a_gr_dtor()
281 gk20a_gr_init_dtor(gr->fuc_sw_ctx); in gk20a_gr_dtor()
282 gk20a_gr_init_dtor(gr->fuc_sw_nonctx); in gk20a_gr_dtor()
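The destructor is the mirror image of the constructor below: each of the four register/method packs that were converted out of firmware at construction time is released again. A sketch, assuming gk20a_gr_init_dtor() is simply the paired free helper for the gk20a_gr_*_to_init()/_to_method() converters and that the base gf100 teardown is chained elsewhere:

static void
gk20a_gr_dtor(struct gf100_gr *gr)
{
        /* Free the packs built from the four firmware images at ctor time. */
        gk20a_gr_init_dtor(gr->fuc_method);
        gk20a_gr_init_dtor(gr->fuc_bundle);
        gk20a_gr_init_dtor(gr->fuc_sw_ctx);
        gk20a_gr_init_dtor(gr->fuc_sw_nonctx);
}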
290 struct gf100_gr *gr; in gk20a_gr_new_() local
293 if (!(gr = kzalloc(sizeof(*gr), GFP_KERNEL))) in gk20a_gr_new_()
295 *pgr = &gr->base; in gk20a_gr_new_()
297 ret = gf100_gr_ctor(func, device, index, gr); in gk20a_gr_new_()
301 ret = gf100_gr_ctor_fw(gr, "sw_nonctx", &fuc); in gk20a_gr_new_()
304 gr->fuc_sw_nonctx = gk20a_gr_av_to_init(&fuc); in gk20a_gr_new_()
306 if (IS_ERR(gr->fuc_sw_nonctx)) in gk20a_gr_new_()
307 return PTR_ERR(gr->fuc_sw_nonctx); in gk20a_gr_new_()
309 ret = gf100_gr_ctor_fw(gr, "sw_ctx", &fuc); in gk20a_gr_new_()
312 gr->fuc_sw_ctx = gk20a_gr_aiv_to_init(&fuc); in gk20a_gr_new_()
314 if (IS_ERR(gr->fuc_sw_ctx)) in gk20a_gr_new_()
315 return PTR_ERR(gr->fuc_sw_ctx); in gk20a_gr_new_()
317 ret = gf100_gr_ctor_fw(gr, "sw_bundle_init", &fuc); in gk20a_gr_new_()
320 gr->fuc_bundle = gk20a_gr_av_to_init(&fuc); in gk20a_gr_new_()
322 if (IS_ERR(gr->fuc_bundle)) in gk20a_gr_new_()
323 return PTR_ERR(gr->fuc_bundle); in gk20a_gr_new_()
325 ret = gf100_gr_ctor_fw(gr, "sw_method_init", &fuc); in gk20a_gr_new_()
328 gr->fuc_method = gk20a_gr_av_to_method(&fuc); in gk20a_gr_new_()
330 if (IS_ERR(gr->fuc_method)) in gk20a_gr_new_()
331 return PTR_ERR(gr->fuc_method); in gk20a_gr_new_()
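Finally, gk20a_gr_new_() shows how the object is built: allocate the gf100_gr, run the common gf100 constructor, then load four firmware images ("sw_nonctx", "sw_ctx", "sw_bundle_init", "sw_method_init") and convert each blob into the pack format the common code consumes. A condensed sketch of that pattern with error paths abbreviated; the exact signature mirrors gf100_gr_new_(), the gf100_gr_dtor_fw() cleanup of the temporary blob is an assumption, and the last two images are compressed into a comment:

int
gk20a_gr_new_(const struct gf100_gr_func *func, struct nvkm_device *device,
              int index, struct nvkm_gr **pgr)
{
        struct gf100_gr_fuc fuc;
        struct gf100_gr *gr;
        int ret;

        if (!(gr = kzalloc(sizeof(*gr), GFP_KERNEL)))
                return -ENOMEM;
        *pgr = &gr->base;

        ret = gf100_gr_ctor(func, device, index, gr);
        if (ret)
                return ret;

        /* Plain address/value list for the non-context registers. */
        ret = gf100_gr_ctor_fw(gr, "sw_nonctx", &fuc);
        if (ret)
                return ret;
        gr->fuc_sw_nonctx = gk20a_gr_av_to_init(&fuc);
        gf100_gr_dtor_fw(&fuc);                 /* assumed blob cleanup */
        if (IS_ERR(gr->fuc_sw_nonctx))
                return PTR_ERR(gr->fuc_sw_nonctx);

        /* Address/index/value list for the context image. */
        ret = gf100_gr_ctor_fw(gr, "sw_ctx", &fuc);
        if (ret)
                return ret;
        gr->fuc_sw_ctx = gk20a_gr_aiv_to_init(&fuc);
        gf100_gr_dtor_fw(&fuc);
        if (IS_ERR(gr->fuc_sw_ctx))
                return PTR_ERR(gr->fuc_sw_ctx);

        /*
         * "sw_bundle_init" -> gr->fuc_bundle via gk20a_gr_av_to_init() and
         * "sw_method_init" -> gr->fuc_method via gk20a_gr_av_to_method()
         * follow exactly the same load/convert/check pattern.
         */

        return 0;
}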