Lines matching refs:rdev in r600_dma.c (every reference to the struct radeon_device *rdev handle). Each hit below keeps its original source line number and is grouped by enclosing function; the hit on a function's signature line is where rdev enters as an argument.

Forward declaration (the function itself lives in r600.c):
   29  u32 r600_gpu_check_soft_reset(struct radeon_device *rdev);

In r600_dma_get_rptr():
   51  uint32_t r600_dma_get_rptr(struct radeon_device *rdev,
   56          if (rdev->wb.enabled)
   57                  rptr = rdev->wb.wb[ring->rptr_offs/4];

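The two body hits are the writeback fast path: with writeback enabled, the DMA engine's read pointer is fetched from the CPU-visible wb buffer instead of an MMIO register. A minimal sketch of the full function, reconstructed from these hits; the DMA_RB_RPTR register name and the dword/byte shifts come from the r600 register headers, so treat the details as indicative rather than verbatim:

    uint32_t r600_dma_get_rptr(struct radeon_device *rdev,
                               struct radeon_ring *ring)
    {
            u32 rptr;

            if (rdev->wb.enabled)
                    /* cheap path: the engine snoops its rptr into the
                     * writeback buffer, so a plain memory read suffices */
                    rptr = rdev->wb.wb[ring->rptr_offs/4];
            else
                    /* fall back to an MMIO register read */
                    rptr = RREG32(DMA_RB_RPTR);

            /* mask to the ring size, convert byte offset to dwords */
            return (rptr & 0x3fffc) >> 2;
    }
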
In r600_dma_get_wptr():
   72  uint32_t r600_dma_get_wptr(struct radeon_device *rdev,

In r600_dma_set_wptr():
   86  void r600_dma_set_wptr(struct radeon_device *rdev,

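Both accessors are matched only on their signatures; the one-line bodies touch registers rather than naming rdev directly. A sketch of the pair, assuming the DMA_RB_WPTR register from r600d.h; the <<2 / >>2 shifts convert between the driver's dword indices and the byte offsets the hardware expects:

    uint32_t r600_dma_get_wptr(struct radeon_device *rdev,
                               struct radeon_ring *ring)
    {
            return (RREG32(DMA_RB_WPTR) & 0x3fffc) >> 2;
    }

    void r600_dma_set_wptr(struct radeon_device *rdev,
                           struct radeon_ring *ring)
    {
            WREG32(DMA_RB_WPTR, (ring->wptr << 2) & 0x3fffc);
    }
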
In r600_dma_stop():
   99  void r600_dma_stop(struct radeon_device *rdev)
  103          if (rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX)
  104                  radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
  109          rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;

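The hits show the TTM interaction but not the actual ring disable. A sketch of the whole function, assuming DMA_RB_CNTL and DMA_RB_ENABLE from r600d.h: while the async DMA engine is down, buffer moves fall back to the CPU, so the active VRAM window is shrunk to the CPU-visible aperture.

    void r600_dma_stop(struct radeon_device *rdev)
    {
            u32 rb_cntl = RREG32(DMA_RB_CNTL);

            /* if the DMA engine was serving TTM copies, restrict
             * placements to VRAM the CPU can reach */
            if (rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX)
                    radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);

            rb_cntl &= ~DMA_RB_ENABLE;
            WREG32(DMA_RB_CNTL, rb_cntl);

            rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
    }
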
In r600_dma_resume():
  120  int r600_dma_resume(struct radeon_device *rdev)
  122          struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
  144                 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);
  146                 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));
  148          if (rdev->wb.enabled)
  164          if (rdev->family >= CHIP_RV770)
  174          r = radeon_ring_test(rdev, R600_RING_TYPE_DMA_INDEX, ring);
  180          if (rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX)
  181                  radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);

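r600_dma_resume() is the largest match cluster: it programs the rptr writeback address (144-146), enables writeback (148), applies the RV770+ DMA mode (164), and finishes with a ring test (174) before restoring the full VRAM size (180-181). A condensed sketch with the rptr/wptr reset, endian-swap bits, and interrupt masking elided; register names come from r600d.h and the flow is reconstructed, not copied:

    int r600_dma_resume(struct radeon_device *rdev)
    {
            struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
            u32 rb_cntl;
            int r;

            /* ring size in dwords, encoded as log2 */
            rb_cntl = order_base_2(ring->ring_size / 4) << 1;
            WREG32(DMA_RB_CNTL, rb_cntl);

            /* program the rptr writeback address whether wb is on or not */
            WREG32(DMA_RB_RPTR_ADDR_HI,
                   upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);
            WREG32(DMA_RB_RPTR_ADDR_LO,
                   ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));
            if (rdev->wb.enabled)
                    rb_cntl |= DMA_RPTR_WRITEBACK_ENABLE;

            WREG32(DMA_RB_BASE, ring->gpu_addr >> 8);
            WREG32(DMA_IB_CNTL, DMA_IB_ENABLE);

            /* RV770 and newer want the DMA engine in its new mode */
            if (rdev->family >= CHIP_RV770)
                    WREG32(DMA_MODE, 1);

            WREG32(DMA_RB_CNTL, rb_cntl | DMA_RB_ENABLE);
            ring->ready = true;

            r = radeon_ring_test(rdev, R600_RING_TYPE_DMA_INDEX, ring);
            if (r) {
                    ring->ready = false;
                    return r;
            }

            /* DMA can serve TTM copies again, so expose all of VRAM */
            if (rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX)
                    radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);

            return 0;
    }
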
In r600_dma_fini():
  193  void r600_dma_fini(struct radeon_device *rdev)
  195          r600_dma_stop(rdev);
  196          radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]);

In r600_dma_is_lockup():
  208  bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
  210          u32 reset_mask = r600_gpu_check_soft_reset(rdev);
  213                  radeon_ring_lockup_update(rdev, ring);
  216          return radeon_ring_test_lockup(rdev, ring);

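A sketch of the complete lockup check, assuming the RADEON_RESET_DMA bit from radeon.h: if the soft-reset mask says the DMA engine is healthy, the tracker is refreshed and no lockup is reported; otherwise the generic ring lockup test decides:

    bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
    {
            u32 reset_mask = r600_gpu_check_soft_reset(rdev);

            if (!(reset_mask & RADEON_RESET_DMA)) {
                    /* engine looks healthy: refresh the lockup tracker */
                    radeon_ring_lockup_update(rdev, ring);
                    return false;
            }
            return radeon_ring_test_lockup(rdev, ring);
    }
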
In r600_dma_ring_test():
  230  int r600_dma_ring_test(struct radeon_device *rdev,
  244          gpu_addr = rdev->wb.gpu_addr + index;
  247          rdev->wb.wb[index/4] = cpu_to_le32(tmp);
  249          r = radeon_ring_lock(rdev, ring, 4);
  258          radeon_ring_unlock_commit(rdev, ring, false);
  260          for (i = 0; i < rdev->usec_timeout; i++) {
  261                  tmp = le32_to_cpu(rdev->wb.wb[index/4]);
  267          if (i < rdev->usec_timeout) {

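The hits outline the classic ring test pattern: poison a writeback slot (247), emit a single-dword DMA write of a marker (249-258), then poll until the marker appears or usec_timeout expires (260-267). A condensed sketch using R600_WB_DMA_RING_TEST_OFFSET and the DMA_PACKET encoding from the r600 headers; the second-ring (cayman DMA1) slot selection and the DRM log messages are dropped:

    int r600_dma_ring_test(struct radeon_device *rdev,
                           struct radeon_ring *ring)
    {
            unsigned index = R600_WB_DMA_RING_TEST_OFFSET;
            u64 gpu_addr = rdev->wb.gpu_addr + index;
            u32 tmp = 0xCAFEDEAD;
            unsigned i;
            int r;

            /* seed the writeback slot with a poison value */
            rdev->wb.wb[index/4] = cpu_to_le32(tmp);

            r = radeon_ring_lock(rdev, ring, 4);
            if (r)
                    return r;

            /* single-dword DMA write of a recognizable marker */
            radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
            radeon_ring_write(ring, lower_32_bits(gpu_addr));
            radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);
            radeon_ring_write(ring, 0xDEADBEEF);
            radeon_ring_unlock_commit(rdev, ring, false);

            /* busy-wait until the engine lands the marker or we time out */
            for (i = 0; i < rdev->usec_timeout; i++) {
                    tmp = le32_to_cpu(rdev->wb.wb[index/4]);
                    if (tmp == 0xDEADBEEF)
                            break;
                    udelay(1);
            }
            return (i < rdev->usec_timeout) ? 0 : -EINVAL;
    }
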
In r600_dma_fence_ring_emit():
  287  void r600_dma_fence_ring_emit(struct radeon_device *rdev,
  290          struct radeon_ring *ring = &rdev->ring[fence->ring];
  291          u64 addr = rdev->fence_drv[fence->ring].gpu_addr;

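Only the prologue mentions rdev; the packet emission that follows does not. A sketch of the full fence emit, assuming the DMA_PACKET_FENCE and DMA_PACKET_TRAP opcodes from r600d.h: the engine writes the fence sequence number to the fence GPU address, then raises a trap interrupt to wake waiters:

    void r600_dma_fence_ring_emit(struct radeon_device *rdev,
                                  struct radeon_fence *fence)
    {
            struct radeon_ring *ring = &rdev->ring[fence->ring];
            u64 addr = rdev->fence_drv[fence->ring].gpu_addr;

            /* write the fence sequence number to the fence address */
            radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0, 0));
            radeon_ring_write(ring, addr & 0xfffffffc);
            radeon_ring_write(ring, upper_32_bits(addr) & 0xff);
            radeon_ring_write(ring, lower_32_bits(fence->seq));
            /* raise a trap interrupt so waiters get woken */
            radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0, 0));
    }
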
In r600_dma_semaphore_ring_emit():
  313  bool r600_dma_semaphore_ring_emit(struct radeon_device *rdev,

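Matched only at the signature. A sketch of the body as it plausibly stands, assuming the DMA_PACKET_SEMAPHORE opcode; a single packet either signals or waits on the semaphore address depending on emit_wait:

    bool r600_dma_semaphore_ring_emit(struct radeon_device *rdev,
                                      struct radeon_ring *ring,
                                      struct radeon_semaphore *semaphore,
                                      bool emit_wait)
    {
            u64 addr = semaphore->gpu_addr;
            /* the s bit selects signal (1) versus wait (0) */
            u32 s = emit_wait ? 0 : 1;

            radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SEMAPHORE, 0, s, 0));
            radeon_ring_write(ring, addr & 0xfffffffc);
            radeon_ring_write(ring, upper_32_bits(addr) & 0xff);

            return true;
    }
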
In r600_dma_ib_test():
  337  int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
  351          gpu_addr = rdev->wb.gpu_addr + index;
  353          r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
  365          r = radeon_ib_schedule(rdev, &ib, NULL, false);
  367                  radeon_ib_free(rdev, &ib);
  376          for (i = 0; i < rdev->usec_timeout; i++) {
  377                  tmp = le32_to_cpu(rdev->wb.wb[index/4]);
  382          if (i < rdev->usec_timeout) {
  388          radeon_ib_free(rdev, &ib);

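Same marker dance as the ring test, but the write packet travels through an indirect buffer so the IB submission path itself gets exercised: get an IB (353), fill four dwords, schedule it (365), poll the writeback slot (376-377), and free the IB on both the error and success paths (367, 388). A condensed sketch with the cayman DMA1 slot selection and log messages dropped:

    int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
    {
            unsigned index = R600_WB_DMA_RING_TEST_OFFSET;
            u64 gpu_addr = rdev->wb.gpu_addr + index;
            struct radeon_ib ib;
            unsigned i;
            u32 tmp = 0;
            int r;

            rdev->wb.wb[index/4] = cpu_to_le32(0xCAFEDEAD);

            r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
            if (r)
                    return r;

            /* same single-dword write as the ring test, issued from an IB */
            ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);
            ib.ptr[1] = lower_32_bits(gpu_addr);
            ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
            ib.ptr[3] = 0xDEADBEEF;
            ib.length_dw = 4;

            r = radeon_ib_schedule(rdev, &ib, NULL, false);
            if (r) {
                    radeon_ib_free(rdev, &ib);
                    return r;
            }

            /* poll the writeback slot until the marker lands or timeout */
            for (i = 0; i < rdev->usec_timeout; i++) {
                    tmp = le32_to_cpu(rdev->wb.wb[index/4]);
                    if (tmp == 0xDEADBEEF)
                            break;
                    udelay(1);
            }
            if (i >= rdev->usec_timeout)
                    r = -EINVAL;

            radeon_ib_free(rdev, &ib);
            return r;
    }
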
In r600_dma_ring_ib_execute():
  400  void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
  402          struct radeon_ring *ring = &rdev->ring[ib->ring];
  404          if (rdev->wb.enabled) {

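The wb hit (404) guards an optional packet that publishes the post-IB read pointer to the writeback buffer. A sketch of the whole emit, assuming DMA_PACKET_NOP and DMA_PACKET_INDIRECT_BUFFER from r600d.h; the NOP padding enforces the hardware rule that the IB launch packet must end on an 8-dword boundary:

    void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
    {
            struct radeon_ring *ring = &rdev->ring[ib->ring];

            if (rdev->wb.enabled) {
                    /* tell the CPU where rptr will be once the launch
                     * packet below has been consumed */
                    u32 next_rptr = ring->wptr + 4;
                    while ((next_rptr & 7) != 5)
                            next_rptr++;
                    next_rptr += 3;
                    radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
                    radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
                    radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
                    radeon_ring_write(ring, next_rptr);
            }

            /* pad with NOPs so the IB packet ends on an 8-dword boundary */
            while ((ring->wptr & 7) != 5)
                    radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
            radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0, 0));
            radeon_ring_write(ring, ib->gpu_addr & 0xFFFFFFE0);
            radeon_ring_write(ring, (ib->length_dw << 16) |
                                    (upper_32_bits(ib->gpu_addr) & 0xFF));
    }
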
In r600_copy_dma():
  439  struct radeon_fence *r600_copy_dma(struct radeon_device *rdev,
  446          int ring_index = rdev->asic->copy.dma_ring_index;
  447          struct radeon_ring *ring = &rdev->ring[ring_index];
  456          r = radeon_ring_lock(rdev, ring, num_loops * 4 + 8);
  459                  radeon_sync_free(rdev, &sync, NULL);
  463          radeon_sync_resv(rdev, &sync, resv, false);
  464          radeon_sync_rings(rdev, &sync, ring->idx);
  480          r = radeon_fence_emit(rdev, &fence, ring->idx);
  482                  radeon_ring_unlock_undo(rdev, ring);
  483          radeon_sync_free(rdev, &sync, NULL);
  487          radeon_ring_unlock_commit(rdev, ring, false);
  488          radeon_sync_free(rdev, &sync, fence);

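The cluster maps out the copy path: pick the DMA ring (446-447), reserve space for num_loops copy packets plus the fence (456), synchronize against the reservation object and other rings (463-464), then fence and commit (480-488), with radeon_sync_free() releasing the sync object on every exit. A condensed sketch; the resv parameter is typed struct dma_resv * here, which matches recent kernels (older trees use reservation_object), and the 0xFFFE-dword cap per COPY packet is the r600 DMA limit:

    struct radeon_fence *r600_copy_dma(struct radeon_device *rdev,
                                       uint64_t src_offset, uint64_t dst_offset,
                                       unsigned num_gpu_pages,
                                       struct dma_resv *resv)
    {
            int ring_index = rdev->asic->copy.dma_ring_index;
            struct radeon_ring *ring = &rdev->ring[ring_index];
            struct radeon_fence *fence;
            struct radeon_sync sync;
            u32 size_in_dw, cur_size_in_dw;
            int i, num_loops, r;

            radeon_sync_create(&sync);

            /* each COPY packet moves at most 0xFFFE dwords */
            size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
            num_loops = DIV_ROUND_UP(size_in_dw, 0xFFFE);
            r = radeon_ring_lock(rdev, ring, num_loops * 4 + 8);
            if (r) {
                    radeon_sync_free(rdev, &sync, NULL);
                    return ERR_PTR(r);
            }

            /* wait for everything attached to the reservation object */
            radeon_sync_resv(rdev, &sync, resv, false);
            radeon_sync_rings(rdev, &sync, ring->idx);

            for (i = 0; i < num_loops; i++) {
                    cur_size_in_dw = min(size_in_dw, 0xFFFEu);
                    size_in_dw -= cur_size_in_dw;
                    radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, 0,
                                                       cur_size_in_dw));
                    radeon_ring_write(ring, dst_offset & 0xfffffffc);
                    radeon_ring_write(ring, src_offset & 0xfffffffc);
                    radeon_ring_write(ring, ((upper_32_bits(dst_offset) & 0xff) << 16) |
                                            (upper_32_bits(src_offset) & 0xff));
                    src_offset += cur_size_in_dw * 4;
                    dst_offset += cur_size_in_dw * 4;
            }

            r = radeon_fence_emit(rdev, &fence, ring->idx);
            if (r) {
                    radeon_ring_unlock_undo(rdev, ring);
                    radeon_sync_free(rdev, &sync, NULL);
                    return ERR_PTR(r);
            }

            radeon_ring_unlock_commit(rdev, ring, false);
            radeon_sync_free(rdev, &sync, fence);
            return fence;
    }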