Lines matching refs: vce
82 INIT_DELAYED_WORK(&adev->vce.idle_work, amdgpu_vce_idle_work_handler); in amdgpu_vce_sw_init()
119 r = request_firmware(&adev->vce.fw, fw_name, adev->dev); in amdgpu_vce_sw_init()
126 r = amdgpu_ucode_validate(adev->vce.fw); in amdgpu_vce_sw_init()
130 release_firmware(adev->vce.fw); in amdgpu_vce_sw_init()
131 adev->vce.fw = NULL; in amdgpu_vce_sw_init()
135 hdr = (const struct common_firmware_header *)adev->vce.fw->data; in amdgpu_vce_sw_init()
143 adev->vce.fw_version = ((version_major << 24) | (version_minor << 16) | in amdgpu_vce_sw_init()
151 NULL, NULL, &adev->vce.vcpu_bo); in amdgpu_vce_sw_init()
157 r = amdgpu_bo_reserve(adev->vce.vcpu_bo, false); in amdgpu_vce_sw_init()
159 amdgpu_bo_unref(&adev->vce.vcpu_bo); in amdgpu_vce_sw_init()
164 r = amdgpu_bo_pin(adev->vce.vcpu_bo, AMDGPU_GEM_DOMAIN_VRAM, in amdgpu_vce_sw_init()
165 &adev->vce.gpu_addr); in amdgpu_vce_sw_init()
166 amdgpu_bo_unreserve(adev->vce.vcpu_bo); in amdgpu_vce_sw_init()
168 amdgpu_bo_unref(&adev->vce.vcpu_bo); in amdgpu_vce_sw_init()
174 atomic_set(&adev->vce.handles[i], 0); in amdgpu_vce_sw_init()
175 adev->vce.filp[i] = NULL; in amdgpu_vce_sw_init()
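
The sw_init matches above trace the whole bring-up: fetch and validate the firmware, record its version, pin the VCPU buffer object in VRAM, and clear the session-handle table. A condensed sketch of that flow follows; it abbreviates the error unwinding, and identifiers not visible in the matches (AMDGPU_MAX_VCE_HANDLES, the version fields parsed from the header, the full amdgpu_bo_create() argument list) are assumed from the same era of the driver and may differ across kernel versions.

r = request_firmware(&adev->vce.fw, fw_name, adev->dev);
if (r == 0)
        r = amdgpu_ucode_validate(adev->vce.fw);
if (r) {
        release_firmware(adev->vce.fw);
        adev->vce.fw = NULL;
        return r;
}

/* pack major/minor/binary id; the ">= 52" checks further down read the
 * major version back out of the top byte */
hdr = (const struct common_firmware_header *)adev->vce.fw->data;
adev->vce.fw_version = (version_major << 24) | (version_minor << 16) |
                       (binary_id << 8);

/* pin the VCPU buffer in VRAM so the firmware has a fixed GPU address */
r = amdgpu_bo_reserve(adev->vce.vcpu_bo, false);
if (r) {
        amdgpu_bo_unref(&adev->vce.vcpu_bo);
        return r;
}
r = amdgpu_bo_pin(adev->vce.vcpu_bo, AMDGPU_GEM_DOMAIN_VRAM,
                  &adev->vce.gpu_addr);
amdgpu_bo_unreserve(adev->vce.vcpu_bo);
if (r) {
        amdgpu_bo_unref(&adev->vce.vcpu_bo);
        return r;
}

/* start with an empty session table */
for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; ++i) {
        atomic_set(&adev->vce.handles[i], 0);
        adev->vce.filp[i] = NULL;
}
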
190 if (adev->vce.vcpu_bo == NULL) in amdgpu_vce_sw_fini()
193 amdgpu_bo_unref(&adev->vce.vcpu_bo); in amdgpu_vce_sw_fini()
195 amdgpu_ring_fini(&adev->vce.ring[0]); in amdgpu_vce_sw_fini()
196 amdgpu_ring_fini(&adev->vce.ring[1]); in amdgpu_vce_sw_fini()
198 release_firmware(adev->vce.fw); in amdgpu_vce_sw_fini()
213 if (adev->vce.vcpu_bo == NULL) in amdgpu_vce_suspend()
217 if (atomic_read(&adev->vce.handles[i])) in amdgpu_vce_suspend()
223 cancel_delayed_work_sync(&adev->vce.idle_work); in amdgpu_vce_suspend()
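
The suspend matches form an early-out ladder: nothing to do without a VCPU buffer, and the idle work is only cancelled once an active handle shows there is session state at stake. A sketch, assuming the AMDGPU_MAX_VCE_HANDLES bound from sw_init; in this era of the driver the function then refuses the suspend, since saving a live encode session is not supported.

if (adev->vce.vcpu_bo == NULL)
        return 0;

for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; ++i)
        if (atomic_read(&adev->vce.handles[i]))
                break;

if (i == AMDGPU_MAX_VCE_HANDLES)
        return 0;       /* no open sessions, nothing to tear down */

cancel_delayed_work_sync(&adev->vce.idle_work);
return -EINVAL;         /* suspending live encode sessions is unsupported */
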
241 if (adev->vce.vcpu_bo == NULL) in amdgpu_vce_resume()
244 r = amdgpu_bo_reserve(adev->vce.vcpu_bo, false); in amdgpu_vce_resume()
250 r = amdgpu_bo_kmap(adev->vce.vcpu_bo, &cpu_addr); in amdgpu_vce_resume()
252 amdgpu_bo_unreserve(adev->vce.vcpu_bo); in amdgpu_vce_resume()
257 hdr = (const struct common_firmware_header *)adev->vce.fw->data; in amdgpu_vce_resume()
259 memcpy(cpu_addr, (adev->vce.fw->data) + offset, in amdgpu_vce_resume()
260 (adev->vce.fw->size) - offset); in amdgpu_vce_resume()
262 amdgpu_bo_kunmap(adev->vce.vcpu_bo); in amdgpu_vce_resume()
264 amdgpu_bo_unreserve(adev->vce.vcpu_bo); in amdgpu_vce_resume()
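
Resume re-uploads the firmware payload into the still-pinned VCPU buffer: reserve, map, copy everything past the ucode header, unmap, unreserve. In the sketch below, the copy offset is assumed to come from the header's ucode_array_offset_bytes field, which is what the `+ offset` in the match at 259 reads.

r = amdgpu_bo_reserve(adev->vce.vcpu_bo, false);
if (r)
        return r;

r = amdgpu_bo_kmap(adev->vce.vcpu_bo, &cpu_addr);
if (r) {
        amdgpu_bo_unreserve(adev->vce.vcpu_bo);
        return r;
}

/* copy the firmware payload (everything after the header) back
 * into VRAM after a suspend cycle */
hdr = (const struct common_firmware_header *)adev->vce.fw->data;
offset = le32_to_cpu(hdr->ucode_array_offset_bytes);
memcpy(cpu_addr, adev->vce.fw->data + offset,
       adev->vce.fw->size - offset);

amdgpu_bo_kunmap(adev->vce.vcpu_bo);
amdgpu_bo_unreserve(adev->vce.vcpu_bo);
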
279 container_of(work, struct amdgpu_device, vce.idle_work.work); in amdgpu_vce_idle_work_handler()
281 if ((amdgpu_fence_count_emitted(&adev->vce.ring[0]) == 0) && in amdgpu_vce_idle_work_handler()
282 (amdgpu_fence_count_emitted(&adev->vce.ring[1]) == 0)) { in amdgpu_vce_idle_work_handler()
289 schedule_delayed_work(&adev->vce.idle_work, in amdgpu_vce_idle_work_handler()
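
The idle worker recovers the device from the work_struct via container_of(), then powers VCE down only when neither ring has fences outstanding; otherwise it re-arms itself. The power calls and the VCE_IDLE_TIMEOUT_MS constant in this sketch are not shown in the matches and are assumed from the same era of the driver.

struct amdgpu_device *adev =
        container_of(work, struct amdgpu_device, vce.idle_work.work);

if ((amdgpu_fence_count_emitted(&adev->vce.ring[0]) == 0) &&
    (amdgpu_fence_count_emitted(&adev->vce.ring[1]) == 0)) {
        if (adev->pm.dpm_enabled)
                amdgpu_dpm_enable_vce(adev, false);     /* power gate */
        else
                amdgpu_asic_set_vce_clocks(adev, 0, 0); /* clocks off */
} else {
        /* still busy, check again after the idle timeout */
        schedule_delayed_work(&adev->vce.idle_work,
                              msecs_to_jiffies(VCE_IDLE_TIMEOUT_MS));
}
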
304 bool set_clocks = !cancel_delayed_work_sync(&adev->vce.idle_work); in amdgpu_vce_note_usage()
305 set_clocks &= schedule_delayed_work(&adev->vce.idle_work, in amdgpu_vce_note_usage()
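
The two note_usage matches are the subtlest idiom in this file. cancel_delayed_work_sync() returns true when the idle work was still pending, so negating it makes set_clocks true exactly when the idle handler may already have run and gated the clocks. The &= with schedule_delayed_work() then drops the flag if another caller re-armed the timer first, leaving clock bring-up to that caller. A sketch; the clock values are this era's defaults and do not appear in the matches.

bool set_clocks = !cancel_delayed_work_sync(&adev->vce.idle_work);

set_clocks &= schedule_delayed_work(&adev->vce.idle_work,
                                    msecs_to_jiffies(VCE_IDLE_TIMEOUT_MS));

if (set_clocks) {
        /* the idle handler may have powered VCE down; bring it back */
        if (adev->pm.dpm_enabled)
                amdgpu_dpm_enable_vce(adev, true);
        else
                amdgpu_asic_set_vce_clocks(adev, 53300, 40000);
}
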
332 struct amdgpu_ring *ring = &adev->vce.ring[0]; in amdgpu_vce_free_handles()
335 uint32_t handle = atomic_read(&adev->vce.handles[i]); in amdgpu_vce_free_handles()
336 if (!handle || adev->vce.filp[i] != filp) in amdgpu_vce_free_handles()
345 adev->vce.filp[i] = NULL; in amdgpu_vce_free_handles()
346 atomic_set(&adev->vce.handles[i], 0); in amdgpu_vce_free_handles()
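
free_handles runs on DRM file close: every session slot owned by that file gets a destroy message submitted on ring 0 before the slot is cleared. amdgpu_vce_get_destroy_msg() is the counterpart of the create helper matched at 396/411; it is assumed here and does not appear in the matches.

struct amdgpu_ring *ring = &adev->vce.ring[0];
int i, r;

for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; ++i) {
        uint32_t handle = atomic_read(&adev->vce.handles[i]);

        if (!handle || adev->vce.filp[i] != filp)
                continue;       /* free slot, or owned by another file */

        r = amdgpu_vce_get_destroy_msg(ring, handle, NULL);
        if (r)
                DRM_ERROR("Error destroying VCE handle (%d)!\n", r);

        adev->vce.filp[i] = NULL;
        atomic_set(&adev->vce.handles[i], 0);
}
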
396 if ((ring->adev->vce.fw_version >> 24) >= 52) in amdgpu_vce_get_create_msg()
411 if ((ring->adev->vce.fw_version >> 24) >= 52) { in amdgpu_vce_get_create_msg()
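
Both create-message matches read the top byte of fw_version, which the sw_init match at 143 packed as (major << 24) | (minor << 16) | .... Shifting right by 24 therefore isolates the firmware major version; the matches suggest the create message grew extra fields with firmware major 52. In miniature:

/* fw_version layout (from sw_init): major in bits 31..24,
 * minor in 23..16, binary id in 15..8 */
if ((ring->adev->vce.fw_version >> 24) >= 52) {
        /* newer firmware: emit the extended create-message fields */
}
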
579 if (atomic_read(&p->adev->vce.handles[i]) == handle) { in amdgpu_vce_validate_handle()
580 if (p->adev->vce.filp[i] != p->filp) { in amdgpu_vce_validate_handle()
590 if (!atomic_cmpxchg(&p->adev->vce.handles[i], 0, handle)) { in amdgpu_vce_validate_handle()
591 p->adev->vce.filp[i] = p->filp; in amdgpu_vce_validate_handle()
592 p->adev->vce.img_size[i] = 0; in amdgpu_vce_validate_handle()
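
validate_handle is a lock-free two-pass lookup. The first pass matches an existing handle and rejects reuse across DRM files; the second claims an empty slot with atomic_cmpxchg(), which succeeds for exactly one of two racing submissions. A sketch, assuming the AMDGPU_MAX_VCE_HANDLES bound and this era's error reporting:

/* pass 1: is this an existing session? */
for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; ++i) {
        if (atomic_read(&p->adev->vce.handles[i]) == handle) {
                if (p->adev->vce.filp[i] != p->filp) {
                        DRM_ERROR("VCE handle collision detected!\n");
                        return -EINVAL;
                }
                return i;       /* existing session, same file */
        }
}

/* pass 2: claim a free slot; cmpxchg keeps racing parsers apart */
for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; ++i) {
        if (!atomic_cmpxchg(&p->adev->vce.handles[i], 0, handle)) {
                p->adev->vce.filp[i] = p->filp;
                p->adev->vce.img_size[i] = 0;
                return i;       /* newly allocated session slot */
        }
}

DRM_ERROR("No more free VCE handles!\n");
return -EINVAL;
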
645 size = &p->adev->vce.img_size[session_idx]; in amdgpu_vce_ring_parse_cs()
740 atomic_cmpxchg(&p->adev->vce.handles[i], handle, 0); in amdgpu_vce_ring_parse_cs()
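
Inside ring_parse_cs, the match at 645 binds `size` to the session's img_size slot so later buffer checks can validate against the configured image dimensions, and the match at 740 releases the slot once a destroy command has been parsed. The release is itself a compare-and-exchange, so the slot is only cleared if it still holds this handle; the `destroyed` flag below is an assumed local tracking whether a destroy message was seen.

if (destroyed) {
        /* IB contained a destroy message: free the session slot,
         * but only if it still maps to this handle */
        for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; ++i)
                atomic_cmpxchg(&p->adev->vce.handles[i], handle, 0);
}
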
857 if (ring == &ring->adev->vce.ring[1]) in amdgpu_vce_ring_test_ib()
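
The last match is an early-out at the top of the IB test: only ring 0 is exercised; in this era the driver skipped the IB test on the second VCE ring, which was not considered reliable.

/* skip the IB test on VCE ring 1; only ring 0 is exercised */
if (ring == &ring->adev->vce.ring[1])
        return 0;
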