/linux-4.1.27/include/linux/ |
D | fence.h | 33 struct fence; 72 struct fence { struct 90 typedef void (*fence_func_t)(struct fence *fence, struct fence_cb *cb); argument 166 const char * (*get_driver_name)(struct fence *fence); 167 const char * (*get_timeline_name)(struct fence *fence); 168 bool (*enable_signaling)(struct fence *fence); 169 bool (*signaled)(struct fence *fence); 170 signed long (*wait)(struct fence *fence, bool intr, signed long timeout); 171 void (*release)(struct fence *fence); 173 int (*fill_driver_data)(struct fence *fence, void *data, int size); [all …]
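The fence_ops table listed here is the driver-side contract: get_driver_name, get_timeline_name, enable_signaling and wait are mandatory (fence_init() rejects ops tables missing them). A minimal provider sketch, assuming a hypothetical "mydrv" driver and reusing the stock fence_default_wait helper from drivers/dma-buf/fence.c; all names here are illustrative:

        #include <linux/fence.h>
        #include <linux/spinlock.h>

        static const char *mydrv_get_driver_name(struct fence *fence)
        {
                return "mydrv";
        }

        static const char *mydrv_get_timeline_name(struct fence *fence)
        {
                return "mydrv-timeline";
        }

        static bool mydrv_enable_signaling(struct fence *fence)
        {
                /* Arm whatever will later call fence_signal(); returning
                 * true promises the fence will signal on its own. */
                return true;
        }

        static const struct fence_ops mydrv_fence_ops = {
                .get_driver_name   = mydrv_get_driver_name,
                .get_timeline_name = mydrv_get_timeline_name,
                .enable_signaling  = mydrv_enable_signaling,
                .wait              = fence_default_wait,
        };

        static void mydrv_fence_setup(struct fence *f, spinlock_t *lock,
                                      unsigned context, unsigned seqno)
        {
                fence_init(f, &mydrv_fence_ops, lock, context, seqno);
        }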
|
D | seqno-fence.h | 32 struct fence base; 50 to_seqno_fence(struct fence *fence) in to_seqno_fence() argument 52 if (fence->ops != &seqno_fence_ops) in to_seqno_fence() 54 return container_of(fence, struct seqno_fence, base); in to_seqno_fence() 95 seqno_fence_init(struct seqno_fence *fence, spinlock_t *lock, in seqno_fence_init() argument 101 BUG_ON(!fence || !sync_buf || !ops); in seqno_fence_init() 109 fence->ops = ops; in seqno_fence_init() 110 fence_init(&fence->base, &seqno_fence_ops, lock, context, seqno); in seqno_fence_init() 112 fence->sync_buf = sync_buf; in seqno_fence_init() 113 fence->seqno_ofs = seqno_ofs; in seqno_fence_init() [all …]
|
D | reservation.h | 55 struct fence __rcu *shared[]; 62 struct fence __rcu *fence_excl; 63 struct reservation_object_list __rcu *fence; member 77 RCU_INIT_POINTER(obj->fence, NULL); in reservation_object_init() 87 struct fence *excl; in reservation_object_fini() 97 fobj = rcu_dereference_protected(obj->fence, 1); in reservation_object_fini() 112 return rcu_dereference_protected(obj->fence, in reservation_object_get_list() 116 static inline struct fence * 125 struct fence *fence); 128 struct fence *fence); [all …]
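A reservation object pairs one optional exclusive fence with an RCU-protected array of shared fences. A sketch of a lockless reader, following the pattern visible in dma-buf.c further down; it omits the seqcount retry loop a production reader needs, and pr_info() stands in for real work:

        #include <linux/reservation.h>
        #include <linux/fence.h>

        static void dump_resv_fences(struct reservation_object *resv)
        {
                struct reservation_object_list *fobj;
                struct fence *excl;
                unsigned i;

                rcu_read_lock();
                excl = rcu_dereference(resv->fence_excl);
                if (excl)
                        pr_info("exclusive: ctx %u seqno %u\n",
                                excl->context, excl->seqno);

                fobj = rcu_dereference(resv->fence);
                for (i = 0; fobj && i < fobj->shared_count; ++i) {
                        struct fence *f = rcu_dereference(fobj->shared[i]);

                        pr_info("shared[%u]: ctx %u seqno %u\n",
                                i, f->context, f->seqno);
                }
                rcu_read_unlock();
        }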
|
/linux-4.1.27/drivers/dma-buf/ |
D | fence.c | 66 int fence_signal_locked(struct fence *fence) in fence_signal_locked() argument 71 if (WARN_ON(!fence)) in fence_signal_locked() 74 if (!ktime_to_ns(fence->timestamp)) { in fence_signal_locked() 75 fence->timestamp = ktime_get(); in fence_signal_locked() 79 if (test_and_set_bit(FENCE_FLAG_SIGNALED_BIT, &fence->flags)) { in fence_signal_locked() 87 trace_fence_signaled(fence); in fence_signal_locked() 89 list_for_each_entry_safe(cur, tmp, &fence->cb_list, node) { in fence_signal_locked() 91 cur->func(fence, cur); in fence_signal_locked() 107 int fence_signal(struct fence *fence) in fence_signal() argument 111 if (!fence) in fence_signal() [all …]
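fence_signal() is the producer side; consumers either block or attach a fence_cb, which fence_signal_locked() invokes from the cb_list walk shown above. A consumer sketch, assuming the callback storage outlives the fence:

        #include <linux/fence.h>

        static void demo_cb(struct fence *fence, struct fence_cb *cb)
        {
                pr_info("fence ctx %u seqno %u signaled\n",
                        fence->context, fence->seqno);
        }

        static void demo_consume(struct fence *f, struct fence_cb *cb)
        {
                /* fence_add_callback() returns -ENOENT, and never runs
                 * demo_cb, if f has already signaled. */
                if (fence_add_callback(f, cb, demo_cb))
                        pr_info("fence already signaled\n");
        }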
|
D | seqno-fence.c | 24 static const char *seqno_fence_get_driver_name(struct fence *fence) in seqno_fence_get_driver_name() argument 26 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_fence_get_driver_name() 27 return seqno_fence->ops->get_driver_name(fence); in seqno_fence_get_driver_name() 30 static const char *seqno_fence_get_timeline_name(struct fence *fence) in seqno_fence_get_timeline_name() argument 32 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_fence_get_timeline_name() 33 return seqno_fence->ops->get_timeline_name(fence); in seqno_fence_get_timeline_name() 36 static bool seqno_enable_signaling(struct fence *fence) in seqno_enable_signaling() argument 38 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_enable_signaling() 39 return seqno_fence->ops->enable_signaling(fence); in seqno_enable_signaling() 42 static bool seqno_signaled(struct fence *fence) in seqno_signaled() argument [all …]
|
D | reservation.c | 86 struct fence *fence) in reservation_object_add_shared_inplace() argument 90 fence_get(fence); in reservation_object_add_shared_inplace() 96 struct fence *old_fence; in reservation_object_add_shared_inplace() 101 if (old_fence->context == fence->context) { in reservation_object_add_shared_inplace() 103 RCU_INIT_POINTER(fobj->shared[i], fence); in reservation_object_add_shared_inplace() 116 RCU_INIT_POINTER(fobj->shared[fobj->shared_count], fence); in reservation_object_add_shared_inplace() 127 struct fence *fence) in reservation_object_add_shared_replace() argument 130 struct fence *old_fence = NULL; in reservation_object_add_shared_replace() 132 fence_get(fence); in reservation_object_add_shared_replace() 135 RCU_INIT_POINTER(fobj->shared[0], fence); in reservation_object_add_shared_replace() [all …]
|
D | Makefile | 1 obj-y := dma-buf.o fence.o reservation.o seqno-fence.o
|
D | dma-buf.c | 122 static void dma_buf_poll_cb(struct fence *fence, struct fence_cb *cb) in dma_buf_poll_cb() argument 138 struct fence *fence_excl; in dma_buf_poll() 158 fobj = rcu_dereference(resv->fence); in dma_buf_poll() 220 struct fence *fence = rcu_dereference(fobj->shared[i]); in dma_buf_poll() local 222 if (!fence_get_rcu(fence)) { in dma_buf_poll() 233 if (!fence_add_callback(fence, &dcb->cb, in dma_buf_poll() 235 fence_put(fence); in dma_buf_poll() 239 fence_put(fence); in dma_buf_poll()
|
/linux-4.1.27/include/trace/events/ |
D | fence.h | 2 #define TRACE_SYSTEM fence 9 struct fence; 14 TP_PROTO(struct fence *fence, struct fence *f1), 16 TP_ARGS(fence, f1), 19 __string(driver, fence->ops->get_driver_name(fence)) 20 __string(timeline, fence->ops->get_timeline_name(fence)) 31 __assign_str(driver, fence->ops->get_driver_name(fence)) 32 __assign_str(timeline, fence->ops->get_timeline_name(fence)) 33 __entry->context = fence->context; 34 __entry->seqno = fence->seqno; [all …]
|
/linux-4.1.27/drivers/staging/android/ |
D | sync.c | 155 struct sync_fence *fence; in sync_fence_alloc() local 157 fence = kzalloc(size, GFP_KERNEL); in sync_fence_alloc() 158 if (fence == NULL) in sync_fence_alloc() 161 fence->file = anon_inode_getfile("sync_fence", &sync_fence_fops, in sync_fence_alloc() 162 fence, 0); in sync_fence_alloc() 163 if (IS_ERR(fence->file)) in sync_fence_alloc() 166 kref_init(&fence->kref); in sync_fence_alloc() 167 strlcpy(fence->name, name, sizeof(fence->name)); in sync_fence_alloc() 169 init_waitqueue_head(&fence->wq); in sync_fence_alloc() 171 return fence; in sync_fence_alloc() [all …]
|
D | sync_debug.c | 56 void sync_fence_debug_add(struct sync_fence *fence) in sync_fence_debug_add() argument 61 list_add_tail(&fence->sync_fence_list, &sync_fence_list_head); in sync_fence_debug_add() 65 void sync_fence_debug_remove(struct sync_fence *fence) in sync_fence_debug_remove() argument 70 list_del(&fence->sync_fence_list); in sync_fence_debug_remove() 85 static void sync_print_pt(struct seq_file *s, struct sync_pt *pt, bool fence) in sync_print_pt() argument 94 fence ? parent->name : "", in sync_print_pt() 95 fence ? "_" : "", in sync_print_pt() 111 if (fence) { in sync_print_pt() 146 static void sync_print_fence(struct seq_file *s, struct sync_fence *fence) in sync_print_fence() argument 152 seq_printf(s, "[%p] %s: %s\n", fence, fence->name, in sync_print_fence() [all …]
|
D | sync.h | 124 struct fence base; 138 struct fence *sync_pt; 139 struct sync_fence *fence; member 170 typedef void (*sync_callback_t)(struct sync_fence *fence, 289 void sync_fence_put(struct sync_fence *fence); 299 void sync_fence_install(struct sync_fence *fence, int fd); 311 int sync_fence_wait_async(struct sync_fence *fence, 325 int sync_fence_cancel_async(struct sync_fence *fence, 336 int sync_fence_wait(struct sync_fence *fence, long timeout); 342 extern void sync_fence_debug_add(struct sync_fence *fence); [all …]
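A kernel-side consumer sketch for the staging sync API declared here; per the sync.h documentation the timeout is in milliseconds, and the reference is dropped with sync_fence_put() whether or not the wait succeeds:

        #include "sync.h"       /* drivers/staging/android/sync.h */

        static int demo_wait_and_release(struct sync_fence *fence)
        {
                int err = sync_fence_wait(fence, 1000); /* up to 1000 ms */

                sync_fence_put(fence);
                return err;
        }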
|
D | sw_sync.c | 170 struct sync_fence *fence; in sw_sync_ioctl_create_fence() local 188 fence = sync_fence_create(data.name, pt); in sw_sync_ioctl_create_fence() 189 if (fence == NULL) { in sw_sync_ioctl_create_fence() 195 data.fence = fd; in sw_sync_ioctl_create_fence() 197 sync_fence_put(fence); in sw_sync_ioctl_create_fence() 202 sync_fence_install(fence, fd); in sw_sync_ioctl_create_fence()
|
/linux-4.1.27/drivers/gpu/drm/nouveau/ |
D | nouveau_fence.c | 44 from_fence(struct fence *fence) in from_fence() argument 46 return container_of(fence, struct nouveau_fence, base); in from_fence() 50 nouveau_fctx(struct nouveau_fence *fence) in nouveau_fctx() argument 52 return container_of(fence->base.lock, struct nouveau_fence_chan, lock); in nouveau_fctx() 56 nouveau_fence_signal(struct nouveau_fence *fence) in nouveau_fence_signal() argument 60 fence_signal_locked(&fence->base); in nouveau_fence_signal() 61 list_del(&fence->head); in nouveau_fence_signal() 62 rcu_assign_pointer(fence->channel, NULL); in nouveau_fence_signal() 64 if (test_bit(FENCE_FLAG_USER_BITS, &fence->base.flags)) { in nouveau_fence_signal() 65 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); in nouveau_fence_signal() [all …]
|
D | nv04_fence.c | 38 nv04_fence_emit(struct nouveau_fence *fence) in nv04_fence_emit() argument 40 struct nouveau_channel *chan = fence->channel; in nv04_fence_emit() 44 OUT_RING (chan, fence->base.seqno); in nv04_fence_emit() 51 nv04_fence_sync(struct nouveau_fence *fence, in nv04_fence_sync() argument 67 struct nv04_fence_chan *fctx = chan->fence; in nv04_fence_context_del() 69 chan->fence = NULL; in nv04_fence_context_del() 82 chan->fence = fctx; in nv04_fence_context_new() 91 struct nv04_fence_priv *priv = drm->fence; in nv04_fence_destroy() 92 drm->fence = NULL; in nv04_fence_destroy() 101 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv04_fence_create()
|
D | nv84_fence.c | 34 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_crtc() 74 nv84_fence_emit(struct nouveau_fence *fence) in nv84_fence_emit() argument 76 struct nouveau_channel *chan = fence->channel; in nv84_fence_emit() 77 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_emit() 80 if (fence->sysmem) in nv84_fence_emit() 85 return fctx->base.emit32(chan, addr, fence->base.seqno); in nv84_fence_emit() 89 nv84_fence_sync(struct nouveau_fence *fence, in nv84_fence_sync() argument 92 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_sync() 95 if (fence->sysmem) in nv84_fence_sync() 100 return fctx->base.sync32(chan, addr, fence->base.seqno); in nv84_fence_sync() [all …]
|
D | nv10_fence.c | 30 nv10_fence_emit(struct nouveau_fence *fence) in nv10_fence_emit() argument 32 struct nouveau_channel *chan = fence->channel; in nv10_fence_emit() 36 OUT_RING (chan, fence->base.seqno); in nv10_fence_emit() 44 nv10_fence_sync(struct nouveau_fence *fence, in nv10_fence_sync() argument 59 struct nv10_fence_chan *fctx = chan->fence; in nv10_fence_context_del() 65 chan->fence = NULL; in nv10_fence_context_del() 74 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv10_fence_context_new() 88 struct nv10_fence_priv *priv = drm->fence; in nv10_fence_destroy() 93 drm->fence = NULL; in nv10_fence_destroy() 102 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv10_fence_create()
|
D | nv17_fence.c | 33 nv17_fence_sync(struct nouveau_fence *fence, in nv17_fence_sync() argument 37 struct nv10_fence_priv *priv = chan->drm->fence; in nv17_fence_sync() 38 struct nv10_fence_chan *fctx = chan->fence; in nv17_fence_sync() 76 struct nv10_fence_priv *priv = chan->drm->fence; in nv17_fence_context_new() 83 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv17_fence_context_new() 108 struct nv10_fence_priv *priv = drm->fence; in nv17_fence_resume() 119 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv17_fence_create()
|
D | nouveau_fence.h | 11 struct fence base; 27 void nouveau_fence_work(struct fence *, void (*)(void *), void *); 64 #define nouveau_fence(drm) ((struct nouveau_fence_priv *)(drm)->fence)
|
D | nouveau_gem.c | 120 struct fence *fence = NULL; in nouveau_gem_object_unmap() local 129 fence = rcu_dereference_protected(fobj->shared[0], in nouveau_gem_object_unmap() 132 fence = reservation_object_get_excl(nvbo->bo.resv); in nouveau_gem_object_unmap() 134 if (fence && mapped) { in nouveau_gem_object_unmap() 135 nouveau_fence_work(fence, nouveau_gem_object_delete, vma); in nouveau_gem_object_unmap() 330 validate_fini_no_ticket(struct validate_op *op, struct nouveau_fence *fence, in validate_fini_no_ticket() argument 340 if (likely(fence)) in validate_fini_no_ticket() 341 nouveau_bo_fence(nvbo, fence, !!b->write_domains); in validate_fini_no_ticket() 356 validate_fini(struct validate_op *op, struct nouveau_fence *fence, in validate_fini() argument 359 validate_fini_no_ticket(op, fence, pbbo); in validate_fini() [all …]
|
D | nv50_fence.c | 38 struct nv10_fence_priv *priv = chan->drm->fence; in nv50_fence_context_new() 45 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv50_fence_context_new() 90 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv50_fence_create()
|
D | nvc0_fence.c | 68 struct nv84_fence_chan *fctx = chan->fence; in nvc0_fence_context_new() 80 struct nv84_fence_priv *priv = drm->fence; in nvc0_fence_create()
|
D | nouveau_bo.c | 55 nouveau_fence_unref(®->fence); in nv10_bo_update_tile_region() 80 (!tile->fence || nouveau_fence_done(tile->fence))) in nv10_bo_get_tile_region() 91 struct fence *fence) in nv10_bo_put_tile_region() argument 97 tile->fence = (struct nouveau_fence *)fence_get(fence); in nv10_bo_put_tile_region() 1068 struct nouveau_fence *fence; in nouveau_bo_move_m2mf() local 1086 ret = nouveau_fence_new(chan, false, &fence); in nouveau_bo_move_m2mf() 1089 &fence->base, in nouveau_bo_move_m2mf() 1093 nouveau_fence_unref(&fence); in nouveau_bo_move_m2mf() 1282 struct fence *fence = reservation_object_get_excl(bo->resv); in nouveau_bo_vm_cleanup() local 1284 nv10_bo_put_tile_region(dev, *old_tile, fence); in nouveau_bo_vm_cleanup() [all …]
|
D | nouveau_chan.c | 46 struct nouveau_fence *fence = NULL; in nouveau_channel_idle() local 49 ret = nouveau_fence_new(chan, false, &fence); in nouveau_channel_idle() 51 ret = nouveau_fence_wait(fence, false, false); in nouveau_channel_idle() 52 nouveau_fence_unref(&fence); in nouveau_channel_idle() 66 if (chan->fence) { in nouveau_channel_del()
|
D | nouveau_drm.h | 58 struct nouveau_fence *fence; member 142 void *fence; member
|
D | nouveau_chan.h | 25 void *fence; member
|
D | nouveau_display.c | 656 struct nouveau_fence_chan *fctx = chan->fence; in nouveau_page_flip_emit() 708 struct nouveau_fence *fence; in nouveau_crtc_page_flip() local 782 ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence); in nouveau_crtc_page_flip() 790 nouveau_bo_fence(old_bo, fence, false); in nouveau_crtc_page_flip() 794 nouveau_fence_unref(&fence); in nouveau_crtc_page_flip() 813 struct nouveau_fence_chan *fctx = chan->fence; in nouveau_finish_page_flip()
|
D | nouveau_drm.c | 141 if (drm->fence) in nouveau_accel_fini() 580 if (drm->fence && nouveau_fence(drm)->suspend) { in nouveau_do_suspend() 606 if (drm->fence && nouveau_fence(drm)->resume) in nouveau_do_suspend() 632 if (drm->fence && nouveau_fence(drm)->resume) in nouveau_do_resume()
|
/linux-4.1.27/drivers/gpu/drm/radeon/ |
D | radeon_fence.c | 130 struct radeon_fence **fence, in radeon_fence_emit() argument 136 *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL); in radeon_fence_emit() 137 if ((*fence) == NULL) { in radeon_fence_emit() 140 (*fence)->rdev = rdev; in radeon_fence_emit() 141 (*fence)->seq = seq; in radeon_fence_emit() 142 (*fence)->ring = ring; in radeon_fence_emit() 143 (*fence)->is_vm_update = false; in radeon_fence_emit() 144 fence_init(&(*fence)->base, &radeon_fence_ops, in radeon_fence_emit() 146 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit() 147 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit() [all …]
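Every radeon ring follows the same emit → wait → unref lifecycle visible throughout the entries below. A condensed sketch, with ring locking and command submission elided (radeon.h is the driver-internal header):

        #include "radeon.h"     /* driver-internal */

        static int demo_emit_and_wait(struct radeon_device *rdev, int ring_idx)
        {
                struct radeon_fence *fence;
                int r;

                r = radeon_fence_emit(rdev, &fence, ring_idx);
                if (r)
                        return r;

                r = radeon_fence_wait(fence, false);    /* non-interruptible */
                radeon_fence_unref(&fence);
                return r;
        }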
|
D | radeon_sync.c | 64 struct radeon_fence *fence) in radeon_sync_fence() argument 68 if (!fence) in radeon_sync_fence() 71 other = sync->sync_to[fence->ring]; in radeon_sync_fence() 72 sync->sync_to[fence->ring] = radeon_fence_later(fence, other); in radeon_sync_fence() 74 if (fence->is_vm_update) { in radeon_sync_fence() 76 sync->last_vm_update = radeon_fence_later(fence, other); in radeon_sync_fence() 95 struct fence *f; in radeon_sync_resv() 96 struct radeon_fence *fence; in radeon_sync_resv() local 102 fence = f ? to_radeon_fence(f) : NULL; in radeon_sync_resv() 103 if (fence && fence->rdev == rdev) in radeon_sync_resv() [all …]
|
D | radeon_sa.c | 150 radeon_fence_unref(&sa_bo->fence); in radeon_sa_bo_remove_locked() 163 if (sa_bo->fence == NULL || !radeon_fence_signaled(sa_bo->fence)) { in radeon_sa_bo_try_free() 277 if (!radeon_fence_signaled(sa_bo->fence)) { in radeon_sa_bo_next_hole() 278 fences[i] = sa_bo->fence; in radeon_sa_bo_next_hole() 301 ++tries[best_bo->fence->ring]; in radeon_sa_bo_next_hole() 329 (*sa_bo)->fence = NULL; in radeon_sa_bo_new() 377 struct radeon_fence *fence) in radeon_sa_bo_free() argument 387 if (fence && !radeon_fence_signaled(fence)) { in radeon_sa_bo_free() 388 (*sa_bo)->fence = radeon_fence_ref(fence); in radeon_sa_bo_free() 390 &sa_manager->flist[fence->ring]); in radeon_sa_bo_free() [all …]
|
D | radeon_test.c | 87 struct radeon_fence *fence = NULL; in radeon_do_test_moves() local 120 fence = radeon_copy_dma(rdev, gtt_addr, vram_addr, in radeon_do_test_moves() 124 fence = radeon_copy_blit(rdev, gtt_addr, vram_addr, in radeon_do_test_moves() 127 if (IS_ERR(fence)) { in radeon_do_test_moves() 129 r = PTR_ERR(fence); in radeon_do_test_moves() 133 r = radeon_fence_wait(fence, false); in radeon_do_test_moves() 139 radeon_fence_unref(&fence); in radeon_do_test_moves() 171 fence = radeon_copy_dma(rdev, vram_addr, gtt_addr, in radeon_do_test_moves() 175 fence = radeon_copy_blit(rdev, vram_addr, gtt_addr, in radeon_do_test_moves() 178 if (IS_ERR(fence)) { in radeon_do_test_moves() [all …]
|
D | evergreen_dma.c | 42 struct radeon_fence *fence) in evergreen_dma_fence_ring_emit() argument 44 struct radeon_ring *ring = &rdev->ring[fence->ring]; in evergreen_dma_fence_ring_emit() 45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit() 50 radeon_ring_write(ring, fence->seq); in evergreen_dma_fence_ring_emit() 113 struct radeon_fence *fence; in evergreen_copy_dma() local 149 r = radeon_fence_emit(rdev, &fence, ring->idx); in evergreen_copy_dma() 157 radeon_sync_free(rdev, &sync, fence); in evergreen_copy_dma() 159 return fence; in evergreen_copy_dma()
|
D | radeon_ib.c | 70 ib->fence = NULL; in radeon_ib_get() 96 radeon_sync_free(rdev, &ib->sync, ib->fence); in radeon_ib_free() 97 radeon_sa_bo_free(rdev, &ib->sa_bo, ib->fence); in radeon_ib_free() 98 radeon_fence_unref(&ib->fence); in radeon_ib_free() 165 r = radeon_fence_emit(rdev, &ib->fence, ib->ring); in radeon_ib_schedule() 172 const_ib->fence = radeon_fence_ref(ib->fence); in radeon_ib_schedule() 176 radeon_vm_fence(rdev, ib->vm, ib->fence); in radeon_ib_schedule()
|
D | radeon_benchmark.c | 42 struct radeon_fence *fence = NULL; in radeon_benchmark_do_move() local 49 fence = radeon_copy_dma(rdev, saddr, daddr, in radeon_benchmark_do_move() 54 fence = radeon_copy_blit(rdev, saddr, daddr, in radeon_benchmark_do_move() 62 if (IS_ERR(fence)) in radeon_benchmark_do_move() 63 return PTR_ERR(fence); in radeon_benchmark_do_move() 65 r = radeon_fence_wait(fence, false); in radeon_benchmark_do_move() 66 radeon_fence_unref(&fence); in radeon_benchmark_do_move()
|
D | rv770_dma.c | 47 struct radeon_fence *fence; in rv770_copy_dma() local 83 r = radeon_fence_emit(rdev, &fence, ring->idx); in rv770_copy_dma() 91 radeon_sync_free(rdev, &sync, fence); in rv770_copy_dma() 93 return fence; in rv770_copy_dma()
|
D | radeon_vce.c | 329 uint32_t handle, struct radeon_fence **fence) in radeon_vce_get_create_msg() argument 377 if (fence) in radeon_vce_get_create_msg() 378 *fence = radeon_fence_ref(ib.fence); in radeon_vce_get_create_msg() 396 uint32_t handle, struct radeon_fence **fence) in radeon_vce_get_destroy_msg() argument 434 if (fence) in radeon_vce_get_destroy_msg() 435 *fence = radeon_fence_ref(ib.fence); in radeon_vce_get_destroy_msg() 718 struct radeon_fence *fence) in radeon_vce_fence_emit() argument 720 struct radeon_ring *ring = &rdev->ring[fence->ring]; in radeon_vce_fence_emit() 721 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in radeon_vce_fence_emit() 726 radeon_ring_write(ring, fence->seq); in radeon_vce_fence_emit() [all …]
|
D | r600_dma.c | 288 struct radeon_fence *fence) in r600_dma_fence_ring_emit() argument 290 struct radeon_ring *ring = &rdev->ring[fence->ring]; in r600_dma_fence_ring_emit() 291 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_dma_fence_ring_emit() 297 radeon_ring_write(ring, lower_32_bits(fence->seq)); in r600_dma_fence_ring_emit() 371 r = radeon_fence_wait(ib.fence, false); in r600_dma_ib_test() 383 DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i); in r600_dma_ib_test() 444 struct radeon_fence *fence; in r600_copy_dma() local 480 r = radeon_fence_emit(rdev, &fence, ring->idx); in r600_copy_dma() 488 radeon_sync_free(rdev, &sync, fence); in r600_copy_dma() 490 return fence; in r600_copy_dma()
|
D | radeon_vm.c | 196 struct radeon_fence *fence = rdev->vm_manager.active[i]; in radeon_vm_grab_id() local 198 if (fence == NULL) { in radeon_vm_grab_id() 205 if (radeon_fence_is_earlier(fence, best[fence->ring])) { in radeon_vm_grab_id() 206 best[fence->ring] = fence; in radeon_vm_grab_id() 207 choices[fence->ring == ring ? 0 : 1] = i; in radeon_vm_grab_id() 270 struct radeon_fence *fence) in radeon_vm_fence() argument 272 unsigned vm_id = vm->ids[fence->ring].id; in radeon_vm_fence() 275 rdev->vm_manager.active[vm_id] = radeon_fence_ref(fence); in radeon_vm_fence() 277 radeon_fence_unref(&vm->ids[fence->ring].last_id_use); in radeon_vm_fence() 278 vm->ids[fence->ring].last_id_use = radeon_fence_ref(fence); in radeon_vm_fence() [all …]
|
D | radeon_uvd.c | 215 struct radeon_fence *fence; in radeon_uvd_suspend() local 220 R600_RING_TYPE_UVD_INDEX, handle, &fence); in radeon_uvd_suspend() 226 radeon_fence_wait(fence, false); in radeon_uvd_suspend() 227 radeon_fence_unref(&fence); in radeon_uvd_suspend() 290 struct radeon_fence *fence; in radeon_uvd_free_handles() local 295 R600_RING_TYPE_UVD_INDEX, handle, &fence); in radeon_uvd_free_handles() 301 radeon_fence_wait(fence, false); in radeon_uvd_free_handles() 302 radeon_fence_unref(&fence); in radeon_uvd_free_handles() 426 struct fence *f; in radeon_uvd_cs_msg() 697 struct radeon_fence **fence) in radeon_uvd_send_msg() argument [all …]
|
D | uvd_v1_0.c | 82 struct radeon_fence *fence) in uvd_v1_0_fence_emit() argument 84 struct radeon_ring *ring = &rdev->ring[fence->ring]; in uvd_v1_0_fence_emit() 85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit() 90 radeon_ring_write(ring, fence->seq); in uvd_v1_0_fence_emit() 501 struct radeon_fence *fence = NULL; in uvd_v1_0_ib_test() local 519 r = radeon_uvd_get_destroy_msg(rdev, ring->idx, 1, &fence); in uvd_v1_0_ib_test() 525 r = radeon_fence_wait(fence, false); in uvd_v1_0_ib_test() 532 radeon_fence_unref(&fence); in uvd_v1_0_ib_test()
|
D | uvd_v2_2.c | 40 struct radeon_fence *fence) in uvd_v2_2_fence_emit() argument 42 struct radeon_ring *ring = &rdev->ring[fence->ring]; in uvd_v2_2_fence_emit() 43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit() 46 radeon_ring_write(ring, fence->seq); in uvd_v2_2_fence_emit()
|
D | cik_sdma.c | 201 struct radeon_fence *fence) in cik_sdma_fence_ring_emit() argument 203 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cik_sdma_fence_ring_emit() 204 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_sdma_fence_ring_emit() 210 radeon_ring_write(ring, fence->seq); in cik_sdma_fence_ring_emit() 214 cik_sdma_hdp_flush_ring_emit(rdev, fence->ring); in cik_sdma_fence_ring_emit() 584 struct radeon_fence *fence; in cik_copy_dma() local 622 r = radeon_fence_emit(rdev, &fence, ring->idx); in cik_copy_dma() 630 radeon_sync_free(rdev, &sync, fence); in cik_copy_dma() 632 return fence; in cik_copy_dma() 740 r = radeon_fence_wait(ib.fence, false); in cik_sdma_ib_test() [all …]
|
D | si_dma.c | 236 struct radeon_fence *fence; in si_copy_dma() local 272 r = radeon_fence_emit(rdev, &fence, ring->idx); in si_copy_dma() 280 radeon_sync_free(rdev, &sync, fence); in si_copy_dma() 282 return fence; in si_copy_dma()
|
D | radeon_semaphore.c | 94 struct radeon_fence *fence) in radeon_semaphore_free() argument 103 radeon_sa_bo_free(rdev, &(*semaphore)->sa_bo, fence); in radeon_semaphore_free()
|
D | radeon.h | 367 struct fence base; 382 int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence **fence, int ring); 384 bool radeon_fence_signaled(struct radeon_fence *fence); 385 int radeon_fence_wait(struct radeon_fence *fence, bool interruptible); 391 struct radeon_fence *radeon_fence_ref(struct radeon_fence *fence); 392 void radeon_fence_unref(struct radeon_fence **fence); 394 bool radeon_fence_need_sync(struct radeon_fence *fence, int ring); 395 void radeon_fence_note_sync(struct radeon_fence *fence, int ring); 562 struct radeon_fence *fence; member 604 struct radeon_fence *fence); [all …]
|
D | radeon_object.h | 156 extern void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, 189 struct radeon_fence *fence);
|
D | radeon_asic.h | 77 struct radeon_fence *fence); 173 struct radeon_fence *fence); 323 struct radeon_fence *fence); 329 struct radeon_fence *fence); 543 struct radeon_fence *fence); 603 struct radeon_fence *fence); 706 struct radeon_fence *fence); 787 struct radeon_fence *fence); 805 struct radeon_fence *fence); 807 struct radeon_fence *fence); [all …]
|
D | radeon_display.c | 405 if (work->fence) { in radeon_flip_work_func() 406 struct radeon_fence *fence; in radeon_flip_work_func() local 408 fence = to_radeon_fence(work->fence); in radeon_flip_work_func() 409 if (fence && fence->rdev == rdev) { in radeon_flip_work_func() 410 r = radeon_fence_wait(fence, false); in radeon_flip_work_func() 419 r = fence_wait(work->fence, false); in radeon_flip_work_func() 429 fence_put(work->fence); in radeon_flip_work_func() 430 work->fence = NULL; in radeon_flip_work_func() 506 work->fence = fence_get(reservation_object_get_excl(new_rbo->tbo.resv)); in radeon_crtc_page_flip() 588 fence_put(work->fence); in radeon_crtc_page_flip()
|
D | r200.c | 90 struct radeon_fence *fence; in r200_copy_dma() local 122 r = radeon_fence_emit(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX); in r200_copy_dma() 128 return fence; in r200_copy_dma()
|
D | radeon_ttm.c | 260 struct radeon_fence *fence; in radeon_move_blit() local 299 fence = radeon_copy(rdev, old_start, new_start, num_pages, bo->resv); in radeon_move_blit() 300 if (IS_ERR(fence)) in radeon_move_blit() 301 return PTR_ERR(fence); in radeon_move_blit() 303 r = ttm_bo_move_accel_cleanup(bo, &fence->base, in radeon_move_blit() 305 radeon_fence_unref(&fence); in radeon_move_blit()
|
D | radeon_object.c | 845 void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, in radeon_bo_fence() argument 851 reservation_object_add_shared_fence(resv, &fence->base); in radeon_bo_fence() 853 reservation_object_add_excl_fence(resv, &fence->base); in radeon_bo_fence()
|
D | r300.c | 184 struct radeon_fence *fence) in r300_fence_ring_emit() argument 186 struct radeon_ring *ring = &rdev->ring[fence->ring]; in r300_fence_ring_emit() 211 radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0)); in r300_fence_ring_emit() 212 radeon_ring_write(ring, fence->seq); in r300_fence_ring_emit()
|
D | r600.c | 2818 struct radeon_fence *fence) in r600_fence_ring_emit() argument 2820 struct radeon_ring *ring = &rdev->ring[fence->ring]; in r600_fence_ring_emit() 2828 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_fence_ring_emit() 2840 radeon_ring_write(ring, fence->seq); in r600_fence_ring_emit() 2857 …radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET… in r600_fence_ring_emit() 2858 radeon_ring_write(ring, fence->seq); in r600_fence_ring_emit() 2919 struct radeon_fence *fence; in r600_copy_cpdma() local 2965 r = radeon_fence_emit(rdev, &fence, ring->idx); in r600_copy_cpdma() 2973 radeon_sync_free(rdev, &sync, fence); in r600_copy_cpdma() 2975 return fence; in r600_copy_cpdma() [all …]
|
D | ni.c | 1384 struct radeon_fence *fence) in cayman_fence_ring_emit() argument 1386 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cayman_fence_ring_emit() 1387 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cayman_fence_ring_emit() 1402 radeon_ring_write(ring, fence->seq); in cayman_fence_ring_emit()
|
D | r100.c | 851 struct radeon_fence *fence) in r100_fence_ring_emit() argument 853 struct radeon_ring *ring = &rdev->ring[fence->ring]; in r100_fence_ring_emit() 866 radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0)); in r100_fence_ring_emit() 867 radeon_ring_write(ring, fence->seq); in r100_fence_ring_emit() 889 struct radeon_fence *fence; in r100_copy_blit() local 950 r = radeon_fence_emit(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX); in r100_copy_blit() 956 return fence; in r100_copy_blit() 3724 r = radeon_fence_wait(ib.fence, false); in r100_ib_test()
|
D | cik.c | 3938 struct radeon_fence *fence) in cik_fence_gfx_ring_emit() argument 3940 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cik_fence_gfx_ring_emit() 3941 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_fence_gfx_ring_emit() 3954 radeon_ring_write(ring, fence->seq - 1); in cik_fence_gfx_ring_emit() 3965 radeon_ring_write(ring, fence->seq); in cik_fence_gfx_ring_emit() 3979 struct radeon_fence *fence) in cik_fence_compute_ring_emit() argument 3981 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cik_fence_compute_ring_emit() 3982 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_fence_compute_ring_emit() 3993 radeon_ring_write(ring, fence->seq); in cik_fence_compute_ring_emit() 4047 struct radeon_fence *fence; in cik_copy_cpdma() local [all …]
|
D | radeon_cs.c | 415 &parser->ib.fence->base); in radeon_cs_parser_fini()
|
D | si.c | 3369 struct radeon_fence *fence) in si_fence_ring_emit() argument 3371 struct radeon_ring *ring = &rdev->ring[fence->ring]; in si_fence_ring_emit() 3372 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in si_fence_ring_emit() 3391 radeon_ring_write(ring, fence->seq); in si_fence_ring_emit()
|
/linux-4.1.27/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_fence.c | 54 struct vmw_fence_obj fence; member 77 struct vmw_fence_obj *fence; member 85 fman_from_fence(struct vmw_fence_obj *fence) in fman_from_fence() argument 87 return container_of(fence->base.lock, struct vmw_fence_manager, lock); in fman_from_fence() 112 static void vmw_fence_obj_destroy(struct fence *f) in vmw_fence_obj_destroy() 114 struct vmw_fence_obj *fence = in vmw_fence_obj_destroy() local 117 struct vmw_fence_manager *fman = fman_from_fence(fence); in vmw_fence_obj_destroy() 121 list_del_init(&fence->head); in vmw_fence_obj_destroy() 124 fence->destroy(fence); in vmw_fence_obj_destroy() 127 static const char *vmw_fence_get_driver_name(struct fence *f) in vmw_fence_get_driver_name() [all …]
|
D | vmwgfx_fence.h | 55 struct fence base; 59 void (*destroy)(struct vmw_fence_obj *fence); 70 struct vmw_fence_obj *fence = *fence_p; in vmw_fence_obj_unreference() local 73 if (fence) in vmw_fence_obj_unreference() 74 fence_put(&fence->base); in vmw_fence_obj_unreference() 78 vmw_fence_obj_reference(struct vmw_fence_obj *fence) in vmw_fence_obj_reference() argument 80 if (fence) in vmw_fence_obj_reference() 81 fence_get(&fence->base); in vmw_fence_obj_reference() 82 return fence; in vmw_fence_obj_reference() 87 extern bool vmw_fence_obj_signaled(struct vmw_fence_obj *fence); [all …]
|
D | vmwgfx_shader.c | 217 struct vmw_fence_obj *fence; in vmw_gb_shader_unbind() local 240 &fence, NULL); in vmw_gb_shader_unbind() 242 vmw_fence_single_bo(val_buf->bo, fence); in vmw_gb_shader_unbind() 244 if (likely(fence != NULL)) in vmw_gb_shader_unbind() 245 vmw_fence_obj_unreference(&fence); in vmw_gb_shader_unbind()
|
D | vmwgfx_execbuf.c | 2381 struct vmw_fence_obj *fence, in vmw_execbuf_copy_fence_user() argument 2393 BUG_ON(fence == NULL); in vmw_execbuf_copy_fence_user() 2396 fence_rep.seqno = fence->base.seqno; in vmw_execbuf_copy_fence_user() 2417 (void) vmw_fence_obj_wait(fence, false, false, in vmw_execbuf_copy_fence_user() 2434 struct vmw_fence_obj *fence = NULL; in vmw_execbuf_process() local 2545 &fence, in vmw_execbuf_process() 2559 (void *) fence); in vmw_execbuf_process() 2563 __vmw_execbuf_release_pinned_bo(dev_priv, fence); in vmw_execbuf_process() 2567 user_fence_rep, fence, handle); in vmw_execbuf_process() 2571 *out_fence = fence; in vmw_execbuf_process() [all …]
|
D | vmwgfx_reg.h | 47 __le32 fence; member
|
D | vmwgfx_surface.c | 440 struct vmw_fence_obj *fence; in vmw_legacy_srf_dma() local 465 &fence, NULL); in vmw_legacy_srf_dma() 467 vmw_fence_single_bo(val_buf->bo, fence); in vmw_legacy_srf_dma() 469 if (likely(fence != NULL)) in vmw_legacy_srf_dma() 470 vmw_fence_obj_unreference(&fence); in vmw_legacy_srf_dma() 1128 struct vmw_fence_obj *fence; in vmw_gb_surface_unbind() local 1182 &fence, NULL); in vmw_gb_surface_unbind() 1184 vmw_fence_single_bo(val_buf->bo, fence); in vmw_gb_surface_unbind() 1186 if (likely(fence != NULL)) in vmw_gb_surface_unbind() 1187 vmw_fence_obj_unreference(&fence); in vmw_gb_surface_unbind()
|
D | vmwgfx_context.c | 330 struct vmw_fence_obj *fence; in vmw_gb_context_unbind() local 382 &fence, NULL); in vmw_gb_context_unbind() 384 vmw_fence_single_bo(bo, fence); in vmw_gb_context_unbind() 386 if (likely(fence != NULL)) in vmw_gb_context_unbind() 387 vmw_fence_obj_unreference(&fence); in vmw_gb_context_unbind()
|
D | vmwgfx_resource.c | 1441 struct vmw_fence_obj *fence) in vmw_fence_single_bo() argument 1448 if (fence == NULL) { in vmw_fence_single_bo() 1449 vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); in vmw_fence_single_bo() 1450 reservation_object_add_excl_fence(bo->resv, &fence->base); in vmw_fence_single_bo() 1451 fence_put(&fence->base); in vmw_fence_single_bo() 1453 reservation_object_add_excl_fence(bo->resv, &fence->base); in vmw_fence_single_bo()
|
D | vmwgfx_drv.h | 670 struct vmw_fence_obj *fence); 817 struct vmw_fence_obj *fence); 829 struct vmw_fence_obj *fence,
|
D | vmwgfx_kms.c | 1727 struct vmw_fence_obj *fence = NULL; in vmw_du_page_flip() local 1751 0, 0, &clips, 1, 1, &fence); in vmw_du_page_flip() 1754 0, 0, &clips, 1, 1, &fence); in vmw_du_page_flip() 1759 if (!fence) { in vmw_du_page_flip() 1764 ret = vmw_event_fence_action_queue(file_priv, fence, in vmw_du_page_flip() 1774 vmw_fence_obj_unreference(&fence); in vmw_du_page_flip()
|
D | svga_reg.h | 1220 uint32 fence; member
|
D | vmwgfx_fifo.c | 521 iowrite32(*seqno, &cmd_fence->fence); in vmw_fifo_send_fence()
|
/linux-4.1.27/arch/metag/include/asm/ |
D | atomic_lock1.h | 34 fence(); in atomic_set() 46 fence(); \ 60 fence(); \ 81 fence(); in atomic_clear_mask() 91 fence(); in atomic_set_mask() 104 fence(); in atomic_cmpxchg() 122 fence(); in __atomic_add_unless() 138 fence(); in atomic_sub_if_positive()
|
D | bitops.h | 20 fence(); in set_bit() 33 fence(); in clear_bit() 46 fence(); in change_bit() 62 fence(); in test_and_set_bit() 82 fence(); in test_and_clear_bit() 100 fence(); in test_and_change_bit()
|
D | spinlock_lock1.h | 25 fence(); in arch_spin_lock() 44 fence(); in arch_spin_trylock() 74 fence(); in arch_write_lock() 92 fence(); in arch_write_trylock() 138 fence(); in arch_read_lock() 153 fence(); in arch_read_unlock() 167 fence(); in arch_read_trylock()
|
D | barrier.h | 51 #define fence() do { } while (0) macro 66 static inline void fence(void) in fence() function 73 #define smp_mb() fence() 74 #define smp_rmb() fence() 77 #define fence() do { } while (0) macro
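On metag, fence() drains the write combiner and compiles to nothing unless CONFIG_METAG_SMP_WRITE_REORDERING is set (hence the three definitions above); the lock1 files use it around updates made under the global hardware lock. A reconstruction of that pattern, assuming the __global_lock1()/__global_unlock1() helpers from asm/global_lock.h and illustrative ordering:

        #include <asm/barrier.h>
        #include <asm/global_lock.h>

        static inline void locked_store(int *shared, int value)
        {
                unsigned long flags;

                __global_lock1(flags);
                fence();        /* no-op unless SMP write reordering */
                *shared = value;
                __global_unlock1(flags);
        }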
|
D | cmpxchg_lock1.h | 13 fence(); in xchg_u32() 25 fence(); in xchg_u8() 41 fence(); in __cmpxchg_u32()
|
/linux-4.1.27/drivers/gpu/drm/msm/ |
D | msm_gem.h | 79 uint32_t fence = 0; in msm_gem_fence() local 82 fence = msm_obj->write_fence; in msm_gem_fence() 84 fence = max(fence, msm_obj->read_fence); in msm_gem_fence() 86 return fence; in msm_gem_fence() 101 uint32_t fence; member
|
D | msm_gpu.c | 297 uint32_t fence = gpu->funcs->last_fence(gpu); in hangcheck_handler() local 299 if (fence != gpu->hangcheck_fence) { in hangcheck_handler() 301 gpu->hangcheck_fence = fence; in hangcheck_handler() 302 } else if (fence < gpu->submitted_fence) { in hangcheck_handler() 304 gpu->hangcheck_fence = fence; in hangcheck_handler() 308 gpu->name, fence); in hangcheck_handler() 425 uint32_t fence = gpu->funcs->last_fence(gpu); in retire_worker() local 427 msm_update_fence(gpu->dev, fence); in retire_worker() 437 if ((obj->read_fence <= fence) && in retire_worker() 438 (obj->write_fence <= fence)) { in retire_worker() [all …]
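The retire and hangcheck logic above reduces to comparisons on monotonically increasing 32-bit fence counters. The retire test in isolation, assuming (as the msm code above appears to) that the counters never wrap:

        #include <linux/types.h>

        static bool demo_bo_idle(uint32_t read_fence, uint32_t write_fence,
                                 uint32_t last_completed)
        {
                /* A buffer is idle once both of its fences are at or
                 * below the last fence the GPU completed. */
                return read_fence <= last_completed &&
                       write_fence <= last_completed;
        }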
|
D | msm_atomic.c | 25 uint32_t fence; member 139 c->fence = max(c->fence, msm_gem_fence(to_msm_bo(obj), MSM_PREP_READ)); in add_fb() 252 msm_queue_fence_cb(dev, &c->fence_cb, c->fence); in msm_atomic_commit() 258 ret = msm_wait_fence_interruptable(dev, c->fence, &timeout); in msm_atomic_commit()
|
D | msm_drv.h | 149 uint32_t fence; member 167 int msm_wait_fence_interruptable(struct drm_device *dev, uint32_t fence, 170 struct msm_fence_cb *cb, uint32_t fence); 171 void msm_update_fence(struct drm_device *dev, uint32_t fence); 205 struct msm_gpu *gpu, bool write, uint32_t fence); 291 static inline bool fence_completed(struct drm_device *dev, uint32_t fence) in fence_completed() argument 294 return priv->completed_fence >= fence; in fence_completed()
|
D | msm_drv.c | 640 int msm_wait_fence_interruptable(struct drm_device *dev, uint32_t fence, in msm_wait_fence_interruptable() argument 649 if (fence > priv->gpu->submitted_fence) { in msm_wait_fence_interruptable() 651 fence, priv->gpu->submitted_fence); in msm_wait_fence_interruptable() 657 ret = fence_completed(dev, fence) ? 0 : -EBUSY; in msm_wait_fence_interruptable() 669 fence_completed(dev, fence), in msm_wait_fence_interruptable() 674 fence, priv->completed_fence); in msm_wait_fence_interruptable() 685 struct msm_fence_cb *cb, uint32_t fence) in msm_queue_fence_cb() argument 693 } else if (fence > priv->completed_fence) { in msm_queue_fence_cb() 694 cb->fence = fence; in msm_queue_fence_cb() 705 void msm_update_fence(struct drm_device *dev, uint32_t fence) in msm_update_fence() argument [all …]
|
D | msm_gem.c | 418 uint32_t fence = msm_gem_fence(msm_obj, in msm_gem_queue_inactive_cb() local 420 return msm_queue_fence_cb(obj->dev, cb, fence); in msm_gem_queue_inactive_cb() 424 struct msm_gpu *gpu, bool write, uint32_t fence) in msm_gem_move_to_active() argument 429 msm_obj->write_fence = fence; in msm_gem_move_to_active() 431 msm_obj->read_fence = fence; in msm_gem_move_to_active() 459 uint32_t fence = msm_gem_fence(msm_obj, op); in msm_gem_cpu_prep() local 464 ret = msm_wait_fence_interruptable(dev, fence, timeout); in msm_gem_cpu_prep()
|
D | msm_gem_submit.c | 420 args->fence = submit->fence; in msm_ioctl_gem_submit()
|
D | msm_rd.c | 299 submit->fence); in msm_rd_dump_submit()
|
/linux-4.1.27/drivers/staging/android/trace/ |
D | sync.h | 36 TP_PROTO(struct sync_fence *fence, int begin), 38 TP_ARGS(fence, begin), 41 __string(name, fence->name) 47 __assign_str(name, fence->name); 48 __entry->status = atomic_read(&fence->status); 57 TP_PROTO(struct fence *pt),
|
/linux-4.1.27/drivers/gpu/drm/qxl/ |
D | qxl_release.c | 43 static const char *qxl_get_driver_name(struct fence *fence) in qxl_get_driver_name() argument 48 static const char *qxl_get_timeline_name(struct fence *fence) in qxl_get_timeline_name() argument 53 static bool qxl_nop_signaling(struct fence *fence) in qxl_nop_signaling() argument 59 static long qxl_fence_wait(struct fence *fence, bool intr, signed long timeout) in qxl_fence_wait() argument 67 qdev = container_of(fence->lock, struct qxl_device, release_lock); in qxl_fence_wait() 68 release = container_of(fence, struct qxl_release, base); in qxl_fence_wait() 74 if (fence_is_signaled(fence)) in qxl_fence_wait() 83 if (fence_is_signaled(fence)) in qxl_fence_wait() 87 if (fence_is_signaled(fence)) in qxl_fence_wait() 99 FENCE_WARN(fence, "failed to wait on release %d " in qxl_fence_wait() [all …]
|
D | qxl_debugfs.c | 65 fobj = rcu_dereference(bo->tbo.resv->fence); in qxl_debugfs_buffers_info()
|
D | qxl_drv.h | 192 struct fence base;
|
/linux-4.1.27/include/drm/ttm/ |
D | ttm_execbuf_util.h | 117 struct fence *fence);
|
D | ttm_bo_driver.h | 1017 struct fence *fence,
|
/linux-4.1.27/fs/ocfs2/cluster/ |
D | quorum.c | 108 int lowest_hb, lowest_reachable = 0, fence = 0; in o2quo_make_decision() local 135 fence = 1; in o2quo_make_decision() 149 fence = 1; in o2quo_make_decision() 158 fence = 1; in o2quo_make_decision() 163 if (fence) { in o2quo_make_decision()
|
/linux-4.1.27/drivers/gpu/drm/ttm/ |
D | ttm_execbuf_util.c | 183 struct list_head *list, struct fence *fence) in ttm_eu_fence_buffer_objects() argument 204 reservation_object_add_shared_fence(bo->resv, fence); in ttm_eu_fence_buffer_objects() 206 reservation_object_add_excl_fence(bo->resv, fence); in ttm_eu_fence_buffer_objects()
|
D | ttm_bo.c | 409 struct fence *fence; in ttm_bo_flush_all_fences() local 413 fence = reservation_object_get_excl(bo->resv); in ttm_bo_flush_all_fences() 414 if (fence && !fence->ops->signaled) in ttm_bo_flush_all_fences() 415 fence_enable_sw_signaling(fence); in ttm_bo_flush_all_fences() 418 fence = rcu_dereference_protected(fobj->shared[i], in ttm_bo_flush_all_fences() 421 if (!fence->ops->signaled) in ttm_bo_flush_all_fences() 422 fence_enable_sw_signaling(fence); in ttm_bo_flush_all_fences() 1539 struct fence *excl; in ttm_bo_wait() 1557 struct fence *fence; in ttm_bo_wait() local 1558 fence = rcu_dereference_protected(fobj->shared[i], in ttm_bo_wait() [all …]
|
D | ttm_bo_util.c | 634 struct fence *fence, in ttm_bo_move_accel_cleanup() argument 645 reservation_object_add_excl_fence(bo->resv, fence); in ttm_bo_move_accel_cleanup() 673 reservation_object_add_excl_fence(ghost_obj->resv, fence); in ttm_bo_move_accel_cleanup()
|
/linux-4.1.27/drivers/dma/ioat/ |
D | hw.h | 79 unsigned int fence:1; member 113 unsigned int fence:1; member 160 unsigned int fence:1; member 209 unsigned int fence:1; member
|
D | dma_v3.c | 735 xor->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat3_prep_xor_lock() 934 pq->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat3_prep_pq_lock() 1048 pq->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat3_prep_pq16_lock() 1193 hw->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in ioat3_prep_interrupt_lock()
|
D | dma_v2.c | 783 hw->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in ioat2_dma_prep_memcpy_lock()
|
/linux-4.1.27/drivers/gpu/drm/msm/adreno/ |
D | adreno_gpu.c | 80 rbmemptr(adreno_gpu, fence)); in adreno_hw_init() 95 return adreno_gpu->memptrs->fence; in adreno_last_fence() 110 adreno_gpu->memptrs->fence = gpu->submitted_fence; in adreno_recover() 156 OUT_RING(ring, submit->fence); in adreno_submit() 172 OUT_RING(ring, rbmemptr(adreno_gpu, fence)); in adreno_submit() 173 OUT_RING(ring, submit->fence); in adreno_submit() 227 seq_printf(m, "fence: %d/%d\n", adreno_gpu->memptrs->fence, in adreno_show() 263 printk("fence: %d/%d\n", adreno_gpu->memptrs->fence, in adreno_dump()
|
D | adreno_gpu.h | 133 volatile uint32_t fence; member
|
/linux-4.1.27/drivers/gpu/host1x/ |
D | cdma.c | 75 pb->fence = pb->size_bytes - 8; in host1x_pushbuffer_init() 101 WARN_ON(pos == pb->fence); in host1x_pushbuffer_push() 114 pb->fence = (pb->fence + slots * 8) & (pb->size_bytes - 1); in host1x_pushbuffer_pop() 122 return ((pb->fence - pb->pos) & (pb->size_bytes - 1)) / 8; in host1x_pushbuffer_space()
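host1x's pushbuffer treats pos (next CPU write) and fence (last slot the hardware freed) as byte offsets into a power-of-two ring, so wraparound is a single mask. A worked example with hypothetical values:

        #include <linux/types.h>

        static u32 pushbuffer_space_example(void)
        {
                u32 size_bytes = 4096;  /* hypothetical 4 KiB ring */
                u32 pos = 4088;         /* CPU writes here next */
                u32 fence = 16;         /* hardware has freed up to here */

                /* Each slot is two 32-bit words, i.e. 8 bytes:
                 * (16 - 4088) & 4095 = 24 bytes -> 3 free slots. */
                return ((fence - pos) & (size_bytes - 1)) / 8;
        }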
|
D | cdma.h | 47 u32 fence; /* index we've written */ member
|
/linux-4.1.27/Documentation/DocBook/ |
D | .device-drivers.xml.cmd | 2 …dma-buf/dma-buf.c drivers/dma-buf/fence.c drivers/dma-buf/seqno-fence.c include/linux/fence.h incl…
|
/linux-4.1.27/include/uapi/drm/ |
D | msm_drm.h | 179 uint32_t fence; /* out */ member 194 uint32_t fence; /* in */ member
|
D | tegra_drm.h | 128 __u32 fence; /* Return value */ member
|
/linux-4.1.27/drivers/staging/android/uapi/ |
D | sw_sync.h | 23 __s32 fence; /* fd of new fence */ member
|
D | sync.h | 26 __s32 fence; /* fd on newly created fence */ member
|
/linux-4.1.27/Documentation/filesystems/nfs/ |
D | pnfs-block-server.txt | 21 If the nfsd server needs to fence a non-responding client it calls 36 echo "fencing client ${CLIENT} serial ${EVPD}" >> /var/log/pnfsd-fence.log
|
/linux-4.1.27/drivers/gpu/drm/omapdrm/ |
D | TODO | 13 . This can be handled by the dma-buf fence/reservation stuff when it
|
/linux-4.1.27/arch/metag/kernel/ |
D | head.S | 53 ! In case GCOn has just been turned on we need to fence any writes that
|
/linux-4.1.27/drivers/gpu/drm/mga/ |
D | mga_state.c | 1043 u32 *fence = data; in mga_set_fence() local 1057 *fence = dev_priv->next_fence_to_post; in mga_set_fence() 1073 u32 *fence = data; in mga_wait_fence() local 1082 mga_driver_fence_wait(dev, fence); in mga_wait_fence()
|
/linux-4.1.27/drivers/infiniband/hw/mlx5/ |
D | qp.c | 2518 static u8 get_fence(u8 fence, struct ib_send_wr *wr) in get_fence() argument 2524 if (unlikely(fence)) { in get_fence() 2528 return fence; in get_fence() 2567 int nreq, u8 fence, u8 next_fence, in finish_wqe() argument 2575 ctrl->fm_ce_se |= fence; in finish_wqe() 2610 u8 fence; in mlx5_ib_post_send() local 2622 fence = qp->fm_cache; in mlx5_ib_post_send() 2704 nreq, get_fence(fence, wr), in mlx5_ib_post_send() 2731 nreq, get_fence(fence, wr), in mlx5_ib_post_send() 2753 nreq, get_fence(fence, wr), in mlx5_ib_post_send() [all …]
|
/linux-4.1.27/drivers/gpu/drm/i915/ |
D | i915_gpu_error.c | 381 err_printf(m, " fence[%d] = %08llx\n", i, error->fence[i]); in i915_error_state_to_str() 781 error->fence[i] = I915_READ(FENCE_REG_830_0 + (i * 4)); in i915_gem_record_fences() 784 error->fence[i+8] = I915_READ(FENCE_REG_945_8 + in i915_gem_record_fences() 788 error->fence[i] = I915_READ64(FENCE_REG_965_0 + in i915_gem_record_fences() 792 error->fence[i] = I915_READ64(FENCE_REG_SANDYBRIDGE_0 + in i915_gem_record_fences()
|
D | i915_gem.c | 52 struct drm_i915_fence_reg *fence, 3226 struct drm_i915_fence_reg *fence) in fence_number() argument 3228 return fence - dev_priv->fence_regs; in fence_number() 3232 struct drm_i915_fence_reg *fence, in i915_gem_object_update_fence() argument 3236 int reg = fence_number(dev_priv, fence); in i915_gem_object_update_fence() 3242 fence->obj = obj; in i915_gem_object_update_fence() 3243 list_move_tail(&fence->lru_list, &dev_priv->mm.fence_list); in i915_gem_object_update_fence() 3246 fence->obj = NULL; in i915_gem_object_update_fence() 3247 list_del_init(&fence->lru_list); in i915_gem_object_update_fence() 3270 struct drm_i915_fence_reg *fence; in i915_gem_object_put_fence() local [all …]
|
D | i915_drv.h | 438 u64 fence[I915_MAX_NUM_FENCES]; member
|
/linux-4.1.27/include/drm/ |
D | drm_crtc.h | 46 struct fence; 759 struct fence *fence; member
|
/linux-4.1.27/drivers/gpu/drm/ |
D | drm_atomic_helper.c | 834 if (!plane->state->fence) in wait_for_fences() 839 fence_wait(plane->state->fence, false); in wait_for_fences() 840 fence_put(plane->state->fence); in wait_for_fences() 841 plane->state->fence = NULL; in wait_for_fences()
|
/linux-4.1.27/drivers/video/fbdev/intelfb/ |
D | intelfb.h | 228 u32 fence[8]; member
|
D | intelfbhw.c | 641 hw->fence[i] = INREG(FENCE + (i << 2)); in intelfbhw_read_hw_state() 860 hw->fence[i]); in intelfbhw_print_hw_state()
|
/linux-4.1.27/drivers/gpu/drm/tegra/ |
D | drm.c | 407 args->fence = job->syncpt_end; in tegra_drm_submit()
|
/linux-4.1.27/ |
D | MAINTAINERS | 3267 F: include/linux/*fence.h
|