Lines matching refs: desc — each hit is a reference to the struct xilinx_vdma_tx_descriptor pointer `desc` in the xilinx_vdma dmaengine driver, grouped by enclosing function; the trailing `local`/`argument` annotation marks how `desc` is declared at that site.

344 struct xilinx_vdma_tx_descriptor *desc;  in xilinx_vdma_alloc_tx_descriptor()  local
350 desc = kzalloc(sizeof(*desc), GFP_KERNEL); in xilinx_vdma_alloc_tx_descriptor()
351 if (!desc) in xilinx_vdma_alloc_tx_descriptor()
355 chan->allocated_desc = desc; in xilinx_vdma_alloc_tx_descriptor()
358 INIT_LIST_HEAD(&desc->segments); in xilinx_vdma_alloc_tx_descriptor()
360 return desc; in xilinx_vdma_alloc_tx_descriptor()
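
The hits at 344-360 outline the whole allocator. Below is a minimal sketch of the function they come from, assuming the driver's struct definitions plus <linux/slab.h>, <linux/list.h> and <linux/spinlock.h>; the locking around the chan->allocated_desc store is an assumption, not something the listing shows.

```c
/* Sketch: allocate one software transaction descriptor for a channel. */
static struct xilinx_vdma_tx_descriptor *
xilinx_vdma_alloc_tx_descriptor(struct xilinx_vdma_chan *chan)
{
	struct xilinx_vdma_tx_descriptor *desc;
	unsigned long flags;

	desc = kzalloc(sizeof(*desc), GFP_KERNEL);	/* line 350 */
	if (!desc)					/* line 351 */
		return NULL;

	/* Remember the in-progress descriptor on the channel (line 355);
	 * the lock here is an assumption. */
	spin_lock_irqsave(&chan->lock, flags);
	chan->allocated_desc = desc;
	spin_unlock_irqrestore(&chan->lock, flags);

	/* Start with an empty chain of hardware segments (line 358) */
	INIT_LIST_HEAD(&desc->segments);

	return desc;					/* line 360 */
}
```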
370 struct xilinx_vdma_tx_descriptor *desc) in xilinx_vdma_free_tx_descriptor() argument
374 if (!desc) in xilinx_vdma_free_tx_descriptor()
377 list_for_each_entry_safe(segment, next, &desc->segments, node) { in xilinx_vdma_free_tx_descriptor()
382 kfree(desc); in xilinx_vdma_free_tx_descriptor()
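
The destructor at 370-382 is almost fully visible: unlink every segment, then free the descriptor itself. In this sketch the per-segment free helper is a placeholder name; the listing only shows the safe list walk and the final kfree().

```c
/* Sketch: tear down one descriptor and every segment chained onto it. */
static void
xilinx_vdma_free_tx_descriptor(struct xilinx_vdma_chan *chan,
			       struct xilinx_vdma_tx_descriptor *desc)
{
	struct xilinx_vdma_tx_segment *segment, *next;

	if (!desc)				/* line 374 */
		return;

	/* _safe variant: the loop deletes the node it stands on (377) */
	list_for_each_entry_safe(segment, next, &desc->segments, node) {
		list_del(&segment->node);
		xilinx_vdma_free_tx_segment(chan, segment); /* assumed helper */
	}

	kfree(desc);				/* line 382 */
}
```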
395 struct xilinx_vdma_tx_descriptor *desc, *next; in xilinx_vdma_free_desc_list() local
397 list_for_each_entry_safe(desc, next, list, node) { in xilinx_vdma_free_desc_list()
398 list_del(&desc->node); in xilinx_vdma_free_desc_list()
399 xilinx_vdma_free_tx_descriptor(chan, desc); in xilinx_vdma_free_desc_list()
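
Lines 397-399 are nearly the whole of xilinx_vdma_free_desc_list(); completing it is mostly adding the signature. It drains an arbitrary descriptor list (presumably pending_list or done_list at channel teardown) through the single-descriptor destructor above.

```c
/* Sketch: drain a whole descriptor list through the destructor. */
static void xilinx_vdma_free_desc_list(struct xilinx_vdma_chan *chan,
				       struct list_head *list)
{
	struct xilinx_vdma_tx_descriptor *desc, *next;

	list_for_each_entry_safe(desc, next, list, node) {	/* 397 */
		list_del(&desc->node);				/* 398 */
		xilinx_vdma_free_tx_descriptor(chan, desc);	/* 399 */
	}
}
```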
443 struct xilinx_vdma_tx_descriptor *desc, *next; in xilinx_vdma_chan_desc_cleanup() local
448 list_for_each_entry_safe(desc, next, &chan->done_list, node) { in xilinx_vdma_chan_desc_cleanup()
453 list_del(&desc->node); in xilinx_vdma_chan_desc_cleanup()
456 callback = desc->async_tx.callback; in xilinx_vdma_chan_desc_cleanup()
457 callback_param = desc->async_tx.callback_param; in xilinx_vdma_chan_desc_cleanup()
465 dma_run_dependencies(&desc->async_tx); in xilinx_vdma_chan_desc_cleanup()
466 xilinx_vdma_free_tx_descriptor(chan, desc); in xilinx_vdma_chan_desc_cleanup()
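
The cleanup path at 443-466 runs client callbacks for everything on done_list. A sketch follows; dropping the lock around the callback is the usual dmaengine convention and an assumption here. Only the list walk (448-453), the callback fetch (456-457), dma_run_dependencies() (465) and the final free (466) appear in the listing.

```c
/* Sketch: complete finished transactions and recycle their descriptors. */
static void xilinx_vdma_chan_desc_cleanup(struct xilinx_vdma_chan *chan)
{
	struct xilinx_vdma_tx_descriptor *desc, *next;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	list_for_each_entry_safe(desc, next, &chan->done_list, node) {
		dma_async_tx_callback callback;
		void *callback_param;

		list_del(&desc->node);

		/* Invoke the client's callback without holding the lock */
		callback = desc->async_tx.callback;
		callback_param = desc->async_tx.callback_param;
		if (callback) {
			spin_unlock_irqrestore(&chan->lock, flags);
			callback(callback_param);
			spin_lock_irqsave(&chan->lock, flags);
		}

		/* Kick any dependent transactions, then free */
		dma_run_dependencies(&desc->async_tx);
		xilinx_vdma_free_tx_descriptor(chan, desc);
	}

	spin_unlock_irqrestore(&chan->lock, flags);
}
```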
617 struct xilinx_vdma_tx_descriptor *desc; in xilinx_vdma_start_transfer() local
634 desc = list_first_entry(&chan->pending_list, in xilinx_vdma_start_transfer()
649 head = list_first_entry(&desc->segments, in xilinx_vdma_start_transfer()
651 tail = list_entry(desc->segments.prev, in xilinx_vdma_start_transfer()
702 list_for_each_entry(segment, &desc->segments, node) { in xilinx_vdma_start_transfer()
719 list_del(&desc->node); in xilinx_vdma_start_transfer()
720 chan->active_desc = desc; in xilinx_vdma_start_transfer()
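
xilinx_vdma_start_transfer() spans 617-720, so most of it (the VDMA register programming) never shows up in a search for `desc`. What the hits do show is the ownership handoff: first pending descriptor, program the hardware per segment, then pending_list -> chan->active_desc. A heavily elided skeleton, with all hardware access reduced to comments:

```c
/* Skeleton only: register programming elided/assumed. */
static void xilinx_vdma_start_transfer(struct xilinx_vdma_chan *chan)
{
	struct xilinx_vdma_tx_descriptor *desc;
	struct xilinx_vdma_tx_segment *head, *tail, *segment;

	if (list_empty(&chan->pending_list))	/* assumed guard */
		return;

	desc = list_first_entry(&chan->pending_list,		/* 634 */
				struct xilinx_vdma_tx_descriptor, node);

	/* First and last segment of the chain (649-651); note the
	 * list_entry(prev) idiom where list_last_entry() would do. */
	head = list_first_entry(&desc->segments,
				struct xilinx_vdma_tx_segment, node);
	tail = list_entry(desc->segments.prev,
			  struct xilinx_vdma_tx_segment, node);

	/* head/tail would be written to the hardware current/tail
	 * descriptor registers in scatter-gather mode (assumption) */

	/* ... start the channel, then program one frame per segment (702) */
	list_for_each_entry(segment, &desc->segments, node) {
		/* write segment parameters into the VDMA registers */
	}

	/* Handoff: pending_list -> active_desc (719-720) */
	list_del(&desc->node);
	chan->active_desc = desc;
}
```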
745 struct xilinx_vdma_tx_descriptor *desc; in xilinx_vdma_complete_descriptor() local
750 desc = chan->active_desc; in xilinx_vdma_complete_descriptor()
751 if (!desc) { in xilinx_vdma_complete_descriptor()
756 dma_cookie_complete(&desc->async_tx); in xilinx_vdma_complete_descriptor()
757 list_add_tail(&desc->node, &chan->done_list); in xilinx_vdma_complete_descriptor()
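
Completion at 745-757 is the mirror image of start_transfer(): retire the in-flight descriptor once the interrupt fires. dma_cookie_complete() marks the cookie done (756) and the descriptor moves to done_list (757) for xilinx_vdma_chan_desc_cleanup() to consume. Locking, the debug message, and the active_desc reset are assumptions in this sketch.

```c
/* Sketch: move the finished active descriptor onto done_list. */
static void xilinx_vdma_complete_descriptor(struct xilinx_vdma_chan *chan)
{
	struct xilinx_vdma_tx_descriptor *desc;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	desc = chan->active_desc;			/* line 750 */
	if (!desc) {					/* line 751 */
		dev_dbg(chan->dev, "no running descriptors\n");
		goto unlock;
	}

	dma_cookie_complete(&desc->async_tx);		/* line 756 */
	list_add_tail(&desc->node, &chan->done_list);	/* line 757 */
	chan->active_desc = NULL;

unlock:
	spin_unlock_irqrestore(&chan->lock, flags);
}
```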
889 struct xilinx_vdma_tx_descriptor *desc = to_vdma_tx_descriptor(tx); in xilinx_vdma_tx_submit() local
910 list_add_tail(&desc->node, &chan->pending_list); in xilinx_vdma_tx_submit()
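
Line 889 names to_vdma_tx_descriptor(), evidently the container_of() wrapper for the embedded async_tx. A sketch of the submit hook follows; to_xilinx_chan(), the locking, and the allocated_desc reset are assumptions. Submission only queues the descriptor; the transfer itself starts later from issue_pending().

```c
/* Sketch: dmaengine tx_submit hook — assign a cookie and queue. */
static dma_cookie_t xilinx_vdma_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct xilinx_vdma_tx_descriptor *desc = to_vdma_tx_descriptor(tx);
	struct xilinx_vdma_chan *chan = to_xilinx_chan(tx->chan); /* assumed */
	dma_cookie_t cookie;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	cookie = dma_cookie_assign(tx);

	/* Queue at the tail so transactions complete in order (910) */
	list_add_tail(&desc->node, &chan->pending_list);

	/* The channel no longer holds a half-built descriptor (assumption) */
	chan->allocated_desc = NULL;

	spin_unlock_irqrestore(&chan->lock, flags);

	return cookie;
}
```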
935 struct xilinx_vdma_tx_descriptor *desc; in xilinx_vdma_dma_prep_interleaved() local
949 desc = xilinx_vdma_alloc_tx_descriptor(chan); in xilinx_vdma_dma_prep_interleaved()
950 if (!desc) in xilinx_vdma_dma_prep_interleaved()
953 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_vdma_dma_prep_interleaved()
954 desc->async_tx.tx_submit = xilinx_vdma_tx_submit; in xilinx_vdma_dma_prep_interleaved()
955 async_tx_ack(&desc->async_tx); in xilinx_vdma_dma_prep_interleaved()
977 if (!list_empty(&desc->segments)) { in xilinx_vdma_dma_prep_interleaved()
978 prev = list_last_entry(&desc->segments, in xilinx_vdma_dma_prep_interleaved()
984 list_add_tail(&segment->node, &desc->segments); in xilinx_vdma_dma_prep_interleaved()
989 segment = list_first_entry(&desc->segments, in xilinx_vdma_dma_prep_interleaved()
993 return &desc->async_tx; in xilinx_vdma_dma_prep_interleaved()
996 xilinx_vdma_free_tx_descriptor(chan, desc); in xilinx_vdma_dma_prep_interleaved()
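
The prep routine at 935-996 builds one transaction for an interleaved template. Segment allocation and the hardware-field setup are elided/assumed below; the listing shows descriptor init (953-955), chaining each new segment after the previous one (977-984), and closing the chain back to the first segment (989) before handing &desc->async_tx to the client, with the error path freeing everything through the descriptor destructor (996).

```c
/* Sketch: prepare an interleaved VDMA transaction. */
static struct dma_async_tx_descriptor *
xilinx_vdma_dma_prep_interleaved(struct dma_chan *dchan,
				 struct dma_interleaved_template *xt,
				 unsigned long flags)
{
	struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); /* assumed */
	struct xilinx_vdma_tx_descriptor *desc;
	struct xilinx_vdma_tx_segment *segment, *prev;

	desc = xilinx_vdma_alloc_tx_descriptor(chan);		/* 949 */
	if (!desc)						/* 950 */
		return NULL;

	dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
	desc->async_tx.tx_submit = xilinx_vdma_tx_submit;	/* 954 */
	async_tx_ack(&desc->async_tx);				/* 955 */

	segment = xilinx_vdma_alloc_tx_segment(chan);	/* assumed helper */
	if (!segment)
		goto error;
	/* ... fill segment's hardware fields from *xt (elided) ... */

	/* Chain after the previous segment, if there is one (977-984) */
	if (!list_empty(&desc->segments)) {
		prev = list_last_entry(&desc->segments,
				       struct xilinx_vdma_tx_segment, node);
		/* ... link prev's hardware next-pointer to segment ... */
	}
	list_add_tail(&segment->node, &desc->segments);

	/* Close the ring: last segment points back at the first (989) */
	segment = list_first_entry(&desc->segments,
				   struct xilinx_vdma_tx_segment, node);
	/* ... link the tail's hardware next-pointer to segment ... */

	return &desc->async_tx;				/* line 993 */

error:
	xilinx_vdma_free_tx_descriptor(chan, desc);	/* line 996 */
	return NULL;
}
```

Together these hits trace the full descriptor lifecycle: alloc -> prep -> submit (pending_list) -> start_transfer (active_desc) -> complete (done_list) -> cleanup/free.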