Lines Matching refs:desc (all matches in drivers/dma/sh/rcar-dmac.c)

164 	} desc;  member
319 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_chan_start_xfer() local
320 u32 chcr = desc->chcr; in rcar_dmac_chan_start_xfer()
327 if (desc->hwdescs.use) { in rcar_dmac_chan_start_xfer()
332 chan->index, desc, desc->nchunks, &desc->hwdescs.dma); in rcar_dmac_chan_start_xfer()
336 desc->hwdescs.dma >> 32); in rcar_dmac_chan_start_xfer()
339 (desc->hwdescs.dma & 0xfffffff0) | in rcar_dmac_chan_start_xfer()
342 RCAR_DMACHCRB_DCNT(desc->nchunks - 1) | in rcar_dmac_chan_start_xfer()
352 chunk = list_first_entry(&desc->chunks, in rcar_dmac_chan_start_xfer()
370 if (!desc->cyclic) in rcar_dmac_chan_start_xfer()
376 else if (desc->async_tx.callback) in rcar_dmac_chan_start_xfer()
385 struct rcar_dmac_xfer_chunk *chunk = desc->running; in rcar_dmac_chan_start_xfer()
403 chunk->size >> desc->xfer_shift); in rcar_dmac_chan_start_xfer()
436 struct rcar_dmac_desc *desc = to_rcar_dmac_desc(tx); in rcar_dmac_tx_submit() local
445 chan->index, tx->cookie, desc); in rcar_dmac_tx_submit()
447 list_add_tail(&desc->node, &chan->desc.pending); in rcar_dmac_tx_submit()
448 desc->running = list_first_entry(&desc->chunks, in rcar_dmac_tx_submit()
476 struct rcar_dmac_desc *desc = &page->descs[i]; in rcar_dmac_desc_alloc() local
478 dma_async_tx_descriptor_init(&desc->async_tx, &chan->chan); in rcar_dmac_desc_alloc()
479 desc->async_tx.tx_submit = rcar_dmac_tx_submit; in rcar_dmac_desc_alloc()
480 INIT_LIST_HEAD(&desc->chunks); in rcar_dmac_desc_alloc()
482 list_add_tail(&desc->node, &list); in rcar_dmac_desc_alloc()
486 list_splice_tail(&list, &chan->desc.free); in rcar_dmac_desc_alloc()
487 list_add_tail(&page->node, &chan->desc.pages); in rcar_dmac_desc_alloc()
506 struct rcar_dmac_desc *desc) in rcar_dmac_desc_put() argument
511 list_splice_tail_init(&desc->chunks, &chan->desc.chunks_free); in rcar_dmac_desc_put()
512 list_add_tail(&desc->node, &chan->desc.free); in rcar_dmac_desc_put()
518 struct rcar_dmac_desc *desc, *_desc; in rcar_dmac_desc_recycle_acked() local
528 list_splice_init(&chan->desc.wait, &list); in rcar_dmac_desc_recycle_acked()
531 list_for_each_entry_safe(desc, _desc, &list, node) { in rcar_dmac_desc_recycle_acked()
532 if (async_tx_test_ack(&desc->async_tx)) { in rcar_dmac_desc_recycle_acked()
533 list_del(&desc->node); in rcar_dmac_desc_recycle_acked()
534 rcar_dmac_desc_put(chan, desc); in rcar_dmac_desc_recycle_acked()
543 list_splice(&list, &chan->desc.wait); in rcar_dmac_desc_recycle_acked()
558 struct rcar_dmac_desc *desc; in rcar_dmac_desc_get() local
566 while (list_empty(&chan->desc.free)) { in rcar_dmac_desc_get()
580 desc = list_first_entry(&chan->desc.free, struct rcar_dmac_desc, node); in rcar_dmac_desc_get()
581 list_del(&desc->node); in rcar_dmac_desc_get()
585 return desc; in rcar_dmac_desc_get()
610 list_splice_tail(&list, &chan->desc.chunks_free); in rcar_dmac_xfer_chunk_alloc()
611 list_add_tail(&page->node, &chan->desc.pages); in rcar_dmac_xfer_chunk_alloc()
634 while (list_empty(&chan->desc.chunks_free)) { in rcar_dmac_xfer_chunk_get()
648 chunk = list_first_entry(&chan->desc.chunks_free, in rcar_dmac_xfer_chunk_get()
658 struct rcar_dmac_desc *desc, size_t size) in rcar_dmac_realloc_hwdesc() argument
668 if (desc->hwdescs.size == size) in rcar_dmac_realloc_hwdesc()
671 if (desc->hwdescs.mem) { in rcar_dmac_realloc_hwdesc()
672 dma_free_coherent(chan->chan.device->dev, desc->hwdescs.size, in rcar_dmac_realloc_hwdesc()
673 desc->hwdescs.mem, desc->hwdescs.dma); in rcar_dmac_realloc_hwdesc()
674 desc->hwdescs.mem = NULL; in rcar_dmac_realloc_hwdesc()
675 desc->hwdescs.size = 0; in rcar_dmac_realloc_hwdesc()
681 desc->hwdescs.mem = dma_alloc_coherent(chan->chan.device->dev, size, in rcar_dmac_realloc_hwdesc()
682 &desc->hwdescs.dma, GFP_NOWAIT); in rcar_dmac_realloc_hwdesc()
683 if (!desc->hwdescs.mem) in rcar_dmac_realloc_hwdesc()
686 desc->hwdescs.size = size; in rcar_dmac_realloc_hwdesc()
690 struct rcar_dmac_desc *desc) in rcar_dmac_fill_hwdesc() argument
695 rcar_dmac_realloc_hwdesc(chan, desc, desc->nchunks * sizeof(*hwdesc)); in rcar_dmac_fill_hwdesc()
697 hwdesc = desc->hwdescs.mem; in rcar_dmac_fill_hwdesc()
701 list_for_each_entry(chunk, &desc->chunks, node) { in rcar_dmac_fill_hwdesc()
704 hwdesc->tcr = chunk->size >> desc->xfer_shift; in rcar_dmac_fill_hwdesc()
726 struct rcar_dmac_desc *desc, *_desc; in rcar_dmac_chan_reinit() local
733 list_splice_init(&chan->desc.pending, &descs); in rcar_dmac_chan_reinit()
734 list_splice_init(&chan->desc.active, &descs); in rcar_dmac_chan_reinit()
735 list_splice_init(&chan->desc.done, &descs); in rcar_dmac_chan_reinit()
736 list_splice_init(&chan->desc.wait, &descs); in rcar_dmac_chan_reinit()
738 chan->desc.running = NULL; in rcar_dmac_chan_reinit()
742 list_for_each_entry_safe(desc, _desc, &descs, node) { in rcar_dmac_chan_reinit()
743 list_del(&desc->node); in rcar_dmac_chan_reinit()
744 rcar_dmac_desc_put(chan, desc); in rcar_dmac_chan_reinit()
775 struct rcar_dmac_desc *desc) in rcar_dmac_chan_configure_desc() argument
787 switch (desc->direction) { in rcar_dmac_chan_configure_desc()
808 desc->xfer_shift = ilog2(xfer_size); in rcar_dmac_chan_configure_desc()
809 desc->chcr = chcr | chcr_ts[desc->xfer_shift]; in rcar_dmac_chan_configure_desc()
829 struct rcar_dmac_desc *desc; in rcar_dmac_chan_prep_sg() local
837 desc = rcar_dmac_desc_get(chan); in rcar_dmac_chan_prep_sg()
838 if (!desc) in rcar_dmac_chan_prep_sg()
841 desc->async_tx.flags = dma_flags; in rcar_dmac_chan_prep_sg()
842 desc->async_tx.cookie = -EBUSY; in rcar_dmac_chan_prep_sg()
844 desc->cyclic = cyclic; in rcar_dmac_chan_prep_sg()
845 desc->direction = dir; in rcar_dmac_chan_prep_sg()
847 rcar_dmac_chan_configure_desc(chan, desc); in rcar_dmac_chan_prep_sg()
849 max_chunk_size = (RCAR_DMATCR_MASK + 1) << desc->xfer_shift; in rcar_dmac_chan_prep_sg()
885 rcar_dmac_desc_put(chan, desc); in rcar_dmac_chan_prep_sg()
901 chan->index, chunk, desc, i, sg, size, len, in rcar_dmac_chan_prep_sg()
910 list_add_tail(&chunk->node, &desc->chunks); in rcar_dmac_chan_prep_sg()
915 desc->nchunks = nchunks; in rcar_dmac_chan_prep_sg()
916 desc->size = full_size; in rcar_dmac_chan_prep_sg()
928 desc->hwdescs.use = !highmem && nchunks > 1; in rcar_dmac_chan_prep_sg()
929 if (desc->hwdescs.use) { in rcar_dmac_chan_prep_sg()
930 if (rcar_dmac_fill_hwdesc(chan, desc) < 0) in rcar_dmac_chan_prep_sg()
931 desc->hwdescs.use = false; in rcar_dmac_chan_prep_sg()
934 return &desc->async_tx; in rcar_dmac_chan_prep_sg()
946 INIT_LIST_HEAD(&rchan->desc.chunks_free); in rcar_dmac_alloc_chan_resources()
947 INIT_LIST_HEAD(&rchan->desc.pages); in rcar_dmac_alloc_chan_resources()
966 struct rcar_dmac_desc *desc; in rcar_dmac_free_chan_resources() local
982 list_splice_init(&rchan->desc.free, &list); in rcar_dmac_free_chan_resources()
983 list_splice_init(&rchan->desc.pending, &list); in rcar_dmac_free_chan_resources()
984 list_splice_init(&rchan->desc.active, &list); in rcar_dmac_free_chan_resources()
985 list_splice_init(&rchan->desc.done, &list); in rcar_dmac_free_chan_resources()
986 list_splice_init(&rchan->desc.wait, &list); in rcar_dmac_free_chan_resources()
988 list_for_each_entry(desc, &list, node) in rcar_dmac_free_chan_resources()
989 rcar_dmac_realloc_hwdesc(rchan, desc, 0); in rcar_dmac_free_chan_resources()
991 list_for_each_entry_safe(page, _page, &rchan->desc.pages, node) { in rcar_dmac_free_chan_resources()
1049 struct dma_async_tx_descriptor *desc; in rcar_dmac_prep_dma_cyclic() local
1092 desc = rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, dev_addr, in rcar_dmac_prep_dma_cyclic()
1096 return desc; in rcar_dmac_prep_dma_cyclic()
1138 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_chan_get_residue() local
1144 if (!desc) in rcar_dmac_chan_get_residue()
1152 if (cookie != desc->async_tx.cookie) in rcar_dmac_chan_get_residue()
1153 return desc->size; in rcar_dmac_chan_get_residue()
1161 if (desc->hwdescs.use) { in rcar_dmac_chan_get_residue()
1164 WARN_ON(dptr >= desc->nchunks); in rcar_dmac_chan_get_residue()
1166 running = desc->running; in rcar_dmac_chan_get_residue()
1170 list_for_each_entry_reverse(chunk, &desc->chunks, node) { in rcar_dmac_chan_get_residue()
1171 if (chunk == running || ++dptr == desc->nchunks) in rcar_dmac_chan_get_residue()
1178 residue += rcar_dmac_chan_read(chan, RCAR_DMATCR) << desc->xfer_shift; in rcar_dmac_chan_get_residue()
1212 if (list_empty(&rchan->desc.pending)) in rcar_dmac_issue_pending()
1216 list_splice_tail_init(&rchan->desc.pending, &rchan->desc.active); in rcar_dmac_issue_pending()
1222 if (!rchan->desc.running) { in rcar_dmac_issue_pending()
1223 struct rcar_dmac_desc *desc; in rcar_dmac_issue_pending() local
1225 desc = list_first_entry(&rchan->desc.active, in rcar_dmac_issue_pending()
1227 rchan->desc.running = desc; in rcar_dmac_issue_pending()
1242 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_isr_desc_stage_end() local
1245 if (WARN_ON(!desc || !desc->cyclic)) { in rcar_dmac_isr_desc_stage_end()
1264 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_isr_transfer_end() local
1267 if (WARN_ON_ONCE(!desc)) { in rcar_dmac_isr_transfer_end()
1281 if (!desc->hwdescs.use) { in rcar_dmac_isr_transfer_end()
1287 if (!list_is_last(&desc->running->node, &desc->chunks)) { in rcar_dmac_isr_transfer_end()
1288 desc->running = list_next_entry(desc->running, node); in rcar_dmac_isr_transfer_end()
1289 if (!desc->cyclic) in rcar_dmac_isr_transfer_end()
1298 if (desc->cyclic) { in rcar_dmac_isr_transfer_end()
1299 desc->running = in rcar_dmac_isr_transfer_end()
1300 list_first_entry(&desc->chunks, in rcar_dmac_isr_transfer_end()
1308 list_move_tail(&desc->node, &chan->desc.done); in rcar_dmac_isr_transfer_end()
1311 if (!list_empty(&chan->desc.active)) in rcar_dmac_isr_transfer_end()
1312 chan->desc.running = list_first_entry(&chan->desc.active, in rcar_dmac_isr_transfer_end()
1316 chan->desc.running = NULL; in rcar_dmac_isr_transfer_end()
1319 if (chan->desc.running) in rcar_dmac_isr_transfer_end()
1353 struct rcar_dmac_desc *desc; in rcar_dmac_isr_channel_thread() local
1358 if (chan->desc.running && chan->desc.running->cyclic) { in rcar_dmac_isr_channel_thread()
1362 desc = chan->desc.running; in rcar_dmac_isr_channel_thread()
1363 callback = desc->async_tx.callback; in rcar_dmac_isr_channel_thread()
1364 callback_param = desc->async_tx.callback_param; in rcar_dmac_isr_channel_thread()
1377 while (!list_empty(&chan->desc.done)) { in rcar_dmac_isr_channel_thread()
1378 desc = list_first_entry(&chan->desc.done, struct rcar_dmac_desc, in rcar_dmac_isr_channel_thread()
1380 dma_cookie_complete(&desc->async_tx); in rcar_dmac_isr_channel_thread()
1381 list_del(&desc->node); in rcar_dmac_isr_channel_thread()
1383 if (desc->async_tx.callback) { in rcar_dmac_isr_channel_thread()
1390 desc->async_tx.callback(desc->async_tx.callback_param); in rcar_dmac_isr_channel_thread()
1394 list_add_tail(&desc->node, &chan->desc.wait); in rcar_dmac_isr_channel_thread()
1531 INIT_LIST_HEAD(&rchan->desc.free); in rcar_dmac_chan_probe()
1532 INIT_LIST_HEAD(&rchan->desc.pending); in rcar_dmac_chan_probe()
1533 INIT_LIST_HEAD(&rchan->desc.active); in rcar_dmac_chan_probe()
1534 INIT_LIST_HEAD(&rchan->desc.done); in rcar_dmac_chan_probe()
1535 INIT_LIST_HEAD(&rchan->desc.wait); in rcar_dmac_chan_probe()
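
The matches above trace one pattern: a descriptor moves through the per-channel lists free -> pending (tx_submit) -> active (issue_pending) -> done (transfer-end IRQ) -> wait (callback ran, not yet acked) -> free (recycle_acked). Below is a minimal userspace sketch of that lifecycle, assuming nothing beyond the C standard library. The struct names and the push/pop helpers are illustrative stand-ins, not the driver's API; the real driver uses struct list_head, per-channel spinlocks, and dmaengine cookies.

/*
 * Illustrative sketch of the rcar-dmac descriptor lifecycle.
 * One singly-linked stack per state, standing in for chan->desc.*.
 */
#include <stdio.h>
#include <stdlib.h>

struct desc {
	int cookie;
	int acked;            /* stands in for async_tx_test_ack() */
	struct desc *next;
};

struct chan {
	struct desc *free, *pending, *active, *done, *wait;
};

static void push(struct desc **list, struct desc *d)
{
	d->next = *list;
	*list = d;
}

static struct desc *pop(struct desc **list)
{
	struct desc *d = *list;
	if (d)
		*list = d->next;
	return d;
}

int main(void)
{
	struct chan chan = { 0 };
	int next_cookie = 1;

	/* desc_alloc(): pre-allocate a batch onto the free list */
	for (int i = 0; i < 4; i++)
		push(&chan.free, calloc(1, sizeof(struct desc)));

	/* prep + tx_submit(): free -> pending, cookie assigned */
	struct desc *d = pop(&chan.free);
	d->cookie = next_cookie++;
	push(&chan.pending, d);

	/* issue_pending(): pending -> active, becomes desc.running */
	push(&chan.active, pop(&chan.pending));

	/* transfer-end IRQ: active -> done */
	push(&chan.done, pop(&chan.active));

	/* IRQ thread: complete the cookie, run callback, done -> wait */
	d = pop(&chan.done);
	printf("completed cookie %d\n", d->cookie);
	push(&chan.wait, d);

	/* recycle_acked(): once the client acks, wait -> free */
	d = pop(&chan.wait);
	d->acked = 1;
	if (d->acked)
		push(&chan.free, d);
	return 0;
}

Note how this mirrors the listing: rcar_dmac_desc_get() pops the free list (refilling it via rcar_dmac_desc_alloc() when empty), rcar_dmac_chan_reinit() splices all four in-flight lists back for recycling on error, and rcar_dmac_desc_recycle_acked() is the only path from wait back to free.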