Lines matching refs:desc (references to the descriptor bookkeeping in the Linux R-Car DMA controller driver, drivers/dma/rcar-dmac.c)
164 } desc; member
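
The "} desc;" member above closes the per-channel descriptor bookkeeping of struct rcar_dmac_chan. A sketch of that anonymous struct, reconstructed purely from the references below; field order and the comments are assumptions:

struct rcar_dmac_chan {
        /* ... channel registers, index, lock, ... */
        struct {
                struct list_head free;          /* reusable descriptors */
                struct list_head pending;       /* submitted, not yet issued */
                struct list_head active;        /* issued to the hardware */
                struct list_head done;          /* completed, callback pending */
                struct list_head wait;          /* callback run, awaiting ack */
                struct rcar_dmac_desc *running; /* currently executing */

                struct list_head chunks_free;   /* reusable transfer chunks */
                struct list_head pages;         /* backing page allocations */
        } desc;
};
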
319 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_chan_start_xfer() local
320 u32 chcr = desc->chcr; in rcar_dmac_chan_start_xfer()
327 if (desc->hwdescs.use) { in rcar_dmac_chan_start_xfer()
332 chan->index, desc, desc->nchunks, &desc->hwdescs.dma); in rcar_dmac_chan_start_xfer()
336 desc->hwdescs.dma >> 32); in rcar_dmac_chan_start_xfer()
339 (desc->hwdescs.dma & 0xfffffff0) | in rcar_dmac_chan_start_xfer()
342 RCAR_DMACHCRB_DCNT(desc->nchunks - 1) | in rcar_dmac_chan_start_xfer()
352 chunk = list_first_entry(&desc->chunks, in rcar_dmac_chan_start_xfer()
370 if (!desc->cyclic) in rcar_dmac_chan_start_xfer()
376 else if (desc->async_tx.callback) in rcar_dmac_chan_start_xfer()
385 struct rcar_dmac_xfer_chunk *chunk = desc->running; in rcar_dmac_chan_start_xfer()
403 chunk->size >> desc->xfer_shift); in rcar_dmac_chan_start_xfer()
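
rcar_dmac_chan_start_xfer() above programs the channel in one of two modes. A condensed sketch of that branch; rcar_dmac_chan_write(), RCAR_DMAFIXDPBASE, RCAR_DMADPBASE, RCAR_DMADPBASE_SEL and RCAR_DMACHCRB_DRST are assumptions, only the expressions themselves appear verbatim above:

if (desc->hwdescs.use) {
        /* Descriptor mode: hand the controller the whole pre-built
         * hardware descriptor list and let it walk every chunk. */
        rcar_dmac_chan_write(chan, RCAR_DMAFIXDPBASE,
                             desc->hwdescs.dma >> 32);
        rcar_dmac_chan_write(chan, RCAR_DMADPBASE,
                             (desc->hwdescs.dma & 0xfffffff0) |
                             RCAR_DMADPBASE_SEL);
        rcar_dmac_chan_write(chan, RCAR_DMACHCRB,
                             RCAR_DMACHCRB_DCNT(desc->nchunks - 1) |
                             RCAR_DMACHCRB_DRST);
} else {
        /* Register mode: program one chunk; the transfer-end
         * interrupt advances desc->running to the next chunk. */
        struct rcar_dmac_xfer_chunk *chunk = desc->running;

        rcar_dmac_chan_write(chan, RCAR_DMATCR,
                             chunk->size >> desc->xfer_shift);
}
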
436 struct rcar_dmac_desc *desc = to_rcar_dmac_desc(tx); in rcar_dmac_tx_submit() local
445 chan->index, tx->cookie, desc); in rcar_dmac_tx_submit()
447 list_add_tail(&desc->node, &chan->desc.pending); in rcar_dmac_tx_submit()
448 desc->running = list_first_entry(&desc->chunks, in rcar_dmac_tx_submit()
477 struct rcar_dmac_desc *desc = &page->descs[i]; in rcar_dmac_desc_alloc() local
479 dma_async_tx_descriptor_init(&desc->async_tx, &chan->chan); in rcar_dmac_desc_alloc()
480 desc->async_tx.tx_submit = rcar_dmac_tx_submit; in rcar_dmac_desc_alloc()
481 INIT_LIST_HEAD(&desc->chunks); in rcar_dmac_desc_alloc()
483 list_add_tail(&desc->node, &list); in rcar_dmac_desc_alloc()
487 list_splice_tail(&list, &chan->desc.free); in rcar_dmac_desc_alloc()
488 list_add_tail(&page->node, &chan->desc.pages); in rcar_dmac_desc_alloc()
507 struct rcar_dmac_desc *desc) in rcar_dmac_desc_put() argument
512 list_splice_tail_init(&desc->chunks, &chan->desc.chunks_free); in rcar_dmac_desc_put()
513 list_add_tail(&desc->node, &chan->desc.free); in rcar_dmac_desc_put()
519 struct rcar_dmac_desc *desc, *_desc; in rcar_dmac_desc_recycle_acked() local
530 list_splice_init(&chan->desc.wait, &list); in rcar_dmac_desc_recycle_acked()
533 list_for_each_entry_safe(desc, _desc, &list, node) { in rcar_dmac_desc_recycle_acked()
534 if (async_tx_test_ack(&desc->async_tx)) { in rcar_dmac_desc_recycle_acked()
535 list_del(&desc->node); in rcar_dmac_desc_recycle_acked()
536 rcar_dmac_desc_put(chan, desc); in rcar_dmac_desc_recycle_acked()
545 list_splice(&list, &chan->desc.wait); in rcar_dmac_desc_recycle_acked()
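
Taken together, the submit, put and recycle paths above imply a descriptor lifecycle; a summary of the list transitions, inferred from these references:

/*
 * desc.free   --rcar_dmac_desc_get()------->  (being prepared)
 * tx_submit() ----------------------------->  desc.pending
 * issue_pending() ------------------------->  desc.active (+ desc.running)
 * transfer-end interrupt ------------------>  desc.done
 * callback executed in the IRQ thread ----->  desc.wait
 * async_tx_test_ack() succeeds ------------>  desc.free (rcar_dmac_desc_put())
 */
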
560 struct rcar_dmac_desc *desc; in rcar_dmac_desc_get() local
569 while (list_empty(&chan->desc.free)) { in rcar_dmac_desc_get()
583 desc = list_first_entry(&chan->desc.free, struct rcar_dmac_desc, node); in rcar_dmac_desc_get()
584 list_del(&desc->node); in rcar_dmac_desc_get()
588 return desc; in rcar_dmac_desc_get()
614 list_splice_tail(&list, &chan->desc.chunks_free); in rcar_dmac_xfer_chunk_alloc()
615 list_add_tail(&page->node, &chan->desc.pages); in rcar_dmac_xfer_chunk_alloc()
639 while (list_empty(&chan->desc.chunks_free)) { in rcar_dmac_xfer_chunk_get()
653 chunk = list_first_entry(&chan->desc.chunks_free, in rcar_dmac_xfer_chunk_get()
663 struct rcar_dmac_desc *desc, size_t size) in rcar_dmac_realloc_hwdesc() argument
673 if (desc->hwdescs.size == size) in rcar_dmac_realloc_hwdesc()
676 if (desc->hwdescs.mem) { in rcar_dmac_realloc_hwdesc()
677 dma_free_coherent(chan->chan.device->dev, desc->hwdescs.size, in rcar_dmac_realloc_hwdesc()
678 desc->hwdescs.mem, desc->hwdescs.dma); in rcar_dmac_realloc_hwdesc()
679 desc->hwdescs.mem = NULL; in rcar_dmac_realloc_hwdesc()
680 desc->hwdescs.size = 0; in rcar_dmac_realloc_hwdesc()
686 desc->hwdescs.mem = dma_alloc_coherent(chan->chan.device->dev, size, in rcar_dmac_realloc_hwdesc()
687 &desc->hwdescs.dma, GFP_NOWAIT); in rcar_dmac_realloc_hwdesc()
688 if (!desc->hwdescs.mem) in rcar_dmac_realloc_hwdesc()
691 desc->hwdescs.size = size; in rcar_dmac_realloc_hwdesc()
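
rcar_dmac_realloc_hwdesc() above follows a free-then-reallocate pattern for the coherent memory that backs the hardware descriptors. A reconstruction from those lines, with alignment details elided and the void return type assumed:

static void rcar_dmac_realloc_hwdesc(struct rcar_dmac_chan *chan,
                                     struct rcar_dmac_desc *desc, size_t size)
{
        /* Nothing to do if the buffer already has the right size. */
        if (desc->hwdescs.size == size)
                return;

        /* Release the old coherent buffer before allocating anew. */
        if (desc->hwdescs.mem) {
                dma_free_coherent(chan->chan.device->dev, desc->hwdescs.size,
                                  desc->hwdescs.mem, desc->hwdescs.dma);
                desc->hwdescs.mem = NULL;
                desc->hwdescs.size = 0;
        }

        /* size == 0 acts as a pure free; the channel teardown path
         * below calls this with 0 for every descriptor. */
        if (!size)
                return;

        /* GFP_NOWAIT: descriptor preparation must not sleep. */
        desc->hwdescs.mem = dma_alloc_coherent(chan->chan.device->dev, size,
                                               &desc->hwdescs.dma, GFP_NOWAIT);
        if (!desc->hwdescs.mem)
                return;

        desc->hwdescs.size = size;
}
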
695 struct rcar_dmac_desc *desc) in rcar_dmac_fill_hwdesc() argument
700 rcar_dmac_realloc_hwdesc(chan, desc, desc->nchunks * sizeof(*hwdesc)); in rcar_dmac_fill_hwdesc()
702 hwdesc = desc->hwdescs.mem; in rcar_dmac_fill_hwdesc()
706 list_for_each_entry(chunk, &desc->chunks, node) { in rcar_dmac_fill_hwdesc()
709 hwdesc->tcr = chunk->size >> desc->xfer_shift; in rcar_dmac_fill_hwdesc()
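
rcar_dmac_fill_hwdesc() above flattens the software chunk list into that coherent buffer for descriptor mode. A sketch; the hardware descriptor's sar/dar fields and the chunk's src_addr/dst_addr members are assumptions, only the tcr line appears verbatim above:

static int rcar_dmac_fill_hwdesc(struct rcar_dmac_chan *chan,
                                 struct rcar_dmac_desc *desc)
{
        struct rcar_dmac_xfer_chunk *chunk;
        struct rcar_dmac_hw_desc *hwdesc;

        /* One hardware descriptor per software chunk. */
        rcar_dmac_realloc_hwdesc(chan, desc, desc->nchunks * sizeof(*hwdesc));

        hwdesc = desc->hwdescs.mem;
        if (!hwdesc)
                return -ENOMEM;

        list_for_each_entry(chunk, &desc->chunks, node) {
                hwdesc->sar = chunk->src_addr;  /* assumed field names */
                hwdesc->dar = chunk->dst_addr;
                /* TCR counts transfer units, not bytes. */
                hwdesc->tcr = chunk->size >> desc->xfer_shift;
                hwdesc++;
        }

        return 0;
}
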
731 struct rcar_dmac_desc *desc, *_desc; in rcar_dmac_chan_reinit() local
738 list_splice_init(&chan->desc.pending, &descs); in rcar_dmac_chan_reinit()
739 list_splice_init(&chan->desc.active, &descs); in rcar_dmac_chan_reinit()
740 list_splice_init(&chan->desc.done, &descs); in rcar_dmac_chan_reinit()
741 list_splice_init(&chan->desc.wait, &descs); in rcar_dmac_chan_reinit()
743 chan->desc.running = NULL; in rcar_dmac_chan_reinit()
747 list_for_each_entry_safe(desc, _desc, &descs, node) { in rcar_dmac_chan_reinit()
748 list_del(&desc->node); in rcar_dmac_chan_reinit()
749 rcar_dmac_desc_put(chan, desc); in rcar_dmac_chan_reinit()
780 struct rcar_dmac_desc *desc) in rcar_dmac_chan_configure_desc() argument
792 switch (desc->direction) { in rcar_dmac_chan_configure_desc()
813 desc->xfer_shift = ilog2(xfer_size); in rcar_dmac_chan_configure_desc()
814 desc->chcr = chcr | chcr_ts[desc->xfer_shift]; in rcar_dmac_chan_configure_desc()
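
The shift computed above converts between byte counts and hardware transfer-count units; a worked example:

/*
 * With a 4-byte transfer size, xfer_shift = ilog2(4) = 2, so a
 * 4096-byte chunk programs 4096 >> 2 = 1024 into the transfer count
 * register, and the residue path converts back with count << 2.
 */
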
834 struct rcar_dmac_desc *desc; in rcar_dmac_chan_prep_sg() local
842 desc = rcar_dmac_desc_get(chan); in rcar_dmac_chan_prep_sg()
843 if (!desc) in rcar_dmac_chan_prep_sg()
846 desc->async_tx.flags = dma_flags; in rcar_dmac_chan_prep_sg()
847 desc->async_tx.cookie = -EBUSY; in rcar_dmac_chan_prep_sg()
849 desc->cyclic = cyclic; in rcar_dmac_chan_prep_sg()
850 desc->direction = dir; in rcar_dmac_chan_prep_sg()
852 rcar_dmac_chan_configure_desc(chan, desc); in rcar_dmac_chan_prep_sg()
854 max_chunk_size = (RCAR_DMATCR_MASK + 1) << desc->xfer_shift; in rcar_dmac_chan_prep_sg()
890 rcar_dmac_desc_put(chan, desc); in rcar_dmac_chan_prep_sg()
906 chan->index, chunk, desc, i, sg, size, len, in rcar_dmac_chan_prep_sg()
915 list_add_tail(&chunk->node, &desc->chunks); in rcar_dmac_chan_prep_sg()
920 desc->nchunks = nchunks; in rcar_dmac_chan_prep_sg()
921 desc->size = full_size; in rcar_dmac_chan_prep_sg()
933 desc->hwdescs.use = !highmem && nchunks > 1; in rcar_dmac_chan_prep_sg()
934 if (desc->hwdescs.use) { in rcar_dmac_chan_prep_sg()
935 if (rcar_dmac_fill_hwdesc(chan, desc) < 0) in rcar_dmac_chan_prep_sg()
936 desc->hwdescs.use = false; in rcar_dmac_chan_prep_sg()
939 return &desc->async_tx; in rcar_dmac_chan_prep_sg()
951 INIT_LIST_HEAD(&rchan->desc.chunks_free); in rcar_dmac_alloc_chan_resources()
952 INIT_LIST_HEAD(&rchan->desc.pages); in rcar_dmac_alloc_chan_resources()
971 struct rcar_dmac_desc *desc; in rcar_dmac_free_chan_resources() local
987 list_splice_init(&rchan->desc.free, &list); in rcar_dmac_free_chan_resources()
988 list_splice_init(&rchan->desc.pending, &list); in rcar_dmac_free_chan_resources()
989 list_splice_init(&rchan->desc.active, &list); in rcar_dmac_free_chan_resources()
990 list_splice_init(&rchan->desc.done, &list); in rcar_dmac_free_chan_resources()
991 list_splice_init(&rchan->desc.wait, &list); in rcar_dmac_free_chan_resources()
993 list_for_each_entry(desc, &list, node) in rcar_dmac_free_chan_resources()
994 rcar_dmac_realloc_hwdesc(rchan, desc, 0); in rcar_dmac_free_chan_resources()
996 list_for_each_entry_safe(page, _page, &rchan->desc.pages, node) { in rcar_dmac_free_chan_resources()
1054 struct dma_async_tx_descriptor *desc; in rcar_dmac_prep_dma_cyclic() local
1097 desc = rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, dev_addr, in rcar_dmac_prep_dma_cyclic()
1101 return desc; in rcar_dmac_prep_dma_cyclic()
1143 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_chan_get_residue() local
1149 if (!desc) in rcar_dmac_chan_get_residue()
1157 if (cookie != desc->async_tx.cookie) in rcar_dmac_chan_get_residue()
1158 return desc->size; in rcar_dmac_chan_get_residue()
1166 if (desc->hwdescs.use) { in rcar_dmac_chan_get_residue()
1169 WARN_ON(dptr >= desc->nchunks); in rcar_dmac_chan_get_residue()
1171 running = desc->running; in rcar_dmac_chan_get_residue()
1175 list_for_each_entry_reverse(chunk, &desc->chunks, node) { in rcar_dmac_chan_get_residue()
1176 if (chunk == running || ++dptr == desc->nchunks) in rcar_dmac_chan_get_residue()
1183 residue += rcar_dmac_chan_read(chan, RCAR_DMATCR) << desc->xfer_shift; in rcar_dmac_chan_get_residue()
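
The residue computed above therefore decomposes as follows; a summary inferred from those lines:

/*
 * residue = sum of chunk->size for every chunk not yet reached
 *         + (RCAR_DMATCR value) << desc->xfer_shift for the current one
 *
 * The reverse walk stops at desc->running in register mode; in
 * hardware-descriptor mode desc->running is not advanced per chunk,
 * so dptr (derived from the hardware's descriptor pointer) decides
 * where to stop instead.
 */
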
1217 if (list_empty(&rchan->desc.pending)) in rcar_dmac_issue_pending()
1221 list_splice_tail_init(&rchan->desc.pending, &rchan->desc.active); in rcar_dmac_issue_pending()
1227 if (!rchan->desc.running) { in rcar_dmac_issue_pending()
1228 struct rcar_dmac_desc *desc; in rcar_dmac_issue_pending() local
1230 desc = list_first_entry(&rchan->desc.active, in rcar_dmac_issue_pending()
1232 rchan->desc.running = desc; in rcar_dmac_issue_pending()
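
rcar_dmac_issue_pending() above is small enough to reconstruct in full; a sketch with locking elided and the to_rcar_dmac_chan() container_of helper assumed:

static void rcar_dmac_issue_pending(struct dma_chan *chan)
{
        struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan);

        if (list_empty(&rchan->desc.pending))
                return;

        /* Move queued descriptors to the active list. */
        list_splice_tail_init(&rchan->desc.pending, &rchan->desc.active);

        /* If the channel is idle, start the first active descriptor. */
        if (!rchan->desc.running) {
                rchan->desc.running = list_first_entry(&rchan->desc.active,
                                                       struct rcar_dmac_desc,
                                                       node);
                rcar_dmac_chan_start_xfer(rchan);
        }
}
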
1247 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_isr_desc_stage_end() local
1250 if (WARN_ON(!desc || !desc->cyclic)) { in rcar_dmac_isr_desc_stage_end()
1269 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_isr_transfer_end() local
1272 if (WARN_ON_ONCE(!desc)) { in rcar_dmac_isr_transfer_end()
1286 if (!desc->hwdescs.use) { in rcar_dmac_isr_transfer_end()
1292 if (!list_is_last(&desc->running->node, &desc->chunks)) { in rcar_dmac_isr_transfer_end()
1293 desc->running = list_next_entry(desc->running, node); in rcar_dmac_isr_transfer_end()
1294 if (!desc->cyclic) in rcar_dmac_isr_transfer_end()
1303 if (desc->cyclic) { in rcar_dmac_isr_transfer_end()
1304 desc->running = in rcar_dmac_isr_transfer_end()
1305 list_first_entry(&desc->chunks, in rcar_dmac_isr_transfer_end()
1313 list_move_tail(&desc->node, &chan->desc.done); in rcar_dmac_isr_transfer_end()
1316 if (!list_empty(&chan->desc.active)) in rcar_dmac_isr_transfer_end()
1317 chan->desc.running = list_first_entry(&chan->desc.active, in rcar_dmac_isr_transfer_end()
1321 chan->desc.running = NULL; in rcar_dmac_isr_transfer_end()
1324 if (chan->desc.running) in rcar_dmac_isr_transfer_end()
1358 struct rcar_dmac_desc *desc; in rcar_dmac_isr_channel_thread() local
1363 if (chan->desc.running && chan->desc.running->cyclic) { in rcar_dmac_isr_channel_thread()
1367 desc = chan->desc.running; in rcar_dmac_isr_channel_thread()
1368 callback = desc->async_tx.callback; in rcar_dmac_isr_channel_thread()
1369 callback_param = desc->async_tx.callback_param; in rcar_dmac_isr_channel_thread()
1382 while (!list_empty(&chan->desc.done)) { in rcar_dmac_isr_channel_thread()
1383 desc = list_first_entry(&chan->desc.done, struct rcar_dmac_desc, in rcar_dmac_isr_channel_thread()
1385 dma_cookie_complete(&desc->async_tx); in rcar_dmac_isr_channel_thread()
1386 list_del(&desc->node); in rcar_dmac_isr_channel_thread()
1388 if (desc->async_tx.callback) { in rcar_dmac_isr_channel_thread()
1395 desc->async_tx.callback(desc->async_tx.callback_param); in rcar_dmac_isr_channel_thread()
1399 list_add_tail(&desc->node, &chan->desc.wait); in rcar_dmac_isr_channel_thread()
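
The completion path above drains desc.done, releasing the channel lock around each callback. A reconstruction of that loop from rcar_dmac_isr_channel_thread(); the spin_lock_irq()/spin_unlock_irq() pair on an assumed chan->lock follows the usual dmaengine pattern:

while (!list_empty(&chan->desc.done)) {
        desc = list_first_entry(&chan->desc.done, struct rcar_dmac_desc,
                                node);
        dma_cookie_complete(&desc->async_tx);
        list_del(&desc->node);

        if (desc->async_tx.callback) {
                /* Drop the lock: the callback may submit new work. */
                spin_unlock_irq(&chan->lock);
                desc->async_tx.callback(desc->async_tx.callback_param);
                spin_lock_irq(&chan->lock);
        }

        /* Completed but not yet acked: park on desc.wait until
         * rcar_dmac_desc_recycle_acked() returns it to desc.free. */
        list_add_tail(&desc->node, &chan->desc.wait);
}
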
1536 INIT_LIST_HEAD(&rchan->desc.free); in rcar_dmac_chan_probe()
1537 INIT_LIST_HEAD(&rchan->desc.pending); in rcar_dmac_chan_probe()
1538 INIT_LIST_HEAD(&rchan->desc.active); in rcar_dmac_chan_probe()
1539 INIT_LIST_HEAD(&rchan->desc.done); in rcar_dmac_chan_probe()
1540 INIT_LIST_HEAD(&rchan->desc.wait); in rcar_dmac_chan_probe()