Lines matching refs:desc (references to the descriptor variable desc in drivers/dma/at_xdmac.c)

415 	struct at_xdmac_desc	*desc = txd_to_at_desc(tx);  in at_xdmac_tx_submit()  local
424 __func__, atchan, desc); in at_xdmac_tx_submit()
425 list_add_tail(&desc->xfer_node, &atchan->xfers_list); in at_xdmac_tx_submit()
427 at_xdmac_start_xfer(atchan, desc); in at_xdmac_tx_submit()
436 struct at_xdmac_desc *desc; in at_xdmac_alloc_desc() local
440 desc = dma_pool_alloc(atxdmac->at_xdmac_desc_pool, gfp_flags, &phys); in at_xdmac_alloc_desc()
441 if (desc) { in at_xdmac_alloc_desc()
442 memset(desc, 0, sizeof(*desc)); in at_xdmac_alloc_desc()
443 INIT_LIST_HEAD(&desc->descs_list); in at_xdmac_alloc_desc()
444 dma_async_tx_descriptor_init(&desc->tx_dma_desc, chan); in at_xdmac_alloc_desc()
445 desc->tx_dma_desc.tx_submit = at_xdmac_tx_submit; in at_xdmac_alloc_desc()
446 desc->tx_dma_desc.phys = phys; in at_xdmac_alloc_desc()
449 return desc; in at_xdmac_alloc_desc()
455 struct at_xdmac_desc *desc; in at_xdmac_get_desc() local
458 desc = at_xdmac_alloc_desc(&atchan->chan, GFP_NOWAIT); in at_xdmac_get_desc()
460 desc = list_first_entry(&atchan->free_descs_list, in at_xdmac_get_desc()
462 list_del(&desc->desc_node); in at_xdmac_get_desc()
463 desc->active_xfer = false; in at_xdmac_get_desc()
466 return desc; in at_xdmac_get_desc()
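
The two helpers above form a simple pooling scheme: at_xdmac_get_desc() reuses a descriptor from the channel's free_descs_list when one is available and only falls back to at_xdmac_alloc_desc() (a fresh dma_pool allocation) when the list is empty. A minimal, compile-able userspace model of that reuse-or-allocate pattern follows; the demo_* names are hypothetical stand-ins for the driver's structures, not its API.

#include <stdbool.h>
#include <stdlib.h>

/* Hypothetical stand-in for struct at_xdmac_desc: only the fields the
 * pooling logic needs (a free-list link and the active flag). */
struct demo_pool_desc {
	struct demo_pool_desc *next;    /* models desc_node on free_descs_list */
	bool active_xfer;
};

struct demo_chan {
	struct demo_pool_desc *free_list;  /* models atchan->free_descs_list */
};

/* Models at_xdmac_alloc_desc(): a fresh, zeroed descriptor. */
static struct demo_pool_desc *demo_alloc_desc(void)
{
	return calloc(1, sizeof(struct demo_pool_desc));
}

/* Models at_xdmac_get_desc(): prefer the free list, allocate otherwise. */
static struct demo_pool_desc *demo_get_desc(struct demo_chan *chan)
{
	struct demo_pool_desc *desc;

	if (chan->free_list) {
		desc = chan->free_list;
		chan->free_list = desc->next;   /* list_del() equivalent */
		desc->active_xfer = false;      /* reset state on reuse */
	} else {
		desc = demo_alloc_desc();       /* GFP_NOWAIT path in the driver */
	}
	return desc;
}
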
624 struct at_xdmac_desc *desc = NULL; in at_xdmac_prep_slave_sg() local
636 desc = at_xdmac_get_desc(atchan); in at_xdmac_prep_slave_sg()
637 if (!desc) { in at_xdmac_prep_slave_sg()
646 desc->lld.mbr_sa = atchan->sconfig.src_addr; in at_xdmac_prep_slave_sg()
647 desc->lld.mbr_da = mem; in at_xdmac_prep_slave_sg()
649 desc->lld.mbr_sa = mem; in at_xdmac_prep_slave_sg()
650 desc->lld.mbr_da = atchan->sconfig.dst_addr; in at_xdmac_prep_slave_sg()
656 desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV2 /* next descriptor view */ in at_xdmac_prep_slave_sg()
661 desc->lld.mbr_cfg = (atchan->cfg & ~AT_XDMAC_CC_DWIDTH_MASK) | in at_xdmac_prep_slave_sg()
665 __func__, &desc->lld.mbr_sa, &desc->lld.mbr_da, desc->lld.mbr_ubc); in at_xdmac_prep_slave_sg()
669 prev->lld.mbr_nda = desc->tx_dma_desc.phys; in at_xdmac_prep_slave_sg()
675 prev = desc; in at_xdmac_prep_slave_sg()
677 first = desc; in at_xdmac_prep_slave_sg()
680 __func__, desc, first); in at_xdmac_prep_slave_sg()
681 list_add_tail(&desc->desc_node, &first->descs_list); in at_xdmac_prep_slave_sg()
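
In the slave_sg preparation above, each scatterlist entry becomes one linked-list descriptor: the source and destination addresses and the microblock control word are written into desc->lld, the previous descriptor's mbr_nda is pointed at the new descriptor's physical address so the controller can walk the chain on its own, the first descriptor is remembered as the transfer head, and every descriptor is collected on first->descs_list. A compile-able sketch of that chaining loop, with a hypothetical demo_lld/demo_desc layout in place of the real one:

#include <stdint.h>
#include <stddef.h>

/* Hypothetical model of the hardware linked-list descriptor (lld). */
struct demo_lld {
	uint64_t mbr_sa;   /* source address */
	uint64_t mbr_da;   /* destination address */
	uint64_t mbr_nda;  /* next descriptor address (physical) */
	uint32_t mbr_ubc;  /* microblock control: length in the low 24 bits */
};

struct demo_desc {
	struct demo_lld lld;
	uint64_t phys;     /* models tx_dma_desc.phys */
};

/* Chain one descriptor per segment for a device-to-memory transfer. */
static struct demo_desc *demo_build_chain(struct demo_desc *pool, size_t n,
					  uint64_t dev_addr,
					  const uint64_t *seg_addr,
					  const uint32_t *seg_len)
{
	struct demo_desc *first = NULL, *prev = NULL;
	size_t i;

	for (i = 0; i < n; i++) {
		struct demo_desc *desc = &pool[i];

		desc->lld.mbr_sa = dev_addr;      /* DEV_TO_MEM direction */
		desc->lld.mbr_da = seg_addr[i];
		desc->lld.mbr_ubc = seg_len[i];   /* the driver stores len >> dwidth plus view/flag bits */

		if (prev)
			prev->lld.mbr_nda = desc->phys;  /* link previous -> this */
		else
			first = desc;                    /* remember the chain head */
		prev = desc;
	}
	return first;
}
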
726 struct at_xdmac_desc *desc = NULL; in at_xdmac_prep_dma_cyclic() local
729 desc = at_xdmac_get_desc(atchan); in at_xdmac_prep_dma_cyclic()
730 if (!desc) { in at_xdmac_prep_dma_cyclic()
740 __func__, desc, &desc->tx_dma_desc.phys); in at_xdmac_prep_dma_cyclic()
743 desc->lld.mbr_sa = atchan->sconfig.src_addr; in at_xdmac_prep_dma_cyclic()
744 desc->lld.mbr_da = buf_addr + i * period_len; in at_xdmac_prep_dma_cyclic()
746 desc->lld.mbr_sa = buf_addr + i * period_len; in at_xdmac_prep_dma_cyclic()
747 desc->lld.mbr_da = atchan->sconfig.dst_addr; in at_xdmac_prep_dma_cyclic()
749 desc->lld.mbr_cfg = atchan->cfg; in at_xdmac_prep_dma_cyclic()
750 desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV1 in at_xdmac_prep_dma_cyclic()
754 | period_len >> at_xdmac_get_dwidth(desc->lld.mbr_cfg); in at_xdmac_prep_dma_cyclic()
758 __func__, &desc->lld.mbr_sa, &desc->lld.mbr_da, desc->lld.mbr_ubc); in at_xdmac_prep_dma_cyclic()
762 prev->lld.mbr_nda = desc->tx_dma_desc.phys; in at_xdmac_prep_dma_cyclic()
768 prev = desc; in at_xdmac_prep_dma_cyclic()
770 first = desc; in at_xdmac_prep_dma_cyclic()
773 __func__, desc, first); in at_xdmac_prep_dma_cyclic()
774 list_add_tail(&desc->desc_node, &first->descs_list); in at_xdmac_prep_dma_cyclic()
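
The cyclic preparation follows the same pattern, but the memory-side address of period i is simply buf_addr + i * period_len; in the driver the last descriptor's mbr_nda is then pointed back at the first so the controller loops over the buffer indefinitely (that wrap-up assigns through prev and first rather than desc, which is why it does not show up in this refs:desc listing). A sketch of the per-period address math and the ring closure, reusing the hypothetical demo_desc type from the previous block:

/* Build a circular chain covering n_periods * period_len bytes (MEM_TO_DEV). */
static struct demo_desc *demo_build_cyclic(struct demo_desc *pool, size_t n_periods,
					   uint64_t buf_addr, uint32_t period_len,
					   uint64_t dev_addr)
{
	struct demo_desc *first = NULL, *prev = NULL;
	size_t i;

	for (i = 0; i < n_periods; i++) {
		struct demo_desc *desc = &pool[i];

		desc->lld.mbr_sa = buf_addr + i * period_len;  /* period i within the buffer */
		desc->lld.mbr_da = dev_addr;
		desc->lld.mbr_ubc = period_len;   /* the driver stores period_len >> dwidth here */

		if (prev)
			prev->lld.mbr_nda = desc->phys;
		else
			first = desc;
		prev = desc;
	}
	if (prev && first)
		prev->lld.mbr_nda = first->phys;  /* close the ring: last points back to first */
	return first;
}
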
840 struct at_xdmac_desc *desc = NULL; in at_xdmac_prep_dma_memcpy() local
845 desc = at_xdmac_get_desc(atchan); in at_xdmac_prep_dma_memcpy()
847 if (!desc) { in at_xdmac_prep_dma_memcpy()
884 desc->lld.mbr_sa = src_addr; in at_xdmac_prep_dma_memcpy()
885 desc->lld.mbr_da = dst_addr; in at_xdmac_prep_dma_memcpy()
886 desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV2 in at_xdmac_prep_dma_memcpy()
891 desc->lld.mbr_cfg = chan_cc; in at_xdmac_prep_dma_memcpy()
895 __func__, &desc->lld.mbr_sa, &desc->lld.mbr_da, desc->lld.mbr_ubc, desc->lld.mbr_cfg); in at_xdmac_prep_dma_memcpy()
899 prev->lld.mbr_nda = desc->tx_dma_desc.phys; in at_xdmac_prep_dma_memcpy()
905 prev = desc; in at_xdmac_prep_dma_memcpy()
907 first = desc; in at_xdmac_prep_dma_memcpy()
910 __func__, desc, first); in at_xdmac_prep_dma_memcpy()
911 list_add_tail(&desc->desc_node, &first->descs_list); in at_xdmac_prep_dma_memcpy()
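
The memcpy path differs in that one copy may need several descriptors: the microblock length field is only 24 bits wide, so the driver splits the copy into chunks, advancing the source and destination addresses by the amount already covered and chaining the chunks exactly as above. A sketch of that splitting arithmetic (the 24-bit limit mirrors the 0xffffff mask visible in the residue code below; the constant and callback names are hypothetical):

#include <stdint.h>
#include <stddef.h>

#define DEMO_UBLEN_MAX 0xffffffu   /* 24-bit microblock length field */

/* Split a copy of len bytes into microblock-sized chunks.
 * dwidth is log2 of the transfer width in bytes (0 = byte, 2 = 32-bit word).
 * emit() stands in for "fill one descriptor and chain it". */
static size_t demo_split_memcpy(uint64_t src, uint64_t dst, size_t len,
				unsigned int dwidth,
				void (*emit)(uint64_t s, uint64_t d, uint32_t ublen))
{
	size_t max_bytes = (size_t)DEMO_UBLEN_MAX << dwidth;
	size_t nchunks = 0;

	while (len) {
		size_t xfer = len < max_bytes ? len : max_bytes;

		emit(src, dst, (uint32_t)(xfer >> dwidth));  /* ublen counts dwidth-sized units */
		src += xfer;
		dst += xfer;
		len -= xfer;
		nchunks++;
	}
	return nchunks;
}
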
926 struct at_xdmac_desc *desc, *_desc; in at_xdmac_tx_status() local
943 desc = list_first_entry(&atchan->xfers_list, struct at_xdmac_desc, xfer_node); in at_xdmac_tx_status()
949 if (!desc->active_xfer) { in at_xdmac_tx_status()
950 dma_set_residue(txstate, desc->xfer_size); in at_xdmac_tx_status()
954 residue = desc->xfer_size; in at_xdmac_tx_status()
961 if ((desc->lld.mbr_cfg & mask) == value) { in at_xdmac_tx_status()
1008 descs_list = &desc->descs_list; in at_xdmac_tx_status()
1009 list_for_each_entry_safe(desc, _desc, descs_list, desc_node) { in at_xdmac_tx_status()
1010 dwidth = at_xdmac_get_dwidth(desc->lld.mbr_cfg); in at_xdmac_tx_status()
1011 residue -= (desc->lld.mbr_ubc & 0xffffff) << dwidth; in at_xdmac_tx_status()
1012 if ((desc->lld.mbr_nda & 0xfffffffc) == cur_nda) in at_xdmac_tx_status()
1021 __func__, desc, &desc->tx_dma_desc.phys, ret, cookie, residue); in at_xdmac_tx_status()
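
The residue computation starts from the transfer's total size and walks descs_list, subtracting each descriptor's byte count ((mbr_ubc & 0xffffff) << dwidth) until it reaches the descriptor whose mbr_nda matches the controller's current next-descriptor address; the driver then adds back the in-flight microblock counter read from hardware, a line that does not reference desc and so is absent from this listing. A standalone sketch of that walk over the hypothetical demo_desc chain (an array here, a linked list in the driver):

/* Remaining bytes, given the controller's current next-descriptor address. */
static size_t demo_residue(const struct demo_desc *chain, size_t n,
			   size_t total_size, uint64_t cur_nda,
			   unsigned int dwidth)
{
	size_t residue = total_size;
	size_t i;

	for (i = 0; i < n; i++) {
		const struct demo_desc *desc = &chain[i];

		/* bytes carried by this descriptor's microblock */
		residue -= (size_t)(desc->lld.mbr_ubc & 0xffffff) << dwidth;
		/* stop at the descriptor the controller will fetch next
		 * (addresses compared with the low two bits masked off) */
		if ((desc->lld.mbr_nda & ~(uint64_t)0x3) == (cur_nda & ~(uint64_t)0x3))
			break;
	}
	return residue;
}
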
1030 struct at_xdmac_desc *desc) in at_xdmac_remove_xfer() argument
1032 dev_dbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); in at_xdmac_remove_xfer()
1038 list_del(&desc->xfer_node); in at_xdmac_remove_xfer()
1039 list_splice_init(&desc->descs_list, &atchan->free_descs_list); in at_xdmac_remove_xfer()
1044 struct at_xdmac_desc *desc; in at_xdmac_advance_work() local
1054 desc = list_first_entry(&atchan->xfers_list, in at_xdmac_advance_work()
1057 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); in at_xdmac_advance_work()
1058 if (!desc->active_xfer) in at_xdmac_advance_work()
1059 at_xdmac_start_xfer(atchan, desc); in at_xdmac_advance_work()
1067 struct at_xdmac_desc *desc; in at_xdmac_handle_cyclic() local
1070 desc = list_first_entry(&atchan->xfers_list, struct at_xdmac_desc, xfer_node); in at_xdmac_handle_cyclic()
1071 txd = &desc->tx_dma_desc; in at_xdmac_handle_cyclic()
1080 struct at_xdmac_desc *desc; in at_xdmac_tasklet() local
1104 desc = list_first_entry(&atchan->xfers_list, in at_xdmac_tasklet()
1107 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); in at_xdmac_tasklet()
1108 BUG_ON(!desc->active_xfer); in at_xdmac_tasklet()
1110 txd = &desc->tx_dma_desc; in at_xdmac_tasklet()
1112 at_xdmac_remove_xfer(atchan, desc); in at_xdmac_tasklet()
1253 struct at_xdmac_desc *desc, *_desc; in at_xdmac_device_terminate_all() local
1266 list_for_each_entry_safe(desc, _desc, &atchan->xfers_list, xfer_node) in at_xdmac_device_terminate_all()
1267 at_xdmac_remove_xfer(atchan, desc); in at_xdmac_device_terminate_all()
1279 struct at_xdmac_desc *desc; in at_xdmac_alloc_chan_resources() local
1300 desc = at_xdmac_alloc_desc(chan, GFP_ATOMIC); in at_xdmac_alloc_chan_resources()
1301 if (!desc) { in at_xdmac_alloc_chan_resources()
1306 list_add_tail(&desc->desc_node, &atchan->free_descs_list); in at_xdmac_alloc_chan_resources()
1322 struct at_xdmac_desc *desc, *_desc; in at_xdmac_free_chan_resources() local
1324 list_for_each_entry_safe(desc, _desc, &atchan->free_descs_list, desc_node) { in at_xdmac_free_chan_resources()
1325 dev_dbg(chan2dev(chan), "%s: freeing descriptor %p\n", __func__, desc); in at_xdmac_free_chan_resources()
1326 list_del(&desc->desc_node); in at_xdmac_free_chan_resources()
1327 dma_pool_free(atxdmac->at_xdmac_desc_pool, desc, desc->tx_dma_desc.phys); in at_xdmac_free_chan_resources()
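
Channel teardown is the mirror of the allocation path: alloc_chan_resources pre-populates free_descs_list with a batch of descriptors, and free_chan_resources above drains that list, unlinking each descriptor and returning it to the dma_pool it came from. Completing the userspace model from the first sketch, a matching cleanup that drains the hypothetical free list:

/* Models at_xdmac_free_chan_resources(): drain the free list and release
 * every descriptor (dma_pool_free() in the driver, plain free() here). */
static void demo_free_chan_resources(struct demo_chan *chan)
{
	struct demo_pool_desc *desc = chan->free_list;

	while (desc) {
		struct demo_pool_desc *next = desc->next;

		free(desc);
		desc = next;
	}
	chan->free_list = NULL;
}
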