Lines Matching refs:desc

422 struct at_xdmac_desc *desc = txd_to_at_desc(tx); in at_xdmac_tx_submit() local
431 __func__, atchan, desc); in at_xdmac_tx_submit()
432 list_add_tail(&desc->xfer_node, &atchan->xfers_list); in at_xdmac_tx_submit()
434 at_xdmac_start_xfer(atchan, desc); in at_xdmac_tx_submit()
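
The at_xdmac_tx_submit() matches above show the standard dmaengine submit step: the new descriptor is queued on the channel's xfers_list and started immediately if nothing else is in flight. A minimal sketch of that flow follows; the cookie assignment, the atchan->lock locking and the idle test do not reference desc and so are not in the listing, they are filled in here as assumptions (as is the to_at_xdmac_chan() helper).

static dma_cookie_t at_xdmac_tx_submit(struct dma_async_tx_descriptor *tx)
{
        struct at_xdmac_desc *desc = txd_to_at_desc(tx);
        struct at_xdmac_chan *atchan = to_at_xdmac_chan(tx->chan);
        dma_cookie_t cookie;
        unsigned long irqflags;

        spin_lock_irqsave(&atchan->lock, irqflags);
        cookie = dma_cookie_assign(tx);

        /* park the transfer; kick the hardware only if the channel was idle */
        list_add_tail(&desc->xfer_node, &atchan->xfers_list);
        if (list_is_singular(&atchan->xfers_list))
                at_xdmac_start_xfer(atchan, desc);

        spin_unlock_irqrestore(&atchan->lock, irqflags);
        return cookie;
}
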
443 struct at_xdmac_desc *desc; in at_xdmac_alloc_desc() local
447 desc = dma_pool_alloc(atxdmac->at_xdmac_desc_pool, gfp_flags, &phys); in at_xdmac_alloc_desc()
448 if (desc) { in at_xdmac_alloc_desc()
449 memset(desc, 0, sizeof(*desc)); in at_xdmac_alloc_desc()
450 INIT_LIST_HEAD(&desc->descs_list); in at_xdmac_alloc_desc()
451 dma_async_tx_descriptor_init(&desc->tx_dma_desc, chan); in at_xdmac_alloc_desc()
452 desc->tx_dma_desc.tx_submit = at_xdmac_tx_submit; in at_xdmac_alloc_desc()
453 desc->tx_dma_desc.phys = phys; in at_xdmac_alloc_desc()
456 return desc; in at_xdmac_alloc_desc()
459 void at_xdmac_init_used_desc(struct at_xdmac_desc *desc) in at_xdmac_init_used_desc() argument
461 memset(&desc->lld, 0, sizeof(desc->lld)); in at_xdmac_init_used_desc()
462 INIT_LIST_HEAD(&desc->descs_list); in at_xdmac_init_used_desc()
463 desc->direction = DMA_TRANS_NONE; in at_xdmac_init_used_desc()
464 desc->xfer_size = 0; in at_xdmac_init_used_desc()
465 desc->active_xfer = false; in at_xdmac_init_used_desc()
471 struct at_xdmac_desc *desc; in at_xdmac_get_desc() local
474 desc = at_xdmac_alloc_desc(&atchan->chan, GFP_NOWAIT); in at_xdmac_get_desc()
476 desc = list_first_entry(&atchan->free_descs_list, in at_xdmac_get_desc()
478 list_del(&desc->desc_node); in at_xdmac_get_desc()
479 at_xdmac_init_used_desc(desc); in at_xdmac_get_desc()
482 return desc; in at_xdmac_get_desc()
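
The at_xdmac_alloc_desc()/at_xdmac_get_desc() matches describe a two-level descriptor supply: fresh descriptors come out of a DMA pool, and the prep callbacks first try to recycle one from the per-channel free_descs_list before allocating. A condensed sketch of the pair, assuming the driver's to_at_xdmac() helper and the at_xdmac_desc_pool created at probe time, and leaving the channel locking out:

#include <linux/dmapool.h>
#include <linux/dmaengine.h>

static struct at_xdmac_desc *at_xdmac_alloc_desc(struct dma_chan *chan,
                                                 gfp_t gfp_flags)
{
        struct at_xdmac *atxdmac = to_at_xdmac(chan->device);
        struct at_xdmac_desc *desc;
        dma_addr_t phys;

        /* dma_pool_zalloc() stands in for the dma_pool_alloc() + memset() pair */
        desc = dma_pool_zalloc(atxdmac->at_xdmac_desc_pool, gfp_flags, &phys);
        if (desc) {
                INIT_LIST_HEAD(&desc->descs_list);
                dma_async_tx_descriptor_init(&desc->tx_dma_desc, chan);
                desc->tx_dma_desc.tx_submit = at_xdmac_tx_submit;
                desc->tx_dma_desc.phys = phys;  /* needed later for dma_pool_free() */
        }
        return desc;
}

static struct at_xdmac_desc *at_xdmac_get_desc(struct at_xdmac_chan *atchan)
{
        struct at_xdmac_desc *desc;

        if (list_empty(&atchan->free_descs_list)) {
                desc = at_xdmac_alloc_desc(&atchan->chan, GFP_NOWAIT);
        } else {
                /* recycle a descriptor and wipe whatever the last transfer left */
                desc = list_first_entry(&atchan->free_descs_list,
                                        struct at_xdmac_desc, desc_node);
                list_del(&desc->desc_node);
                at_xdmac_init_used_desc(desc);
        }
        return desc;
}

GFP_NOWAIT is used on this path because the dmaengine prep callbacks may be invoked from atomic context, so sleeping allocations are not an option there.
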
487 struct at_xdmac_desc *desc) in at_xdmac_queue_desc() argument
489 if (!prev || !desc) in at_xdmac_queue_desc()
492 prev->lld.mbr_nda = desc->tx_dma_desc.phys; in at_xdmac_queue_desc()
500 struct at_xdmac_desc *desc) in at_xdmac_increment_block_count() argument
502 if (!desc) in at_xdmac_increment_block_count()
505 desc->lld.mbr_bc++; in at_xdmac_increment_block_count()
509 __func__, desc); in at_xdmac_increment_block_count()
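
at_xdmac_queue_desc() is where the hardware linked list is actually built: the previous lld's next-descriptor address (mbr_nda) is pointed at the DMA address of the new descriptor, while at_xdmac_increment_block_count() simply bumps mbr_bc when a block spans several microblocks. A sketch of the chaining step; the next-descriptor-enable flag written into mbr_ubc is assumed here, since that line does not reference desc:

static void at_xdmac_queue_desc(struct dma_chan *chan,
                                struct at_xdmac_desc *prev,
                                struct at_xdmac_desc *desc)
{
        if (!prev || !desc)
                return;

        /* hardware follows mbr_nda to fetch the next lld from memory */
        prev->lld.mbr_nda = desc->tx_dma_desc.phys;
        prev->lld.mbr_ubc |= AT_XDMAC_MBR_UBC_NDE;      /* assumed enable bit */
}
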
667 struct at_xdmac_desc *desc = NULL; in at_xdmac_prep_slave_sg() local
679 desc = at_xdmac_get_desc(atchan); in at_xdmac_prep_slave_sg()
680 if (!desc) { in at_xdmac_prep_slave_sg()
689 desc->lld.mbr_sa = atchan->sconfig.src_addr; in at_xdmac_prep_slave_sg()
690 desc->lld.mbr_da = mem; in at_xdmac_prep_slave_sg()
692 desc->lld.mbr_sa = mem; in at_xdmac_prep_slave_sg()
693 desc->lld.mbr_da = atchan->sconfig.dst_addr; in at_xdmac_prep_slave_sg()
699 desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV2 /* next descriptor view */ in at_xdmac_prep_slave_sg()
703 desc->lld.mbr_cfg = (atchan->cfg & ~AT_XDMAC_CC_DWIDTH_MASK) | in at_xdmac_prep_slave_sg()
707 __func__, &desc->lld.mbr_sa, &desc->lld.mbr_da, desc->lld.mbr_ubc); in at_xdmac_prep_slave_sg()
711 at_xdmac_queue_desc(chan, prev, desc); in at_xdmac_prep_slave_sg()
713 prev = desc; in at_xdmac_prep_slave_sg()
715 first = desc; in at_xdmac_prep_slave_sg()
718 __func__, desc, first); in at_xdmac_prep_slave_sg()
719 list_add_tail(&desc->desc_node, &first->descs_list); in at_xdmac_prep_slave_sg()
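
The at_xdmac_prep_slave_sg() matches sketch the per-segment loop: one descriptor per scatterlist entry, source and destination chosen by transfer direction, a microblock control word built from descriptor view 2 plus the segment length, and every descriptor chained behind the first one. A simplified skeleton of that loop; error unwinding, the data-width fix-up applied to unaligned segments and the per-lld mbr_cfg adjustment are glossed over and should be treated as assumptions:

        for_each_sg(sgl, sg, sg_len, i) {
                dma_addr_t mem = sg_dma_address(sg);
                unsigned int len = sg_dma_len(sg);

                desc = at_xdmac_get_desc(atchan);
                if (!desc)
                        goto err_unwind;        /* hypothetical unwind label */

                if (direction == DMA_DEV_TO_MEM) {
                        desc->lld.mbr_sa = atchan->sconfig.src_addr;    /* device */
                        desc->lld.mbr_da = mem;                         /* memory */
                } else {
                        desc->lld.mbr_sa = mem;
                        desc->lld.mbr_da = atchan->sconfig.dst_addr;
                }
                desc->lld.mbr_cfg = atchan->cfg;        /* width handling simplified */
                /* descriptor view 2, length expressed in data-width units */
                desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV2
                        | (len >> at_xdmac_get_dwidth(desc->lld.mbr_cfg));

                /* chain behind the previous lld and remember the head */
                if (prev)
                        at_xdmac_queue_desc(chan, prev, desc);
                else
                        first = desc;
                prev = desc;

                list_add_tail(&desc->desc_node, &first->descs_list);
        }
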
764 struct at_xdmac_desc *desc = NULL; in at_xdmac_prep_dma_cyclic() local
767 desc = at_xdmac_get_desc(atchan); in at_xdmac_prep_dma_cyclic()
768 if (!desc) { in at_xdmac_prep_dma_cyclic()
778 __func__, desc, &desc->tx_dma_desc.phys); in at_xdmac_prep_dma_cyclic()
781 desc->lld.mbr_sa = atchan->sconfig.src_addr; in at_xdmac_prep_dma_cyclic()
782 desc->lld.mbr_da = buf_addr + i * period_len; in at_xdmac_prep_dma_cyclic()
784 desc->lld.mbr_sa = buf_addr + i * period_len; in at_xdmac_prep_dma_cyclic()
785 desc->lld.mbr_da = atchan->sconfig.dst_addr; in at_xdmac_prep_dma_cyclic()
787 desc->lld.mbr_cfg = atchan->cfg; in at_xdmac_prep_dma_cyclic()
788 desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV1 in at_xdmac_prep_dma_cyclic()
791 | period_len >> at_xdmac_get_dwidth(desc->lld.mbr_cfg); in at_xdmac_prep_dma_cyclic()
795 __func__, &desc->lld.mbr_sa, &desc->lld.mbr_da, desc->lld.mbr_ubc); in at_xdmac_prep_dma_cyclic()
799 at_xdmac_queue_desc(chan, prev, desc); in at_xdmac_prep_dma_cyclic()
801 prev = desc; in at_xdmac_prep_dma_cyclic()
803 first = desc; in at_xdmac_prep_dma_cyclic()
806 __func__, desc, first); in at_xdmac_prep_dma_cyclic()
807 list_add_tail(&desc->desc_node, &first->descs_list); in at_xdmac_prep_dma_cyclic()
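
at_xdmac_prep_dma_cyclic() repeats the same recipe once per period, offsetting the memory address by i * period_len each time and using descriptor view 1. The sketch below follows the fragments; the final step that makes the transfer cyclic, pointing the last lld back at the first, does not reference desc and is added here as an assumption:

        for (i = 0; i < periods; i++) {
                desc = at_xdmac_get_desc(atchan);
                if (!desc)
                        return NULL;            /* unwind simplified */

                if (direction == DMA_DEV_TO_MEM) {
                        desc->lld.mbr_sa = atchan->sconfig.src_addr;
                        desc->lld.mbr_da = buf_addr + i * period_len;
                } else {
                        desc->lld.mbr_sa = buf_addr + i * period_len;
                        desc->lld.mbr_da = atchan->sconfig.dst_addr;
                }
                desc->lld.mbr_cfg = atchan->cfg;
                /* descriptor view 1, period length in data-width units */
                desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV1
                        | (period_len >> at_xdmac_get_dwidth(desc->lld.mbr_cfg));

                if (prev)
                        at_xdmac_queue_desc(chan, prev, desc);
                else
                        first = desc;
                prev = desc;
                list_add_tail(&desc->desc_node, &first->descs_list);
        }

        /* assumed: close the ring so the controller loops over the buffer */
        at_xdmac_queue_desc(chan, prev, first);
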
855 struct at_xdmac_desc *desc; in at_xdmac_interleaved_queue_desc() local
901 desc = at_xdmac_get_desc(atchan); in at_xdmac_interleaved_queue_desc()
903 if (!desc) { in at_xdmac_interleaved_queue_desc()
912 desc->lld.mbr_sa = src; in at_xdmac_interleaved_queue_desc()
913 desc->lld.mbr_da = dst; in at_xdmac_interleaved_queue_desc()
914 desc->lld.mbr_sus = dmaengine_get_src_icg(xt, chunk); in at_xdmac_interleaved_queue_desc()
915 desc->lld.mbr_dus = dmaengine_get_dst_icg(xt, chunk); in at_xdmac_interleaved_queue_desc()
917 desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV3 in at_xdmac_interleaved_queue_desc()
921 desc->lld.mbr_cfg = chan_cc; in at_xdmac_interleaved_queue_desc()
925 __func__, &desc->lld.mbr_sa, &desc->lld.mbr_da, in at_xdmac_interleaved_queue_desc()
926 desc->lld.mbr_ubc, desc->lld.mbr_cfg); in at_xdmac_interleaved_queue_desc()
930 at_xdmac_queue_desc(chan, prev, desc); in at_xdmac_interleaved_queue_desc()
932 return desc; in at_xdmac_interleaved_queue_desc()
980 struct at_xdmac_desc *desc; in at_xdmac_prep_interleaved() local
994 desc = at_xdmac_interleaved_queue_desc(chan, atchan, in at_xdmac_prep_interleaved()
998 if (!desc) { in at_xdmac_prep_interleaved()
1005 first = desc; in at_xdmac_prep_interleaved()
1008 __func__, desc, first); in at_xdmac_prep_interleaved()
1009 list_add_tail(&desc->desc_node, &first->descs_list); in at_xdmac_prep_interleaved()
1018 prev = desc; in at_xdmac_prep_interleaved()
1064 struct at_xdmac_desc *desc = NULL; in at_xdmac_prep_dma_memcpy() local
1069 desc = at_xdmac_get_desc(atchan); in at_xdmac_prep_dma_memcpy()
1071 if (!desc) { in at_xdmac_prep_dma_memcpy()
1098 desc->lld.mbr_sa = src_addr; in at_xdmac_prep_dma_memcpy()
1099 desc->lld.mbr_da = dst_addr; in at_xdmac_prep_dma_memcpy()
1100 desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV2 in at_xdmac_prep_dma_memcpy()
1104 desc->lld.mbr_cfg = chan_cc; in at_xdmac_prep_dma_memcpy()
1108 __func__, &desc->lld.mbr_sa, &desc->lld.mbr_da, desc->lld.mbr_ubc, desc->lld.mbr_cfg); in at_xdmac_prep_dma_memcpy()
1112 at_xdmac_queue_desc(chan, prev, desc); in at_xdmac_prep_dma_memcpy()
1114 prev = desc; in at_xdmac_prep_dma_memcpy()
1116 first = desc; in at_xdmac_prep_dma_memcpy()
1119 __func__, desc, first); in at_xdmac_prep_dma_memcpy()
1120 list_add_tail(&desc->desc_node, &first->descs_list); in at_xdmac_prep_dma_memcpy()
1135 struct at_xdmac_desc *desc; in at_xdmac_memset_create_desc() local
1166 desc = at_xdmac_get_desc(atchan); in at_xdmac_memset_create_desc()
1168 if (!desc) { in at_xdmac_memset_create_desc()
1177 desc->lld.mbr_da = dst_addr; in at_xdmac_memset_create_desc()
1178 desc->lld.mbr_ds = value; in at_xdmac_memset_create_desc()
1179 desc->lld.mbr_ubc = AT_XDMAC_MBR_UBC_NDV3 in at_xdmac_memset_create_desc()
1183 desc->lld.mbr_cfg = chan_cc; in at_xdmac_memset_create_desc()
1187 __func__, &desc->lld.mbr_da, &desc->lld.mbr_ds, desc->lld.mbr_ubc, in at_xdmac_memset_create_desc()
1188 desc->lld.mbr_cfg); in at_xdmac_memset_create_desc()
1190 return desc; in at_xdmac_memset_create_desc()
1198 struct at_xdmac_desc *desc; in at_xdmac_prep_dma_memset() local
1206 desc = at_xdmac_memset_create_desc(chan, atchan, dest, len, value); in at_xdmac_prep_dma_memset()
1207 list_add_tail(&desc->desc_node, &desc->descs_list); in at_xdmac_prep_dma_memset()
1209 desc->tx_dma_desc.cookie = -EBUSY; in at_xdmac_prep_dma_memset()
1210 desc->tx_dma_desc.flags = flags; in at_xdmac_prep_dma_memset()
1211 desc->xfer_size = len; in at_xdmac_prep_dma_memset()
1213 return &desc->tx_dma_desc; in at_xdmac_prep_dma_memset()
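
at_xdmac_prep_dma_memset() is the degenerate single-descriptor case: the lone descriptor heads its own descs_list and the cookie is left at -EBUSY until tx_submit() assigns a real one. A short sketch; the NULL check after at_xdmac_memset_create_desc() does not appear in the listing and is an addition here:

        desc = at_xdmac_memset_create_desc(chan, atchan, dest, len, value);
        if (!desc)                      /* check assumed, not shown above */
                return NULL;

        list_add_tail(&desc->desc_node, &desc->descs_list);

        desc->tx_dma_desc.cookie = -EBUSY;      /* not yet submitted */
        desc->tx_dma_desc.flags = flags;
        desc->xfer_size = len;

        return &desc->tx_dma_desc;
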
1222 struct at_xdmac_desc *desc, *pdesc = NULL, in at_xdmac_prep_dma_memset_sg() local
1239 desc = at_xdmac_memset_create_desc(chan, atchan, in at_xdmac_prep_dma_memset_sg()
1243 if (!desc && first) in at_xdmac_prep_dma_memset_sg()
1248 first = desc; in at_xdmac_prep_dma_memset_sg()
1324 list_add_tail(&desc->desc_node, in at_xdmac_prep_dma_memset_sg()
1328 __func__, desc, first); in at_xdmac_prep_dma_memset_sg()
1343 __func__, desc, pdesc); in at_xdmac_prep_dma_memset_sg()
1356 list_add_tail(&desc->desc_node, in at_xdmac_prep_dma_memset_sg()
1362 pdesc = desc; in at_xdmac_prep_dma_memset_sg()
1384 struct at_xdmac_desc *desc, *_desc; in at_xdmac_tx_status() local
1401 desc = list_first_entry(&atchan->xfers_list, struct at_xdmac_desc, xfer_node); in at_xdmac_tx_status()
1407 if (!desc->active_xfer) { in at_xdmac_tx_status()
1408 dma_set_residue(txstate, desc->xfer_size); in at_xdmac_tx_status()
1412 residue = desc->xfer_size; in at_xdmac_tx_status()
1419 if ((desc->lld.mbr_cfg & mask) == value) { in at_xdmac_tx_status()
1466 descs_list = &desc->descs_list; in at_xdmac_tx_status()
1467 list_for_each_entry_safe(desc, _desc, descs_list, desc_node) { in at_xdmac_tx_status()
1468 dwidth = at_xdmac_get_dwidth(desc->lld.mbr_cfg); in at_xdmac_tx_status()
1469 residue -= (desc->lld.mbr_ubc & 0xffffff) << dwidth; in at_xdmac_tx_status()
1470 if ((desc->lld.mbr_nda & 0xfffffffc) == cur_nda) in at_xdmac_tx_status()
1479 __func__, desc, &desc->tx_dma_desc.phys, ret, cookie, residue); in at_xdmac_tx_status()
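
The at_xdmac_tx_status() fragments show how the residue of an active transfer is derived: start from the descriptor's full xfer_size and subtract the microblock length of every lld the controller has already consumed, stopping at the lld whose next-descriptor address matches the value read back from the channel (cur_nda). A sketch of that walk; reading CNDA/CUBC from the hardware and the serialization around those reads are assumptions, only the list arithmetic appears in the listing:

        residue = desc->xfer_size;

        /* walk a saved head, because desc is reused as the cursor below */
        descs_list = &desc->descs_list;
        list_for_each_entry_safe(desc, _desc, descs_list, desc_node) {
                dwidth = at_xdmac_get_dwidth(desc->lld.mbr_cfg);
                /* low 24 bits of mbr_ubc hold the microblock length in data units */
                residue -= (desc->lld.mbr_ubc & 0xffffff) << dwidth;
                if ((desc->lld.mbr_nda & 0xfffffffc) == cur_nda)
                        break;
        }
        /* add back what is still pending in the current microblock (CUBC) */
        residue += cubc << dwidth;

        dma_set_residue(txstate, residue);
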
1488 struct at_xdmac_desc *desc) in at_xdmac_remove_xfer() argument
1490 dev_dbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); in at_xdmac_remove_xfer()
1496 list_del(&desc->xfer_node); in at_xdmac_remove_xfer()
1497 list_splice_init(&desc->descs_list, &atchan->free_descs_list); in at_xdmac_remove_xfer()
1502 struct at_xdmac_desc *desc; in at_xdmac_advance_work() local
1512 desc = list_first_entry(&atchan->xfers_list, in at_xdmac_advance_work()
1515 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); in at_xdmac_advance_work()
1516 if (!desc->active_xfer) in at_xdmac_advance_work()
1517 at_xdmac_start_xfer(atchan, desc); in at_xdmac_advance_work()
1525 struct at_xdmac_desc *desc; in at_xdmac_handle_cyclic() local
1528 desc = list_first_entry(&atchan->xfers_list, struct at_xdmac_desc, xfer_node); in at_xdmac_handle_cyclic()
1529 txd = &desc->tx_dma_desc; in at_xdmac_handle_cyclic()
1538 struct at_xdmac_desc *desc; in at_xdmac_tasklet() local
1562 desc = list_first_entry(&atchan->xfers_list, in at_xdmac_tasklet()
1565 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); in at_xdmac_tasklet()
1566 BUG_ON(!desc->active_xfer); in at_xdmac_tasklet()
1568 txd = &desc->tx_dma_desc; in at_xdmac_tasklet()
1570 at_xdmac_remove_xfer(atchan, desc); in at_xdmac_tasklet()
1711 struct at_xdmac_desc *desc, *_desc; in at_xdmac_device_terminate_all() local
1724 list_for_each_entry_safe(desc, _desc, &atchan->xfers_list, xfer_node) in at_xdmac_device_terminate_all()
1725 at_xdmac_remove_xfer(atchan, desc); in at_xdmac_device_terminate_all()
1737 struct at_xdmac_desc *desc; in at_xdmac_alloc_chan_resources() local
1758 desc = at_xdmac_alloc_desc(chan, GFP_ATOMIC); in at_xdmac_alloc_chan_resources()
1759 if (!desc) { in at_xdmac_alloc_chan_resources()
1764 list_add_tail(&desc->desc_node, &atchan->free_descs_list); in at_xdmac_alloc_chan_resources()
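
at_xdmac_alloc_chan_resources() pre-allocates a batch of descriptors and parks them on free_descs_list so the prep callbacks can rely on GFP_NOWAIT recycling later. A sketch of that loop; the batch size (init_nb_desc) and the return convention are assumptions, only the allocation and list insertion are in the listing:

        for (i = 0; i < init_nb_desc; i++) {    /* batch size assumed */
                desc = at_xdmac_alloc_desc(chan, GFP_ATOMIC);
                if (!desc) {
                        dev_warn(chan2dev(chan),
                                 "only %d descriptors could be allocated\n", i);
                        break;
                }
                list_add_tail(&desc->desc_node, &atchan->free_descs_list);
        }

        /* dmaengine expects the number of descriptors allocated */
        return i;
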
1780 struct at_xdmac_desc *desc, *_desc; in at_xdmac_free_chan_resources() local
1782 list_for_each_entry_safe(desc, _desc, &atchan->free_descs_list, desc_node) { in at_xdmac_free_chan_resources()
1783 dev_dbg(chan2dev(chan), "%s: freeing descriptor %p\n", __func__, desc); in at_xdmac_free_chan_resources()
1784 list_del(&desc->desc_node); in at_xdmac_free_chan_resources()
1785 dma_pool_free(atxdmac->at_xdmac_desc_pool, desc, desc->tx_dma_desc.phys); in at_xdmac_free_chan_resources()