Lines Matching refs:desc
92 struct pch_dma_desc_regs desc[MAX_CHAN_NR]; member
331 static void pdc_dostart(struct pch_dma_chan *pd_chan, struct pch_dma_desc* desc) in pdc_dostart() argument
340 pd_chan->chan.chan_id, desc->regs.dev_addr); in pdc_dostart()
342 pd_chan->chan.chan_id, desc->regs.mem_addr); in pdc_dostart()
344 pd_chan->chan.chan_id, desc->regs.size); in pdc_dostart()
346 pd_chan->chan.chan_id, desc->regs.next); in pdc_dostart()
348 if (list_empty(&desc->tx_list)) { in pdc_dostart()
349 channel_writel(pd_chan, DEV_ADDR, desc->regs.dev_addr); in pdc_dostart()
350 channel_writel(pd_chan, MEM_ADDR, desc->regs.mem_addr); in pdc_dostart()
351 channel_writel(pd_chan, SIZE, desc->regs.size); in pdc_dostart()
352 channel_writel(pd_chan, NEXT, desc->regs.next); in pdc_dostart()
355 channel_writel(pd_chan, NEXT, desc->txd.phys); in pdc_dostart()
361 struct pch_dma_desc *desc) in pdc_chain_complete() argument
363 struct dma_async_tx_descriptor *txd = &desc->txd; in pdc_chain_complete()
367 list_splice_init(&desc->tx_list, &pd_chan->free_list); in pdc_chain_complete()
368 list_move(&desc->desc_node, &pd_chan->free_list); in pdc_chain_complete()
376 struct pch_dma_desc *desc, *_d; in pdc_complete_all() local
387 list_for_each_entry_safe(desc, _d, &list, desc_node) in pdc_complete_all()
388 pdc_chain_complete(pd_chan, desc); in pdc_complete_all()
423 struct pch_dma_desc *desc = to_pd_desc(txd); in pd_tx_submit() local
431 list_add_tail(&desc->desc_node, &pd_chan->active_list); in pd_tx_submit()
432 pdc_dostart(pd_chan, desc); in pd_tx_submit()
434 list_add_tail(&desc->desc_node, &pd_chan->queue); in pd_tx_submit()
443 struct pch_dma_desc *desc = NULL; in pdc_alloc_desc() local
447 desc = pci_pool_alloc(pd->pool, flags, &addr); in pdc_alloc_desc()
448 if (desc) { in pdc_alloc_desc()
449 memset(desc, 0, sizeof(struct pch_dma_desc)); in pdc_alloc_desc()
450 INIT_LIST_HEAD(&desc->tx_list); in pdc_alloc_desc()
451 dma_async_tx_descriptor_init(&desc->txd, chan); in pdc_alloc_desc()
452 desc->txd.tx_submit = pd_tx_submit; in pdc_alloc_desc()
453 desc->txd.flags = DMA_CTRL_ACK; in pdc_alloc_desc()
454 desc->txd.phys = addr; in pdc_alloc_desc()
457 return desc; in pdc_alloc_desc()
462 struct pch_dma_desc *desc, *_d; in pdc_desc_get() local
467 list_for_each_entry_safe(desc, _d, &pd_chan->free_list, desc_node) { in pdc_desc_get()
469 if (async_tx_test_ack(&desc->txd)) { in pdc_desc_get()
470 list_del(&desc->desc_node); in pdc_desc_get()
471 ret = desc; in pdc_desc_get()
474 dev_dbg(chan2dev(&pd_chan->chan), "desc %p not ACKed\n", desc); in pdc_desc_get()
495 struct pch_dma_desc *desc) in pdc_desc_put() argument
497 if (desc) { in pdc_desc_put()
499 list_splice_init(&desc->tx_list, &pd_chan->free_list); in pdc_desc_put()
500 list_add(&desc->desc_node, &pd_chan->free_list); in pdc_desc_put()
508 struct pch_dma_desc *desc; in pd_alloc_chan_resources() local
521 desc = pdc_alloc_desc(chan, GFP_KERNEL); in pd_alloc_chan_resources()
523 if (!desc) { in pd_alloc_chan_resources()
529 list_add_tail(&desc->desc_node, &tmp_list); in pd_alloc_chan_resources()
547 struct pch_dma_desc *desc, *_d; in pd_free_chan_resources() local
559 list_for_each_entry_safe(desc, _d, &tmp_list, desc_node) in pd_free_chan_resources()
560 pci_pool_free(pd->pool, desc, desc->txd.phys); in pd_free_chan_resources()
591 struct pch_dma_desc *desc = NULL; in pd_prep_slave_sg() local
612 desc = pdc_desc_get(pd_chan); in pd_prep_slave_sg()
614 if (!desc) in pd_prep_slave_sg()
617 desc->regs.dev_addr = reg; in pd_prep_slave_sg()
618 desc->regs.mem_addr = sg_dma_address(sg); in pd_prep_slave_sg()
619 desc->regs.size = sg_dma_len(sg); in pd_prep_slave_sg()
620 desc->regs.next = DMA_DESC_FOLLOW_WITHOUT_IRQ; in pd_prep_slave_sg()
624 if (desc->regs.size > DMA_DESC_MAX_COUNT_1_BYTE) in pd_prep_slave_sg()
626 desc->regs.size |= DMA_DESC_WIDTH_1_BYTE; in pd_prep_slave_sg()
629 if (desc->regs.size > DMA_DESC_MAX_COUNT_2_BYTES) in pd_prep_slave_sg()
631 desc->regs.size |= DMA_DESC_WIDTH_2_BYTES; in pd_prep_slave_sg()
634 if (desc->regs.size > DMA_DESC_MAX_COUNT_4_BYTES) in pd_prep_slave_sg()
636 desc->regs.size |= DMA_DESC_WIDTH_4_BYTES; in pd_prep_slave_sg()
643 first = desc; in pd_prep_slave_sg()
645 prev->regs.next |= desc->txd.phys; in pd_prep_slave_sg()
646 list_add_tail(&desc->desc_node, &first->tx_list); in pd_prep_slave_sg()
649 prev = desc; in pd_prep_slave_sg()
653 desc->regs.next = DMA_DESC_END_WITH_IRQ; in pd_prep_slave_sg()
655 desc->regs.next = DMA_DESC_END_WITHOUT_IRQ; in pd_prep_slave_sg()
658 desc->txd.flags = flags; in pd_prep_slave_sg()
671 struct pch_dma_desc *desc, *_d; in pd_device_terminate_all() local
681 list_for_each_entry_safe(desc, _d, &list, desc_node) in pd_device_terminate_all()
682 pdc_chain_complete(pd_chan, desc); in pd_device_terminate_all()
909 pd_chan->membase = &regs->desc[i]; in pch_dma_probe()