Lines matching references to sdesc

127 	void (*exec_desc)(struct sirfsoc_dma_desc *sdesc,
133 void (*exec)(struct sirfsoc_dma_desc *sdesc,
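
The two callback declarations above (file lines 127 and 133) both take a struct sirfsoc_dma_desc *. Below is a minimal user-space sketch of the fields this listing shows that descriptor carrying (addr, xlen, ylen, width, dir, cyclic, chain); the field meanings are inferred from the fragments further down and are assumptions, and the kernel members the real struct embeds (the dma_async_tx_descriptor and the list_head node) are left out.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Sketch only: a user-space model of the sirfsoc_dma_desc fields that
 * appear in this listing.  Field meanings are inferred from the
 * prep_interleaved/prep_cyclic fragments below and are assumptions;
 * the real descriptor also embeds a dma_async_tx_descriptor and a
 * list_head node, omitted here.
 */
struct sdesc_model {
	uint32_t addr;    /* DMA buffer address (sdesc->addr) */
	uint32_t xlen;    /* X length in DMA words (sdesc->xlen) */
	uint32_t ylen;    /* number of lines minus one (sdesc->ylen) */
	uint32_t width;   /* line pitch in DMA words (sdesc->width) */
	int      dir;     /* transfer direction flag (sdesc->dir) */
	bool     cyclic;  /* cyclic (looping) transfer (sdesc->cyclic) */
	bool     chain;   /* chained transfer, Atlas7 v2 path (sdesc->chain) */
};

int main(void)
{
	printf("model size: %zu bytes\n", sizeof(struct sdesc_model));
	return 0;
}
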
163 static void sirfsoc_dma_execute_hw_a7v2(struct sirfsoc_dma_desc *sdesc, in sirfsoc_dma_execute_hw_a7v2() argument
166 if (sdesc->chain) { in sirfsoc_dma_execute_hw_a7v2()
168 writel_relaxed((sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT_ATLAS7) | in sirfsoc_dma_execute_hw_a7v2()
169 (sdesc->chain << in sirfsoc_dma_execute_hw_a7v2()
175 writel_relaxed(sdesc->xlen, base + SIRFSOC_DMA_CH_XLEN); in sirfsoc_dma_execute_hw_a7v2()
176 writel_relaxed(sdesc->ylen, base + SIRFSOC_DMA_CH_YLEN); in sirfsoc_dma_execute_hw_a7v2()
177 writel_relaxed(sdesc->width, base + SIRFSOC_DMA_WIDTH_ATLAS7); in sirfsoc_dma_execute_hw_a7v2()
178 writel_relaxed((sdesc->width*((sdesc->ylen+1)>>1)), in sirfsoc_dma_execute_hw_a7v2()
180 writel_relaxed((sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT_ATLAS7) | in sirfsoc_dma_execute_hw_a7v2()
181 (sdesc->chain << in sirfsoc_dma_execute_hw_a7v2()
185 writel_relaxed(sdesc->chain ? SIRFSOC_DMA_INT_END_INT_ATLAS7 : in sirfsoc_dma_execute_hw_a7v2()
189 writel(sdesc->addr, base + SIRFSOC_DMA_CH_ADDR); in sirfsoc_dma_execute_hw_a7v2()
190 if (sdesc->cyclic) in sirfsoc_dma_execute_hw_a7v2()
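
In the Atlas7 v2 execute fragment above, file line 178 computes a derived value, sdesc->width * ((sdesc->ylen + 1) >> 1), before writing it out; the destination register (file line 179) is truncated out of this listing. A standalone sketch of just that arithmetic, under the field meanings assumed earlier:

#include <stdio.h>

/*
 * Sketch only: the derived value from file line 178 of the a7v2
 * execute path.  With ylen = line_count - 1 and width = line pitch in
 * words (assumptions), this is the pitch times half of (ylen + 1),
 * rounded down.  Which register receives it is not visible above.
 */
static unsigned int a7v2_derived_value(unsigned int width, unsigned int ylen)
{
	return width * ((ylen + 1) >> 1);
}

int main(void)
{
	/* hypothetical example: 16-word pitch, ylen = 7 (8 lines) */
	printf("%u\n", a7v2_derived_value(16, 7));	/* prints 64 */
	return 0;
}
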
194 static void sirfsoc_dma_execute_hw_a7v1(struct sirfsoc_dma_desc *sdesc, in sirfsoc_dma_execute_hw_a7v1() argument
199 writel_relaxed(sdesc->width, base + SIRFSOC_DMA_WIDTH_0 + cid * 4); in sirfsoc_dma_execute_hw_a7v1()
201 (sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT), in sirfsoc_dma_execute_hw_a7v1()
203 writel_relaxed(sdesc->xlen, base + cid * 0x10 + SIRFSOC_DMA_CH_XLEN); in sirfsoc_dma_execute_hw_a7v1()
204 writel_relaxed(sdesc->ylen, base + cid * 0x10 + SIRFSOC_DMA_CH_YLEN); in sirfsoc_dma_execute_hw_a7v1()
207 writel(sdesc->addr >> 2, base + cid * 0x10 + SIRFSOC_DMA_CH_ADDR); in sirfsoc_dma_execute_hw_a7v1()
208 if (sdesc->cyclic) { in sirfsoc_dma_execute_hw_a7v1()
216 static void sirfsoc_dma_execute_hw_a6(struct sirfsoc_dma_desc *sdesc, in sirfsoc_dma_execute_hw_a6() argument
219 writel_relaxed(sdesc->width, base + SIRFSOC_DMA_WIDTH_0 + cid * 4); in sirfsoc_dma_execute_hw_a6()
221 (sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT), in sirfsoc_dma_execute_hw_a6()
223 writel_relaxed(sdesc->xlen, base + cid * 0x10 + SIRFSOC_DMA_CH_XLEN); in sirfsoc_dma_execute_hw_a6()
224 writel_relaxed(sdesc->ylen, base + cid * 0x10 + SIRFSOC_DMA_CH_YLEN); in sirfsoc_dma_execute_hw_a6()
227 writel(sdesc->addr >> 2, base + cid * 0x10 + SIRFSOC_DMA_CH_ADDR); in sirfsoc_dma_execute_hw_a6()
228 if (sdesc->cyclic) { in sirfsoc_dma_execute_hw_a6()
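
One visible difference between the three execute variants above: the a7v2 path writes sdesc->addr to SIRFSOC_DMA_CH_ADDR directly (file line 189), while the a7v1 and a6 paths write sdesc->addr >> 2 (file lines 207 and 227), presumably a 32-bit-word address, at a per-channel register offset of cid * 0x10. A standalone sketch of that address arithmetic, under those assumptions:

#include <stdint.h>
#include <stdio.h>

/*
 * Sketch only: the value programmed into SIRFSOC_DMA_CH_ADDR by the
 * different execute variants in the listing.  The a7v2 path writes the
 * byte address as-is; the a6/a7v1 paths write addr >> 2 (assumed to be
 * a word address) at a per-channel offset of cid * 0x10.
 */
static uint32_t ch_addr_value_a7v2(uint32_t addr)
{
	return addr;		/* file line 189: sdesc->addr */
}

static uint32_t ch_addr_value_a6_a7v1(uint32_t addr)
{
	return addr >> 2;	/* file lines 207/227: sdesc->addr >> 2 */
}

static uint32_t ch_reg_offset(unsigned int cid)
{
	return cid * 0x10;	/* per-channel spacing used by a6/a7v1 */
}

int main(void)
{
	uint32_t addr = 0x40001000;	/* hypothetical DMA buffer address */

	printf("a7v2:    0x%08x\n", (unsigned int)ch_addr_value_a7v2(addr));
	printf("a6/a7v1: 0x%08x at offset 0x%x (cid 3)\n",
	       (unsigned int)ch_addr_value_a6_a7v1(addr),
	       (unsigned int)ch_reg_offset(3));
	return 0;
}
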
241 struct sirfsoc_dma_desc *sdesc = NULL; in sirfsoc_dma_execute() local
249 sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc, in sirfsoc_dma_execute()
252 list_move_tail(&sdesc->node, &schan->active); in sirfsoc_dma_execute()
258 sdma->exec_desc(sdesc, cid, schan->mode, base); in sirfsoc_dma_execute()
260 if (sdesc->cyclic) in sirfsoc_dma_execute()
269 struct sirfsoc_dma_desc *sdesc = NULL; in sirfsoc_dma_irq() local
285 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
287 if (!sdesc->cyclic) { in sirfsoc_dma_irq()
291 dma_cookie_complete(&sdesc->desc); in sirfsoc_dma_irq()
307 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
309 if (!sdesc->cyclic) { in sirfsoc_dma_irq()
310 chain = sdesc->chain; in sirfsoc_dma_irq()
317 dma_cookie_complete(&sdesc->desc); in sirfsoc_dma_irq()
321 } else if (sdesc->cyclic && (is & in sirfsoc_dma_irq()
343 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_process_completed() local
360 list_for_each_entry(sdesc, &list, node) { in sirfsoc_dma_process_completed()
361 desc = &sdesc->desc; in sirfsoc_dma_process_completed()
382 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_process_completed()
389 desc = &sdesc->desc; in sirfsoc_dma_process_completed()
411 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_tx_submit() local
415 sdesc = container_of(txd, struct sirfsoc_dma_desc, desc); in sirfsoc_dma_tx_submit()
420 list_move_tail(&sdesc->node, &schan->queued); in sirfsoc_dma_tx_submit()
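
Taken together, the channel lists named in this listing (schan->free, schan->prepared, schan->queued, schan->active) suggest a descriptor lifecycle: prep_interleaved/prep_cyclic take a descriptor off free (file lines 710 and 786) and add it to prepared (file lines 744 and 802), tx_submit moves it onto queued (file line 420), and sirfsoc_dma_execute moves it onto active (file line 252). A minimal sketch of that state progression; recycling a completed descriptor back to free is an assumption, not shown in the fragments above.

#include <stdio.h>

/*
 * Sketch only: descriptor states suggested by the channel list names
 * in this listing.  The transition order follows the fragments above;
 * the return to the free list after completion is assumed.
 */
enum sdesc_state {
	SDESC_FREE,	/* on schan->free, available to the prep_* calls */
	SDESC_PREPARED,	/* on schan->prepared, after prep_interleaved/cyclic */
	SDESC_QUEUED,	/* on schan->queued, after tx_submit */
	SDESC_ACTIVE,	/* on schan->active, after sirfsoc_dma_execute */
};

static const char *sdesc_state_name(enum sdesc_state s)
{
	static const char * const names[] = {
		"free", "prepared", "queued", "active"
	};
	return names[s];
}

int main(void)
{
	enum sdesc_state s;

	/* walk the lifecycle in the order the listing implies */
	for (s = SDESC_FREE; s <= SDESC_ACTIVE; s++)
		printf("-> %s\n", sdesc_state_name(s));
	return 0;
}
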
564 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_alloc_chan_resources() local
573 sdesc = kzalloc(sizeof(*sdesc), GFP_KERNEL); in sirfsoc_dma_alloc_chan_resources()
574 if (!sdesc) { in sirfsoc_dma_alloc_chan_resources()
580 dma_async_tx_descriptor_init(&sdesc->desc, chan); in sirfsoc_dma_alloc_chan_resources()
581 sdesc->desc.flags = DMA_CTRL_ACK; in sirfsoc_dma_alloc_chan_resources()
582 sdesc->desc.tx_submit = sirfsoc_dma_tx_submit; in sirfsoc_dma_alloc_chan_resources()
584 list_add_tail(&sdesc->node, &descs); in sirfsoc_dma_alloc_chan_resources()
604 struct sirfsoc_dma_desc *sdesc, *tmp; in sirfsoc_dma_free_chan_resources() local
622 list_for_each_entry_safe(sdesc, tmp, &descs, node) in sirfsoc_dma_free_chan_resources()
623 kfree(sdesc); in sirfsoc_dma_free_chan_resources()
651 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_tx_status() local
665 sdesc = list_first_entry(&schan->active, struct sirfsoc_dma_desc, node); in sirfsoc_dma_tx_status()
666 if (sdesc->cyclic) in sirfsoc_dma_tx_status()
667 dma_request_bytes = (sdesc->xlen + 1) * (sdesc->ylen + 1) * in sirfsoc_dma_tx_status()
668 (sdesc->width * SIRFSOC_DMA_WORD_LEN); in sirfsoc_dma_tx_status()
670 dma_request_bytes = sdesc->xlen * SIRFSOC_DMA_WORD_LEN; in sirfsoc_dma_tx_status()
684 residue = dma_request_bytes - (dma_pos - sdesc->addr); in sirfsoc_dma_tx_status()
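
The tx_status fragment above (file lines 665-684) derives the residue from the active descriptor and the current hardware position. A standalone sketch of that arithmetic; SIRFSOC_DMA_WORD_LEN is assumed here to be 4 bytes, and dma_pos stands for the byte address the hardware has reached (how it is read back from the controller is not visible in this listing).

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define WORD_LEN 4	/* assumed value of SIRFSOC_DMA_WORD_LEN, in bytes */

/*
 * Sketch only: the residue arithmetic from the tx_status fragment
 * (file lines 666-670 and 684).  dma_pos is the byte address the
 * hardware has reached; reading it back is not shown above.
 */
static uint32_t residue_bytes(bool cyclic, uint32_t xlen, uint32_t ylen,
			      uint32_t width, uint32_t addr, uint32_t dma_pos)
{
	uint32_t dma_request_bytes;

	if (cyclic)
		dma_request_bytes = (xlen + 1) * (ylen + 1) * (width * WORD_LEN);
	else
		dma_request_bytes = xlen * WORD_LEN;

	return dma_request_bytes - (dma_pos - addr);
}

int main(void)
{
	/* hypothetical non-cyclic transfer: 256 words, hardware half-way */
	uint32_t addr = 0x40001000;

	printf("residue = %u bytes\n",
	       (unsigned int)residue_bytes(false, 256, 0, 0, addr, addr + 512));
	return 0;
}
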
698 struct sirfsoc_dma_desc *sdesc = NULL; in sirfsoc_dma_prep_interleaved() local
710 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_interleaved()
712 list_del(&sdesc->node); in sirfsoc_dma_prep_interleaved()
716 if (!sdesc) { in sirfsoc_dma_prep_interleaved()
731 sdesc->cyclic = 0; in sirfsoc_dma_prep_interleaved()
732 sdesc->xlen = xt->sgl[0].size / SIRFSOC_DMA_WORD_LEN; in sirfsoc_dma_prep_interleaved()
733 sdesc->width = (xt->sgl[0].size + xt->sgl[0].icg) / in sirfsoc_dma_prep_interleaved()
735 sdesc->ylen = xt->numf - 1; in sirfsoc_dma_prep_interleaved()
737 sdesc->addr = xt->src_start; in sirfsoc_dma_prep_interleaved()
738 sdesc->dir = 1; in sirfsoc_dma_prep_interleaved()
740 sdesc->addr = xt->dst_start; in sirfsoc_dma_prep_interleaved()
741 sdesc->dir = 0; in sirfsoc_dma_prep_interleaved()
744 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_interleaved()
752 return &sdesc->desc; in sirfsoc_dma_prep_interleaved()
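
The prep_interleaved fragment above (file lines 731-741) maps the interleaved template onto the descriptor: xlen from the chunk size, width from size plus the inter-chunk gap (icg), ylen from the frame count, and addr/dir from the transfer direction. A standalone sketch of that mapping; the divisor truncated off file line 734 is assumed to be the same 4-byte word length, and the direction test on file line 736 (not in the listing) is assumed to select src_start/dir=1 for one direction and dst_start/dir=0 for the other.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define WORD_LEN 4	/* assumed value of SIRFSOC_DMA_WORD_LEN, in bytes */

/* user-space stand-in for the descriptor fields set by prep_interleaved */
struct sdesc_model {
	uint32_t addr;
	uint32_t xlen, ylen, width;
	int dir;
	bool cyclic;
};

/*
 * Sketch only: the field mapping from the prep_interleaved fragment
 * (file lines 731-741).  size and icg are in bytes, numf is the frame
 * count; mem_to_dev stands in for the truncated direction test.
 */
static void prep_interleaved_model(struct sdesc_model *d,
				   uint32_t size, uint32_t icg, uint32_t numf,
				   uint32_t src_start, uint32_t dst_start,
				   bool mem_to_dev)
{
	d->cyclic = false;			/* file line 731 */
	d->xlen = size / WORD_LEN;		/* file line 732 */
	d->width = (size + icg) / WORD_LEN;	/* file lines 733-734, divisor assumed */
	d->ylen = numf - 1;			/* file line 735 */
	if (mem_to_dev) {
		d->addr = src_start;		/* file line 737 */
		d->dir = 1;			/* file line 738 */
	} else {
		d->addr = dst_start;		/* file line 740 */
		d->dir = 0;			/* file line 741 */
	}
}

int main(void)
{
	struct sdesc_model d;

	/* hypothetical template: 64-byte chunks, 192-byte gap, 8 frames */
	prep_interleaved_model(&d, 64, 192, 8, 0x40001000, 0x10010000, true);
	printf("xlen=%u width=%u ylen=%u dir=%d\n",
	       (unsigned int)d.xlen, (unsigned int)d.width,
	       (unsigned int)d.ylen, d.dir);	/* xlen=16 width=64 ylen=7 dir=1 */
	return 0;
}
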
766 struct sirfsoc_dma_desc *sdesc = NULL; in sirfsoc_dma_prep_cyclic() local
786 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_cyclic()
788 list_del(&sdesc->node); in sirfsoc_dma_prep_cyclic()
792 if (!sdesc) in sirfsoc_dma_prep_cyclic()
797 sdesc->addr = addr; in sirfsoc_dma_prep_cyclic()
798 sdesc->cyclic = 1; in sirfsoc_dma_prep_cyclic()
799 sdesc->xlen = 0; in sirfsoc_dma_prep_cyclic()
800 sdesc->ylen = buf_len / SIRFSOC_DMA_WORD_LEN - 1; in sirfsoc_dma_prep_cyclic()
801 sdesc->width = 1; in sirfsoc_dma_prep_cyclic()
802 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_cyclic()
805 return &sdesc->desc; in sirfsoc_dma_prep_cyclic()
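
The prep_cyclic fragment above (file lines 797-801) describes the whole cyclic buffer as a single line: xlen is 0, width is 1, and ylen is derived from the buffer length. A standalone sketch of those assignments, again assuming the 4-byte word length; how periods are validated or reported is not visible in this listing.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define WORD_LEN 4	/* assumed value of SIRFSOC_DMA_WORD_LEN, in bytes */

/* user-space stand-in for the descriptor fields set by prep_cyclic */
struct sdesc_model {
	uint32_t addr;
	uint32_t xlen, ylen, width;
	bool cyclic;
};

/*
 * Sketch only: the field assignments from the prep_cyclic fragment
 * (file lines 797-801).  buf_len is the total cyclic buffer size in
 * bytes; period handling is not shown in the listing above.
 */
static void prep_cyclic_model(struct sdesc_model *d, uint32_t addr,
			      uint32_t buf_len)
{
	d->addr = addr;				/* file line 797 */
	d->cyclic = true;			/* file line 798 */
	d->xlen = 0;				/* file line 799 */
	d->ylen = buf_len / WORD_LEN - 1;	/* file line 800 */
	d->width = 1;				/* file line 801 */
}

int main(void)
{
	struct sdesc_model d;

	/* hypothetical 4 KiB cyclic buffer */
	prep_cyclic_model(&d, 0x40002000, 4096);
	printf("ylen=%u (%u words per loop)\n",
	       (unsigned int)d.ylen, (unsigned int)(d.ylen + 1));	/* ylen=1023 */
	return 0;
}
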
1018 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_pm_suspend() local
1051 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_pm_suspend()
1069 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_pm_resume() local
1097 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_pm_resume()
1100 writel_relaxed(sdesc->width, in sirfsoc_dma_pm_resume()
1102 writel_relaxed(sdesc->xlen, in sirfsoc_dma_pm_resume()
1104 writel_relaxed(sdesc->ylen, in sirfsoc_dma_pm_resume()
1109 writel_relaxed(sdesc->addr, in sirfsoc_dma_pm_resume()
1112 writel_relaxed(sdesc->addr >> 2, in sirfsoc_dma_pm_resume()