Lines matching refs: od
222 static void omap_dma_glbl_write(struct omap_dmadev *od, unsigned reg, unsigned val) in omap_dma_glbl_write() argument
224 const struct omap_dma_reg *r = od->reg_map + reg; in omap_dma_glbl_write()
228 omap_dma_write(val, r->type, od->base + r->offset); in omap_dma_glbl_write()
231 static unsigned omap_dma_glbl_read(struct omap_dmadev *od, unsigned reg) in omap_dma_glbl_read() argument
233 const struct omap_dma_reg *r = od->reg_map + reg; in omap_dma_glbl_read()
237 return omap_dma_read(r->type, od->base + r->offset); in omap_dma_glbl_read()
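The two global register accessors referenced above differ only in direction; a minimal reconstruction from the fragments shown (omap_dma_write()/omap_dma_read() and struct omap_dma_reg come from the surrounding driver, and any sanity checks omitted by the search are left out):

	static void omap_dma_glbl_write(struct omap_dmadev *od, unsigned reg,
					unsigned val)
	{
		/* look up type and offset for this register in the SoC's map */
		const struct omap_dma_reg *r = od->reg_map + reg;

		omap_dma_write(val, r->type, od->base + r->offset);
	}

	static unsigned omap_dma_glbl_read(struct omap_dmadev *od, unsigned reg)
	{
		const struct omap_dma_reg *r = od->reg_map + reg;

		return omap_dma_read(r->type, od->base + r->offset);
	}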
272 static void omap_dma_assign(struct omap_dmadev *od, struct omap_chan *c, in omap_dma_assign() argument
275 c->channel_base = od->base + od->plat->channel_stride * lch; in omap_dma_assign()
277 od->lch_map[lch] = c; in omap_dma_assign()
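omap_dma_assign() is the piece that ties a logical DMA channel to its virtual channel: it caches the channel's register base and records the mapping the interrupt handler later consults. A sketch assembled from the two fragments above:

	static void omap_dma_assign(struct omap_dmadev *od, struct omap_chan *c,
				    unsigned lch)
	{
		/* per-channel registers sit at a fixed stride from the base */
		c->channel_base = od->base + od->plat->channel_stride * lch;

		/* remember which omap_chan owns logical channel 'lch' */
		od->lch_map[lch] = c;
	}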
282 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_start() local
284 if (__dma_omap15xx(od->plat->dma_attr)) in omap_dma_start()
300 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_stop() local
309 if (od->plat->errata & DMA_ERRATA_i541 && val & CCR_TRIGGER_SRC) { in omap_dma_stop()
313 sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG); in omap_dma_stop()
316 omap_dma_glbl_write(od, OCP_SYSCONFIG, val); in omap_dma_stop()
339 omap_dma_glbl_write(od, OCP_SYSCONFIG, sysconfig); in omap_dma_stop()
347 if (!__dma_omap15xx(od->plat->dma_attr) && c->cyclic) { in omap_dma_stop()
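Within omap_dma_stop(), the DMA_ERRATA_i541 references form a save/modify/restore bracket around OCP_SYSCONFIG. The sketch below only shows that shape; the idle-mode bit manipulation between the read and the first write, and the channel disable itself, are not part of the listing and are indicated by comments only:

	if (od->plat->errata & DMA_ERRATA_i541 && val & CCR_TRIGGER_SRC) {
		u32 sysconfig;

		/* save the controller's OCP_SYSCONFIG before touching it */
		sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG);
		val = sysconfig;
		/* ... adjust the idle-mode bits in 'val' (not shown in the listing) ... */
		omap_dma_glbl_write(od, OCP_SYSCONFIG, val);

		/* ... disable the channel and wait for it to drain ... */

		/* restore the saved value once the channel has stopped */
		omap_dma_glbl_write(od, OCP_SYSCONFIG, sysconfig);
	}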
482 struct omap_dmadev *od = devid; in omap_dma_irq() local
485 spin_lock(&od->irq_lock); in omap_dma_irq()
487 status = omap_dma_glbl_read(od, IRQSTATUS_L1); in omap_dma_irq()
488 status &= od->irq_enable_mask; in omap_dma_irq()
490 spin_unlock(&od->irq_lock); in omap_dma_irq()
502 c = od->lch_map[channel]; in omap_dma_irq()
505 dev_err(od->ddev.dev, "invalid channel %u\n", channel); in omap_dma_irq()
510 omap_dma_glbl_write(od, IRQSTATUS_L1, mask); in omap_dma_irq()
515 spin_unlock(&od->irq_lock); in omap_dma_irq()
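Taken together, the omap_dma_irq() references describe a status-read/ack loop over IRQSTATUS_L1, protected by od->irq_lock, with od->lch_map translating each pending bit back to its channel. A hedged reconstruction; the per-channel completion handling at the end of the loop is assumed, not shown:

	static irqreturn_t omap_dma_irq(int irq, void *devid)
	{
		struct omap_dmadev *od = devid;
		struct omap_chan *c;
		u32 status;

		spin_lock(&od->irq_lock);

		/* only look at channels this driver enabled on line L1 */
		status = omap_dma_glbl_read(od, IRQSTATUS_L1);
		status &= od->irq_enable_mask;
		if (status == 0) {
			spin_unlock(&od->irq_lock);
			return IRQ_NONE;
		}

		while (status) {
			unsigned channel = __ffs(status);
			u32 mask = BIT(channel);

			status &= ~mask;

			c = od->lch_map[channel];
			if (c == NULL) {
				/* spurious bit: no channel was assigned here */
				dev_err(od->ddev.dev, "invalid channel %u\n", channel);
				continue;
			}

			/* ack this channel at the controller before handling it */
			omap_dma_glbl_write(od, IRQSTATUS_L1, mask);

			/* ... per-channel status handling (assumed) ... */
		}

		spin_unlock(&od->irq_lock);

		return IRQ_HANDLED;
	}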
522 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_alloc_chan_resources() local
526 if (od->legacy) { in omap_dma_alloc_chan_resources()
534 dev_dbg(od->ddev.dev, "allocating channel %u for %u\n", in omap_dma_alloc_chan_resources()
538 omap_dma_assign(od, c, c->dma_ch); in omap_dma_alloc_chan_resources()
540 if (!od->legacy) { in omap_dma_alloc_chan_resources()
543 spin_lock_irq(&od->irq_lock); in omap_dma_alloc_chan_resources()
545 omap_dma_glbl_write(od, IRQSTATUS_L1, val); in omap_dma_alloc_chan_resources()
546 od->irq_enable_mask |= val; in omap_dma_alloc_chan_resources()
547 omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask); in omap_dma_alloc_chan_resources()
549 val = omap_dma_glbl_read(od, IRQENABLE_L0); in omap_dma_alloc_chan_resources()
551 omap_dma_glbl_write(od, IRQENABLE_L0, val); in omap_dma_alloc_chan_resources()
552 spin_unlock_irq(&od->irq_lock); in omap_dma_alloc_chan_resources()
557 if (__dma_omap16xx(od->plat->dma_attr)) { in omap_dma_alloc_chan_resources()
568 if (od->plat->errata & DMA_ERRATA_IFRAME_BUFFERING) in omap_dma_alloc_chan_resources()
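The non-legacy branch of omap_dma_alloc_chan_resources() steers the freshly assigned channel's interrupt away from line L0 and enables it on line L1, the line omap_dma_irq() services. Reassembled from the register accesses listed; the mask-out step between the IRQENABLE_L0 read and write sits on a line the search did not match and is inferred from context:

	if (!od->legacy) {
		unsigned val = BIT(c->dma_ch);

		spin_lock_irq(&od->irq_lock);
		/* clear stale status, then enable the channel on line L1 */
		omap_dma_glbl_write(od, IRQSTATUS_L1, val);
		od->irq_enable_mask |= val;
		omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);

		/* drop the channel from line L0 (inferred read-modify-write) */
		val = omap_dma_glbl_read(od, IRQENABLE_L0);
		val &= ~BIT(c->dma_ch);
		omap_dma_glbl_write(od, IRQENABLE_L0, val);
		spin_unlock_irq(&od->irq_lock);
	}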
576 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_free_chan_resources() local
579 if (!od->legacy) { in omap_dma_free_chan_resources()
580 spin_lock_irq(&od->irq_lock); in omap_dma_free_chan_resources()
581 od->irq_enable_mask &= ~BIT(c->dma_ch); in omap_dma_free_chan_resources()
582 omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask); in omap_dma_free_chan_resources()
583 spin_unlock_irq(&od->irq_lock); in omap_dma_free_chan_resources()
587 od->lch_map[c->dma_ch] = NULL; in omap_dma_free_chan_resources()
591 dev_dbg(od->ddev.dev, "freeing channel for %u\n", c->dma_sig); in omap_dma_free_chan_resources()
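omap_dma_free_chan_resources() undoes that routing; its fragments reassemble directly into the mirror image of the allocation path:

	if (!od->legacy) {
		spin_lock_irq(&od->irq_lock);
		/* stop servicing this channel on line L1 */
		od->irq_enable_mask &= ~BIT(c->dma_ch);
		omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);
		spin_unlock_irq(&od->irq_lock);
	}

	/* forget the logical-channel mapping before releasing the channel */
	od->lch_map[c->dma_ch] = NULL;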
633 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_chan_read_3_3() local
637 if (val == 0 && od->plat->errata & DMA_ERRATA_3_3) in omap_dma_chan_read_3_3()
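The DMA_ERRATA_3_3 helper re-reads a channel register when the first read comes back as zero. Only the zero check is visible in the listing; the channel-level read helper used here (omap_dma_chan_read()) and the re-read itself are assumptions:

	static u32 omap_dma_chan_read_3_3(struct omap_chan *c, unsigned reg)
	{
		struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
		u32 val;

		val = omap_dma_chan_read(c, reg);	/* assumed read helper */
		if (val == 0 && od->plat->errata & DMA_ERRATA_3_3)
			val = omap_dma_chan_read(c, reg);	/* re-read per erratum */

		return val;
	}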
645 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_get_src_pos() local
648 if (__dma_omap15xx(od->plat->dma_attr)) { in omap_dma_get_src_pos()
671 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_get_dst_pos() local
674 if (__dma_omap15xx(od->plat->dma_attr)) { in omap_dma_get_dst_pos()
760 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_prep_slave_sg() local
828 if (od->plat->errata & DMA_ERRATA_PARALLEL_CHANNELS) in omap_dma_prep_slave_sg()
858 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_prep_dma_cyclic() local
941 if (__dma_omap15xx(od->plat->dma_attr)) in omap_dma_prep_dma_cyclic()
1040 static int omap_dma_chan_init(struct omap_dmadev *od, int dma_sig) in omap_dma_chan_init() argument
1048 c->reg_map = od->reg_map; in omap_dma_chan_init()
1051 vchan_init(&c->vc, &od->ddev); in omap_dma_chan_init()
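omap_dma_chan_init() hands each channel a copy of the register map and registers its virtual channel with the dmaengine device. The allocation and error handling around the two listed lines are assumptions:

	static int omap_dma_chan_init(struct omap_dmadev *od, int dma_sig)
	{
		struct omap_chan *c;

		c = kzalloc(sizeof(*c), GFP_KERNEL);	/* assumed allocation */
		if (!c)
			return -ENOMEM;

		c->reg_map = od->reg_map;		/* share the SoC register map */
		vchan_init(&c->vc, &od->ddev);		/* links c into od->ddev.channels */

		return 0;
	}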
1057 static void omap_dma_free(struct omap_dmadev *od) in omap_dma_free() argument
1059 tasklet_kill(&od->task); in omap_dma_free()
1060 while (!list_empty(&od->ddev.channels)) { in omap_dma_free()
1061 struct omap_chan *c = list_first_entry(&od->ddev.channels, in omap_dma_free()
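omap_dma_free() walks that same channel list back down. The listing shows the tasklet kill, the emptiness test, and the list_first_entry() lookup; the list member (taken from the virt-dma layout) and the unlink/free pair inside the loop are inferred:

	static void omap_dma_free(struct omap_dmadev *od)
	{
		tasklet_kill(&od->task);

		while (!list_empty(&od->ddev.channels)) {
			struct omap_chan *c = list_first_entry(&od->ddev.channels,
					struct omap_chan, vc.chan.device_node);

			list_del(&c->vc.chan.device_node);	/* inferred unlink */
			kfree(c);				/* inferred free */
		}
	}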
1076 struct omap_dmadev *od; in omap_dma_probe() local
1080 od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL); in omap_dma_probe()
1081 if (!od) in omap_dma_probe()
1085 od->base = devm_ioremap_resource(&pdev->dev, res); in omap_dma_probe()
1086 if (IS_ERR(od->base)) in omap_dma_probe()
1087 return PTR_ERR(od->base); in omap_dma_probe()
1089 od->plat = omap_get_plat_info(); in omap_dma_probe()
1090 if (!od->plat) in omap_dma_probe()
1093 od->reg_map = od->plat->reg_map; in omap_dma_probe()
1095 dma_cap_set(DMA_SLAVE, od->ddev.cap_mask); in omap_dma_probe()
1096 dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask); in omap_dma_probe()
1097 od->ddev.device_alloc_chan_resources = omap_dma_alloc_chan_resources; in omap_dma_probe()
1098 od->ddev.device_free_chan_resources = omap_dma_free_chan_resources; in omap_dma_probe()
1099 od->ddev.device_tx_status = omap_dma_tx_status; in omap_dma_probe()
1100 od->ddev.device_issue_pending = omap_dma_issue_pending; in omap_dma_probe()
1101 od->ddev.device_prep_slave_sg = omap_dma_prep_slave_sg; in omap_dma_probe()
1102 od->ddev.device_prep_dma_cyclic = omap_dma_prep_dma_cyclic; in omap_dma_probe()
1103 od->ddev.device_config = omap_dma_slave_config; in omap_dma_probe()
1104 od->ddev.device_pause = omap_dma_pause; in omap_dma_probe()
1105 od->ddev.device_resume = omap_dma_resume; in omap_dma_probe()
1106 od->ddev.device_terminate_all = omap_dma_terminate_all; in omap_dma_probe()
1107 od->ddev.src_addr_widths = OMAP_DMA_BUSWIDTHS; in omap_dma_probe()
1108 od->ddev.dst_addr_widths = OMAP_DMA_BUSWIDTHS; in omap_dma_probe()
1109 od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in omap_dma_probe()
1110 od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in omap_dma_probe()
1111 od->ddev.dev = &pdev->dev; in omap_dma_probe()
1112 INIT_LIST_HEAD(&od->ddev.channels); in omap_dma_probe()
1113 INIT_LIST_HEAD(&od->pending); in omap_dma_probe()
1114 spin_lock_init(&od->lock); in omap_dma_probe()
1115 spin_lock_init(&od->irq_lock); in omap_dma_probe()
1117 tasklet_init(&od->task, omap_dma_sched, (unsigned long)od); in omap_dma_probe()
1120 rc = omap_dma_chan_init(od, i); in omap_dma_probe()
1122 omap_dma_free(od); in omap_dma_probe()
1130 od->legacy = true; in omap_dma_probe()
1133 od->irq_enable_mask = 0; in omap_dma_probe()
1134 omap_dma_glbl_write(od, IRQENABLE_L1, 0); in omap_dma_probe()
1137 IRQF_SHARED, "omap-dma-engine", od); in omap_dma_probe()
1142 rc = dma_async_device_register(&od->ddev); in omap_dma_probe()
1146 omap_dma_free(od); in omap_dma_probe()
1150 platform_set_drvdata(pdev, od); in omap_dma_probe()
1153 omap_dma_info.dma_cap = od->ddev.cap_mask; in omap_dma_probe()
1160 dma_async_device_unregister(&od->ddev); in omap_dma_probe()
1161 omap_dma_free(od); in omap_dma_probe()
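The probe references around source lines 1130-1137 cover the legacy/non-legacy split for interrupt handling. A hedged sketch of that branch: only the three od accesses come from the listing; the platform_get_irq() call supplying 'irq' and the surrounding error handling are assumptions:

	irq = platform_get_irq(pdev, 1);		/* assumed source of 'irq' */
	if (irq <= 0) {
		/* no dedicated line: fall back to the legacy mach-omap handler */
		od->legacy = true;
	} else {
		/* mask everything on line L1 until channels are allocated */
		od->irq_enable_mask = 0;
		omap_dma_glbl_write(od, IRQENABLE_L1, 0);

		rc = devm_request_irq(&pdev->dev, irq, omap_dma_irq,
				      IRQF_SHARED, "omap-dma-engine", od);
		if (rc)
			return rc;			/* assumed error path */
	}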
1172 struct omap_dmadev *od = platform_get_drvdata(pdev); in omap_dma_remove() local
1177 dma_async_device_unregister(&od->ddev); in omap_dma_remove()
1179 if (!od->legacy) { in omap_dma_remove()
1181 omap_dma_glbl_write(od, IRQENABLE_L0, 0); in omap_dma_remove()
1184 omap_dma_free(od); in omap_dma_remove()
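Finally, omap_dma_remove() unwinds in the opposite order of probe: unregister from the dmaengine core, quiet line L0 if this driver had taken over the interrupt, then release the channels. A sketch of that order from the fragments; the return type and trailing return are assumptions:

	static int omap_dma_remove(struct platform_device *pdev)
	{
		struct omap_dmadev *od = platform_get_drvdata(pdev);

		dma_async_device_unregister(&od->ddev);

		if (!od->legacy) {
			/* disable all channel interrupts on line L0 */
			omap_dma_glbl_write(od, IRQENABLE_L0, 0);
		}

		omap_dma_free(od);

		return 0;
	}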