Lines Matching refs:dma

314 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_dma_self_test() local
339 dma_chan = container_of(dma->channels.next, struct dma_chan, in ioat_dma_self_test()
341 if (dma->device_alloc_chan_resources(dma_chan) < 1) { in ioat_dma_self_test()
377 dma->device_issue_pending(dma_chan); in ioat_dma_self_test()
382 dma->device_tx_status(dma_chan, cookie, NULL) in ioat_dma_self_test()
399 dma->device_free_chan_resources(dma_chan); in ioat_dma_self_test()
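
The matches above trace the full dmaengine transaction lifecycle the memcpy self-test exercises: take the first channel off dma->channels, allocate its resources, prep and submit a descriptor, kick the engine with device_issue_pending(), poll device_tx_status() for completion, and free the channel again. A minimal sketch of that flow, assuming dma_src/dma_dest are already-mapped DMA addresses; the helper name example_memcpy_test() is ours, not the driver's:

    #include <linux/dmaengine.h>
    #include <linux/delay.h>

    /* Sketch of the lifecycle above: one memcpy descriptor through the
     * first channel of an already-registered struct dma_device.
     */
    static int example_memcpy_test(struct dma_device *dma, dma_addr_t dma_dest,
                                   dma_addr_t dma_src, size_t len)
    {
            struct dma_async_tx_descriptor *tx;
            struct dma_chan *dma_chan;
            dma_cookie_t cookie;
            int err = 0;

            dma_chan = container_of(dma->channels.next, struct dma_chan,
                                    device_node);
            if (dma->device_alloc_chan_resources(dma_chan) < 1)
                    return -ENODEV;

            tx = dma->device_prep_dma_memcpy(dma_chan, dma_dest, dma_src,
                                             len, DMA_PREP_INTERRUPT);
            if (!tx) {
                    err = -ENOMEM;
                    goto out;
            }

            cookie = tx->tx_submit(tx);
            dma->device_issue_pending(dma_chan);
            msleep(1);                      /* let the engine run */

            if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE)
                    err = -EIO;
    out:
            dma->device_free_chan_resources(dma_chan);
            return err;
    }
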
503 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_probe() local
528 dma_cap_set(DMA_MEMCPY, dma->cap_mask); in ioat_probe()
529 dma->dev = &pdev->dev; in ioat_probe()
531 if (!dma->chancnt) { in ioat_probe()
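
Before registration, ioat_probe() only has to advertise what the engine can do and name the device that will back DMA mappings; the chancnt check bails out if enumeration found no usable channels. A hedged sketch of just what these matches show (the real probe does considerably more):

    #include <linux/dmaengine.h>
    #include <linux/pci.h>

    static int example_probe_setup(struct dma_device *dma, struct pci_dev *pdev)
    {
            dma_cap_set(DMA_MEMCPY, dma->cap_mask); /* engine offloads memcpy */
            dma->dev = &pdev->dev;                  /* device for DMA mappings */

            if (!dma->chancnt)                      /* no channels enumerated */
                    return -ENODEV;
            return 0;
    }
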
571 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_dma_remove() local
577 dma_async_device_unregister(dma); in ioat_dma_remove()
582 INIT_LIST_HEAD(&dma->channels); in ioat_dma_remove()
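
Teardown is the mirror image: unregister the device from the dmaengine core first, then reset the channel list so nothing dangles. A minimal sketch:

    #include <linux/dmaengine.h>

    static void example_remove(struct dma_device *dma)
    {
            dma_async_device_unregister(dma);       /* detach from the core */
            INIT_LIST_HEAD(&dma->channels);         /* drop stale channel links */
    }
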
593 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_enumerate_channels() local
597 INIT_LIST_HEAD(&dma->channels); in ioat_enumerate_channels()
598 dma->chancnt = readb(ioat_dma->reg_base + IOAT_CHANCNT_OFFSET); in ioat_enumerate_channels()
599 dma->chancnt &= 0x1f; /* bits [4:0] valid */ in ioat_enumerate_channels()
600 if (dma->chancnt > ARRAY_SIZE(ioat_dma->idx)) { in ioat_enumerate_channels()
602 dma->chancnt, ARRAY_SIZE(ioat_dma->idx)); in ioat_enumerate_channels()
603 dma->chancnt = ARRAY_SIZE(ioat_dma->idx); in ioat_enumerate_channels()
611 for (i = 0; i < dma->chancnt; i++) { in ioat_enumerate_channels()
624 dma->chancnt = i; in ioat_enumerate_channels()
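
Channel enumeration derives dma->chancnt straight from hardware: read the CHANCNT register, keep only bits [4:0], and clamp to the size of the driver's per-channel index array so the loop that follows cannot overrun idx[]. A sketch under those assumptions (EXAMPLE_CHANCNT_OFFSET and EXAMPLE_MAX_CHANS stand in for IOAT_CHANCNT_OFFSET and ARRAY_SIZE(ioat_dma->idx)):

    #include <linux/dmaengine.h>
    #include <linux/io.h>

    #define EXAMPLE_CHANCNT_OFFSET  0x00    /* IOAT_CHANCNT_OFFSET stand-in */
    #define EXAMPLE_MAX_CHANS       4       /* ARRAY_SIZE(ioat_dma->idx) stand-in */

    static void example_enumerate(struct dma_device *dma, void __iomem *reg_base)
    {
            INIT_LIST_HEAD(&dma->channels);

            dma->chancnt = readb(reg_base + EXAMPLE_CHANCNT_OFFSET);
            dma->chancnt &= 0x1f;                   /* bits [4:0] are valid */
            if (dma->chancnt > EXAMPLE_MAX_CHANS)   /* don't outrun idx[] */
                    dma->chancnt = EXAMPLE_MAX_CHANS;
    }
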
755 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_init_channel() local
762 ioat_chan->dma_chan.device = dma; in ioat_init_channel()
764 list_add_tail(&ioat_chan->dma_chan.device_node, &dma->channels); in ioat_init_channel()
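
Each channel is wired to its parent device in two steps: point dma_chan.device back at the struct dma_device, then append the channel to dma->channels; that list is what the self-tests above walk via channels.next. In sketch form:

    #include <linux/dmaengine.h>

    static void example_init_channel(struct dma_device *dma,
                                     struct dma_chan *chan)
    {
            chan->device = dma;                     /* back-pointer to device */
            list_add_tail(&chan->device_node, &dma->channels);
    }
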
791 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_xor_val_self_test() local
796 if (!dma_has_cap(DMA_XOR, dma->cap_mask)) in ioat_xor_val_self_test()
831 dma_chan = container_of(dma->channels.next, struct dma_chan, in ioat_xor_val_self_test()
833 if (dma->device_alloc_chan_resources(dma_chan) < 1) { in ioat_xor_val_self_test()
853 tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs, in ioat_xor_val_self_test()
873 dma->device_issue_pending(dma_chan); in ioat_xor_val_self_test()
878 dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) { in ioat_xor_val_self_test()
922 tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs, in ioat_xor_val_self_test()
941 dma->device_issue_pending(dma_chan); in ioat_xor_val_self_test()
946 dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) { in ioat_xor_val_self_test()
975 tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs, in ioat_xor_val_self_test()
994 dma->device_issue_pending(dma_chan); in ioat_xor_val_self_test()
999 dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) { in ioat_xor_val_self_test()
1031 dma->device_free_chan_resources(dma_chan); in ioat_xor_val_self_test()
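
The XOR self-test follows the same allocate/prep/submit/poll/free lifecycle as the memcpy test, but it is gated on dma_has_cap(DMA_XOR, ...) and adds validation descriptors: device_prep_dma_xor_val() has the engine XOR the sources and report zero/non-zero into a result word instead of writing memory. A condensed sketch, assuming pre-mapped dma_srcs[]/dest_dma and eliding the second (deliberately corrupted) validation pass the driver also runs:

    #include <linux/dmaengine.h>
    #include <linux/delay.h>

    static int example_xor_val(struct dma_device *dma, dma_addr_t dest_dma,
                               dma_addr_t *dma_srcs, int src_cnt, size_t len)
    {
            struct dma_async_tx_descriptor *tx;
            enum sum_check_flags result = 0;
            struct dma_chan *dma_chan;
            dma_cookie_t cookie;
            int err = 0;

            if (!dma_has_cap(DMA_XOR, dma->cap_mask))
                    return 0;                       /* nothing to test */

            dma_chan = container_of(dma->channels.next, struct dma_chan,
                                    device_node);
            if (dma->device_alloc_chan_resources(dma_chan) < 1)
                    return -ENODEV;

            /* dest = XOR of all sources */
            tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
                                          src_cnt, len, DMA_PREP_INTERRUPT);
            if (!tx) {
                    err = -ENOMEM;
                    goto out;
            }
            cookie = tx->tx_submit(tx);
            dma->device_issue_pending(dma_chan);
            msleep(8);
            if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
                    err = -EIO;
                    goto out;
            }

            /* validate: engine reports pass/fail in 'result' */
            tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs, src_cnt,
                                              len, &result, DMA_PREP_INTERRUPT);
            if (!tx) {
                    err = -ENOMEM;
                    goto out;
            }
            cookie = tx->tx_submit(tx);
            dma->device_issue_pending(dma_chan);
            msleep(8);
            if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE)
                    err = -EIO;
            else if (result)
                    err = -ENODEV;                  /* engine saw a mismatch */
    out:
            dma->device_free_chan_resources(dma_chan);
            return err;
    }
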
1055 struct dma_device *dma; in ioat_intr_quirk() local
1060 dma = &ioat_dma->dma_dev; in ioat_intr_quirk()
1067 list_for_each_entry(c, &dma->channels, device_node) { in ioat_intr_quirk()
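
The interrupt quirk only needs to visit every channel the device registered, which is the standard list_for_each_entry() walk over dma->channels keyed by device_node:

    #include <linux/dmaengine.h>

    static void example_for_each_chan(struct dma_device *dma)
    {
            struct dma_chan *c;

            list_for_each_entry(c, &dma->channels, device_node)
                    dev_dbg(dma->dev, "channel %d present\n", c->chan_id);
    }
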
1083 struct dma_device *dma; in ioat3_dma_probe() local
1089 dma = &ioat_dma->dma_dev; in ioat3_dma_probe()
1090 dma->device_prep_dma_memcpy = ioat_dma_prep_memcpy_lock; in ioat3_dma_probe()
1091 dma->device_issue_pending = ioat_issue_pending; in ioat3_dma_probe()
1092 dma->device_alloc_chan_resources = ioat_alloc_chan_resources; in ioat3_dma_probe()
1093 dma->device_free_chan_resources = ioat_free_chan_resources; in ioat3_dma_probe()
1095 dma_cap_set(DMA_INTERRUPT, dma->cap_mask); in ioat3_dma_probe()
1096 dma->device_prep_dma_interrupt = ioat_prep_interrupt_lock; in ioat3_dma_probe()
1110 dma->max_xor = 8; in ioat3_dma_probe()
1112 dma_cap_set(DMA_XOR, dma->cap_mask); in ioat3_dma_probe()
1113 dma->device_prep_dma_xor = ioat_prep_xor; in ioat3_dma_probe()
1115 dma_cap_set(DMA_XOR_VAL, dma->cap_mask); in ioat3_dma_probe()
1116 dma->device_prep_dma_xor_val = ioat_prep_xor_val; in ioat3_dma_probe()
1122 dma->device_prep_dma_pq = ioat_prep_pq; in ioat3_dma_probe()
1123 dma->device_prep_dma_pq_val = ioat_prep_pq_val; in ioat3_dma_probe()
1124 dma_cap_set(DMA_PQ, dma->cap_mask); in ioat3_dma_probe()
1125 dma_cap_set(DMA_PQ_VAL, dma->cap_mask); in ioat3_dma_probe()
1128 dma_set_maxpq(dma, 16, 0); in ioat3_dma_probe()
1130 dma_set_maxpq(dma, 8, 0); in ioat3_dma_probe()
1133 dma->device_prep_dma_xor = ioat_prep_pqxor; in ioat3_dma_probe()
1134 dma->device_prep_dma_xor_val = ioat_prep_pqxor_val; in ioat3_dma_probe()
1135 dma_cap_set(DMA_XOR, dma->cap_mask); in ioat3_dma_probe()
1136 dma_cap_set(DMA_XOR_VAL, dma->cap_mask); in ioat3_dma_probe()
1139 dma->max_xor = 16; in ioat3_dma_probe()
1141 dma->max_xor = 8; in ioat3_dma_probe()
1145 dma->device_tx_status = ioat_tx_status; in ioat3_dma_probe()
1166 dma_cap_set(DMA_PRIVATE, dma->cap_mask); in ioat3_dma_probe()
1172 list_for_each_entry(c, &dma->channels, device_node) { in ioat3_dma_probe()
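
Taken together, the ioat3_dma_probe() matches build the device's capability matrix: the mandatory memcpy/interrupt hooks first, then XOR and PQ prep callbacks, hardware limits (max_xor, dma_set_maxpq()), and finally DMA_PRIVATE so the channels stay out of the general-purpose allocator. A sketch of that shape, where has_xor/has_pq are illustrative feature flags and the source limits follow the 8/16 values visible above:

    #include <linux/dmaengine.h>

    static void example_set_caps(struct dma_device *dma, bool has_xor,
                                 bool has_pq)
    {
            /* baseline ops every ioat engine advertises */
            dma_cap_set(DMA_MEMCPY, dma->cap_mask);
            dma_cap_set(DMA_INTERRUPT, dma->cap_mask);

            if (has_xor) {
                    dma->max_xor = 8;               /* hardware source limit */
                    dma_cap_set(DMA_XOR, dma->cap_mask);
                    dma_cap_set(DMA_XOR_VAL, dma->cap_mask);
            }

            if (has_pq) {
                    dma_cap_set(DMA_PQ, dma->cap_mask);
                    dma_cap_set(DMA_PQ_VAL, dma->cap_mask);
                    dma_set_maxpq(dma, 8, 0);       /* 8 PQ sources, no flags */
            }

            /* keep the channels away from the public channel allocator */
            dma_cap_set(DMA_PRIVATE, dma->cap_mask);
    }
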