Lines matching refs:mv_chan (Marvell XOR engine dmaengine driver, drivers/dma/mv_xor.c)

216 static void mv_chan_start_new_chain(struct mv_xor_chan *mv_chan,  in mv_chan_start_new_chain()  argument
219 dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: sw_desc %p\n", in mv_chan_start_new_chain()
223 mv_chan_set_next_descriptor(mv_chan, sw_desc->async_tx.phys); in mv_chan_start_new_chain()
225 mv_chan->pending++; in mv_chan_start_new_chain()
226 mv_xor_issue_pending(&mv_chan->dmachan); in mv_chan_start_new_chain()
231 struct mv_xor_chan *mv_chan, in mv_desc_run_tx_complete_actions() argument
256 mv_chan_clean_completed_slots(struct mv_xor_chan *mv_chan) in mv_chan_clean_completed_slots() argument
260 dev_dbg(mv_chan_to_devp(mv_chan), "%s %d\n", __func__, __LINE__); in mv_chan_clean_completed_slots()
261 list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots, in mv_chan_clean_completed_slots()
265 list_move_tail(&iter->node, &mv_chan->free_slots); in mv_chan_clean_completed_slots()
272 struct mv_xor_chan *mv_chan) in mv_desc_clean_slot() argument
274 dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: desc %p flags %d\n", in mv_desc_clean_slot()
282 list_move_tail(&desc->node, &mv_chan->completed_slots); in mv_desc_clean_slot()
284 list_move_tail(&desc->node, &mv_chan->free_slots); in mv_desc_clean_slot()
290 static void mv_chan_slot_cleanup(struct mv_xor_chan *mv_chan) in mv_chan_slot_cleanup() argument
294 int busy = mv_chan_is_busy(mv_chan); in mv_chan_slot_cleanup()
295 u32 current_desc = mv_chan_get_current_desc(mv_chan); in mv_chan_slot_cleanup()
299 dev_dbg(mv_chan_to_devp(mv_chan), "%s %d\n", __func__, __LINE__); in mv_chan_slot_cleanup()
300 dev_dbg(mv_chan_to_devp(mv_chan), "current_desc %x\n", current_desc); in mv_chan_slot_cleanup()
301 mv_chan_clean_completed_slots(mv_chan); in mv_chan_slot_cleanup()
307 list_for_each_entry_safe(iter, _iter, &mv_chan->chain, in mv_chan_slot_cleanup()
313 cookie = mv_desc_run_tx_complete_actions(iter, mv_chan, in mv_chan_slot_cleanup()
317 mv_desc_clean_slot(iter, mv_chan); in mv_chan_slot_cleanup()
332 if ((busy == 0) && !list_empty(&mv_chan->chain)) { in mv_chan_slot_cleanup()
338 iter = list_entry(mv_chan->chain.next, in mv_chan_slot_cleanup()
341 mv_chan_start_new_chain(mv_chan, iter); in mv_chan_slot_cleanup()
343 if (!list_is_last(&iter->node, &mv_chan->chain)) { in mv_chan_slot_cleanup()
351 mv_chan_start_new_chain(mv_chan, iter); in mv_chan_slot_cleanup()
357 tasklet_schedule(&mv_chan->irq_tasklet); in mv_chan_slot_cleanup()
363 mv_chan->dmachan.completed_cookie = cookie; in mv_chan_slot_cleanup()
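
The listing only shows the lines of mv_chan_slot_cleanup() that mention mv_chan; the loop's stopping condition is the descriptor the engine is currently fetching (current_desc). A condensed sketch of the walk, under the assumption that everything in front of the active descriptor is retired and the last completed cookie is then published:

    /* Sketch: retire finished descriptors from the submitted chain.
     * Descriptors ahead of the one the hardware is working on can be
     * completed; the walk stops at the active descriptor while the
     * channel is busy. (Condensed; the real loop also checks each
     * descriptor's hardware status word.)
     */
    list_for_each_entry_safe(iter, _iter, &mv_chan->chain, node) {
            if (busy && iter->async_tx.phys == current_desc)
                    break;          /* engine still on this descriptor */
            cookie = mv_desc_run_tx_complete_actions(iter, mv_chan, cookie);
            mv_desc_clean_slot(iter, mv_chan);  /* -> completed/free_slots */
    }
    if (cookie > 0)
            mv_chan->dmachan.completed_cookie = cookie;
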
376 mv_chan_alloc_slot(struct mv_xor_chan *mv_chan) in mv_chan_alloc_slot() argument
380 spin_lock_bh(&mv_chan->lock); in mv_chan_alloc_slot()
382 if (!list_empty(&mv_chan->free_slots)) { in mv_chan_alloc_slot()
383 iter = list_first_entry(&mv_chan->free_slots, in mv_chan_alloc_slot()
387 list_move_tail(&iter->node, &mv_chan->allocated_slots); in mv_chan_alloc_slot()
389 spin_unlock_bh(&mv_chan->lock); in mv_chan_alloc_slot()
399 spin_unlock_bh(&mv_chan->lock); in mv_chan_alloc_slot()
402 tasklet_schedule(&mv_chan->irq_tasklet); in mv_chan_alloc_slot()
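
mv_chan_alloc_slot() is a plain free-list allocator: take the first entry off free_slots under the channel spinlock and park it on allocated_slots; if nothing is free, kick the cleanup tasklet so completed descriptors get recycled. A minimal sketch of that pattern (the control flow between the matched lines is filled in here and is an assumption):

    struct mv_xor_desc_slot *iter = NULL;

    spin_lock_bh(&mv_chan->lock);
    if (!list_empty(&mv_chan->free_slots)) {
            iter = list_first_entry(&mv_chan->free_slots,
                                    struct mv_xor_desc_slot, node);
            list_move_tail(&iter->node, &mv_chan->allocated_slots);
            spin_unlock_bh(&mv_chan->lock);
            return iter;            /* caller reinitialises the async_tx */
    }
    spin_unlock_bh(&mv_chan->lock);

    /* no free slot: let the tasklet reclaim completed descriptors */
    tasklet_schedule(&mv_chan->irq_tasklet);
    return NULL;
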
412 struct mv_xor_chan *mv_chan = to_mv_xor_chan(tx->chan); in mv_xor_tx_submit() local
417 dev_dbg(mv_chan_to_devp(mv_chan), in mv_xor_tx_submit()
421 spin_lock_bh(&mv_chan->lock); in mv_xor_tx_submit()
424 if (list_empty(&mv_chan->chain)) in mv_xor_tx_submit()
425 list_move_tail(&sw_desc->node, &mv_chan->chain); in mv_xor_tx_submit()
429 old_chain_tail = list_entry(mv_chan->chain.prev, in mv_xor_tx_submit()
432 list_move_tail(&sw_desc->node, &mv_chan->chain); in mv_xor_tx_submit()
434 dev_dbg(mv_chan_to_devp(mv_chan), "Append to last desc %pa\n", in mv_xor_tx_submit()
441 if (!mv_chan_is_busy(mv_chan)) { in mv_xor_tx_submit()
442 u32 current_desc = mv_chan_get_current_desc(mv_chan); in mv_xor_tx_submit()
453 mv_chan_start_new_chain(mv_chan, sw_desc); in mv_xor_tx_submit()
455 spin_unlock_bh(&mv_chan->lock); in mv_xor_tx_submit()
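
mv_xor_tx_submit() is only reached through the generic dmaengine API: a client prepares a descriptor, submits it (which assigns the cookie and appends the slot to mv_chan->chain as above), then calls issue_pending. A hedged client-side sketch, assuming chan is an XOR-capable channel and dest/srcs/src_cnt/len are placeholder, already DMA-mapped arguments:

    struct dma_async_tx_descriptor *tx;
    dma_cookie_t cookie;

    /* prep: builds the hardware descriptor (mv_xor_prep_dma_xor) */
    tx = chan->device->device_prep_dma_xor(chan, dest, srcs, src_cnt,
                                           len, DMA_PREP_INTERRUPT);
    if (!tx)
            return -ENOMEM;

    /* submit: assigns the cookie, chains the slot after the old tail */
    cookie = dmaengine_submit(tx);
    if (dma_submit_error(cookie))
            return -EIO;

    /* issue: lets the driver start (or restart) the engine */
    dma_async_issue_pending(chan);
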
466 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_alloc_chan_resources() local
471 idx = mv_chan->slots_allocated; in mv_xor_alloc_chan_resources()
475 dev_info(mv_chan_to_devp(mv_chan), in mv_xor_alloc_chan_resources()
480 virt_desc = mv_chan->dma_desc_pool_virt; in mv_xor_alloc_chan_resources()
486 dma_desc = mv_chan->dma_desc_pool; in mv_xor_alloc_chan_resources()
490 spin_lock_bh(&mv_chan->lock); in mv_xor_alloc_chan_resources()
491 mv_chan->slots_allocated = idx; in mv_xor_alloc_chan_resources()
492 list_add_tail(&slot->node, &mv_chan->free_slots); in mv_xor_alloc_chan_resources()
493 spin_unlock_bh(&mv_chan->lock); in mv_xor_alloc_chan_resources()
496 dev_dbg(mv_chan_to_devp(mv_chan), in mv_xor_alloc_chan_resources()
498 mv_chan->slots_allocated); in mv_xor_alloc_chan_resources()
500 return mv_chan->slots_allocated ? : -ENOMEM; in mv_xor_alloc_chan_resources()
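
mv_xor_alloc_chan_resources() carves the coherent pool allocated at channel-add time (dma_desc_pool_virt / dma_desc_pool) into fixed-size hardware descriptors and wraps each one in a software slot on free_slots. Per slot, the loop body looks roughly as follows (the MV_XOR_SLOT_SIZE stride and kzalloc are assumptions consistent with the matched lines):

    slot = kzalloc(sizeof(*slot), GFP_KERNEL);
    if (!slot)
            break;

    /* the same descriptor, seen from the CPU and from the engine */
    slot->hw_desc = (void *)mv_chan->dma_desc_pool_virt +
                    idx * MV_XOR_SLOT_SIZE;
    slot->async_tx.phys = mv_chan->dma_desc_pool +
                    idx * MV_XOR_SLOT_SIZE;

    dma_async_tx_descriptor_init(&slot->async_tx, &mv_chan->dmachan);
    slot->async_tx.tx_submit = mv_xor_tx_submit;

    spin_lock_bh(&mv_chan->lock);
    mv_chan->slots_allocated = ++idx;
    list_add_tail(&slot->node, &mv_chan->free_slots);
    spin_unlock_bh(&mv_chan->lock);
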
507 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_prep_dma_xor() local
515 dev_dbg(mv_chan_to_devp(mv_chan), in mv_xor_prep_dma_xor()
519 sw_desc = mv_chan_alloc_slot(mv_chan); in mv_xor_prep_dma_xor()
524 if (mv_chan->op_in_desc == XOR_MODE_IN_DESC) in mv_xor_prep_dma_xor()
530 dev_dbg(mv_chan_to_devp(mv_chan), in mv_xor_prep_dma_xor()
550 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_prep_dma_interrupt() local
554 src = mv_chan->dummy_src_addr; in mv_xor_prep_dma_interrupt()
555 dest = mv_chan->dummy_dst_addr; in mv_xor_prep_dma_interrupt()
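
The engine has no pure "interrupt" descriptor, so mv_xor_prep_dma_interrupt() emulates DMA_INTERRUPT with a minimum-length dummy transfer between the two scratch buffers mapped in mv_xor_channel_add(); the completion interrupt of that transfer is what the client gets. A sketch of the idea (the driver routes this through its own memcpy/XOR prep helper, so the exact call below is an assumption):

    dma_addr_t src, dest;
    size_t len = MV_XOR_MIN_BYTE_COUNT;

    src  = mv_chan->dummy_src_addr;
    dest = mv_chan->dummy_dst_addr;

    /* a one-source XOR is a copy; its completion IRQ is the "interrupt" */
    return mv_xor_prep_dma_xor(chan, dest, &src, 1, len, flags);
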
567 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_free_chan_resources() local
571 spin_lock_bh(&mv_chan->lock); in mv_xor_free_chan_resources()
573 mv_chan_slot_cleanup(mv_chan); in mv_xor_free_chan_resources()
575 list_for_each_entry_safe(iter, _iter, &mv_chan->chain, in mv_xor_free_chan_resources()
578 list_move_tail(&iter->node, &mv_chan->free_slots); in mv_xor_free_chan_resources()
580 list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots, in mv_xor_free_chan_resources()
583 list_move_tail(&iter->node, &mv_chan->free_slots); in mv_xor_free_chan_resources()
585 list_for_each_entry_safe(iter, _iter, &mv_chan->allocated_slots, in mv_xor_free_chan_resources()
588 list_move_tail(&iter->node, &mv_chan->free_slots); in mv_xor_free_chan_resources()
591 iter, _iter, &mv_chan->free_slots, node) { in mv_xor_free_chan_resources()
594 mv_chan->slots_allocated--; in mv_xor_free_chan_resources()
597 dev_dbg(mv_chan_to_devp(mv_chan), "%s slots_allocated %d\n", in mv_xor_free_chan_resources()
598 __func__, mv_chan->slots_allocated); in mv_xor_free_chan_resources()
599 spin_unlock_bh(&mv_chan->lock); in mv_xor_free_chan_resources()
602 dev_err(mv_chan_to_devp(mv_chan), in mv_xor_free_chan_resources()
616 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_status() local
623 spin_lock_bh(&mv_chan->lock); in mv_xor_status()
624 mv_chan_slot_cleanup(mv_chan); in mv_xor_status()
625 spin_unlock_bh(&mv_chan->lock); in mv_xor_status()
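
mv_xor_status() follows the standard dmaengine pattern: report the cookie state, and only if the transfer is not complete yet take the channel lock, retire finished descriptors, and look again. A sketch of that shape (the dma_cookie_status() calls and the early return are assumptions; the locked cleanup matches the lines above):

    enum dma_status ret;

    ret = dma_cookie_status(chan, cookie, txstate);
    if (ret == DMA_COMPLETE)
            return ret;

    spin_lock_bh(&mv_chan->lock);
    mv_chan_slot_cleanup(mv_chan);      /* retire finished descriptors */
    spin_unlock_bh(&mv_chan->lock);

    return dma_cookie_status(chan, cookie, txstate);
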
687 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_issue_pending() local
689 if (mv_chan->pending >= MV_XOR_THRESHOLD) { in mv_xor_issue_pending()
690 mv_chan->pending = 0; in mv_xor_issue_pending()
691 mv_chan_activate(mv_chan); in mv_xor_issue_pending()
699 static int mv_chan_memcpy_self_test(struct mv_xor_chan *mv_chan) in mv_chan_memcpy_self_test() argument
724 dma_chan = &mv_chan->dmachan; in mv_chan_memcpy_self_test()
808 mv_chan_xor_self_test(struct mv_xor_chan *mv_chan) in mv_chan_xor_self_test() argument
855 dma_chan = &mv_chan->dmachan; in mv_chan_xor_self_test()
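
Both self-tests drive the freshly set-up channel the way an external client would: map a source and a destination, prepare a transfer, submit, issue, wait briefly, then check the result through the CPU. For the memcpy test the shape is roughly the following (buffer setup, the polling interval and the use of the generic prep/submit wrappers are assumptions; the driver's own test calls its internal helpers directly):

    src_dma  = dma_map_page(dma_chan->device->dev, src_page, 0,
                            PAGE_SIZE, DMA_TO_DEVICE);
    dest_dma = dma_map_page(dma_chan->device->dev, dest_page, 0,
                            PAGE_SIZE, DMA_FROM_DEVICE);

    tx = dma_chan->device->device_prep_dma_memcpy(dma_chan, dest_dma,
                                                   src_dma, PAGE_SIZE, 0);
    cookie = dmaengine_submit(tx);
    dma_async_issue_pending(dma_chan);
    msleep(1);

    if (dma_async_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_COMPLETE)
            goto out;               /* test failed: transfer never finished */

    /* hand the destination back to the CPU before comparing with memcmp() */
    dma_sync_single_for_cpu(dma_chan->device->dev, dest_dma,
                            PAGE_SIZE, DMA_FROM_DEVICE);
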
945 static int mv_xor_channel_remove(struct mv_xor_chan *mv_chan) in mv_xor_channel_remove() argument
948 struct device *dev = mv_chan->dmadev.dev; in mv_xor_channel_remove()
950 dma_async_device_unregister(&mv_chan->dmadev); in mv_xor_channel_remove()
953 mv_chan->dma_desc_pool_virt, mv_chan->dma_desc_pool); in mv_xor_channel_remove()
954 dma_unmap_single(dev, mv_chan->dummy_src_addr, in mv_xor_channel_remove()
956 dma_unmap_single(dev, mv_chan->dummy_dst_addr, in mv_xor_channel_remove()
959 list_for_each_entry_safe(chan, _chan, &mv_chan->dmadev.channels, in mv_xor_channel_remove()
964 free_irq(mv_chan->irq, mv_chan); in mv_xor_channel_remove()
975 struct mv_xor_chan *mv_chan; in mv_xor_channel_add() local
978 mv_chan = devm_kzalloc(&pdev->dev, sizeof(*mv_chan), GFP_KERNEL); in mv_xor_channel_add()
979 if (!mv_chan) in mv_xor_channel_add()
982 mv_chan->idx = idx; in mv_xor_channel_add()
983 mv_chan->irq = irq; in mv_xor_channel_add()
984 mv_chan->op_in_desc = op_in_desc; in mv_xor_channel_add()
986 dma_dev = &mv_chan->dmadev; in mv_xor_channel_add()
993 mv_chan->dummy_src_addr = dma_map_single(dma_dev->dev, in mv_xor_channel_add()
994 mv_chan->dummy_src, MV_XOR_MIN_BYTE_COUNT, DMA_FROM_DEVICE); in mv_xor_channel_add()
995 mv_chan->dummy_dst_addr = dma_map_single(dma_dev->dev, in mv_xor_channel_add()
996 mv_chan->dummy_dst, MV_XOR_MIN_BYTE_COUNT, DMA_TO_DEVICE); in mv_xor_channel_add()
1002 mv_chan->dma_desc_pool_virt = in mv_xor_channel_add()
1004 &mv_chan->dma_desc_pool, GFP_KERNEL); in mv_xor_channel_add()
1005 if (!mv_chan->dma_desc_pool_virt) in mv_xor_channel_add()
1030 mv_chan->mmr_base = xordev->xor_base; in mv_xor_channel_add()
1031 mv_chan->mmr_high_base = xordev->xor_high_base; in mv_xor_channel_add()
1032 tasklet_init(&mv_chan->irq_tasklet, mv_xor_tasklet, (unsigned long) in mv_xor_channel_add()
1033 mv_chan); in mv_xor_channel_add()
1036 mv_chan_clear_err_status(mv_chan); in mv_xor_channel_add()
1038 ret = request_irq(mv_chan->irq, mv_xor_interrupt_handler, in mv_xor_channel_add()
1039 0, dev_name(&pdev->dev), mv_chan); in mv_xor_channel_add()
1043 mv_chan_unmask_interrupts(mv_chan); in mv_xor_channel_add()
1045 if (mv_chan->op_in_desc == XOR_MODE_IN_DESC) in mv_xor_channel_add()
1046 mv_chan_set_mode_to_desc(mv_chan); in mv_xor_channel_add()
1048 mv_chan_set_mode(mv_chan, DMA_XOR); in mv_xor_channel_add()
1050 spin_lock_init(&mv_chan->lock); in mv_xor_channel_add()
1051 INIT_LIST_HEAD(&mv_chan->chain); in mv_xor_channel_add()
1052 INIT_LIST_HEAD(&mv_chan->completed_slots); in mv_xor_channel_add()
1053 INIT_LIST_HEAD(&mv_chan->free_slots); in mv_xor_channel_add()
1054 INIT_LIST_HEAD(&mv_chan->allocated_slots); in mv_xor_channel_add()
1055 mv_chan->dmachan.device = dma_dev; in mv_xor_channel_add()
1056 dma_cookie_init(&mv_chan->dmachan); in mv_xor_channel_add()
1058 list_add_tail(&mv_chan->dmachan.device_node, &dma_dev->channels); in mv_xor_channel_add()
1061 ret = mv_chan_memcpy_self_test(mv_chan); in mv_xor_channel_add()
1068 ret = mv_chan_xor_self_test(mv_chan); in mv_xor_channel_add()
1075 mv_chan->op_in_desc ? "Descriptor Mode" : "Registers Mode", in mv_xor_channel_add()
1081 return mv_chan; in mv_xor_channel_add()
1084 free_irq(mv_chan->irq, mv_chan); in mv_xor_channel_add()
1087 mv_chan->dma_desc_pool_virt, mv_chan->dma_desc_pool); in mv_xor_channel_add()
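
Two DMA-API idioms carry most of the channel bring-up above: the scratch buffers behind DMA_INTERRUPT get one-off streaming mappings, while the descriptor pool is coherent memory so the CPU can build hardware descriptors in place without per-descriptor cache maintenance. Condensed from the matched lines (the pool size constant and the device pointer passed to dma_alloc_coherent() are assumptions):

    /* streaming mappings: only touched by the engine during the dummy
     * transfer used to emulate DMA_INTERRUPT descriptors
     */
    mv_chan->dummy_src_addr = dma_map_single(dma_dev->dev,
                    mv_chan->dummy_src, MV_XOR_MIN_BYTE_COUNT,
                    DMA_FROM_DEVICE);
    mv_chan->dummy_dst_addr = dma_map_single(dma_dev->dev,
                    mv_chan->dummy_dst, MV_XOR_MIN_BYTE_COUNT,
                    DMA_TO_DEVICE);

    /* coherent pool: the CPU writes descriptors, the engine fetches them */
    mv_chan->dma_desc_pool_virt =
            dma_alloc_coherent(dma_dev->dev, MV_XOR_POOL_SIZE,
                               &mv_chan->dma_desc_pool, GFP_KERNEL);
    if (!mv_chan->dma_desc_pool_virt)
            return ERR_PTR(-ENOMEM);
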