Lines matching refs: cur_p (each entry shows the source line number, the matching line, and the enclosing function in the axienet / Xilinx AXI Ethernet driver)
587 struct axidma_bd *cur_p; in axienet_start_xmit_done() local
590 cur_p = &lp->tx_bd_v[lp->tx_bd_ci]; in axienet_start_xmit_done()
591 status = cur_p->status; in axienet_start_xmit_done()
593 dma_unmap_single(ndev->dev.parent, cur_p->phys, in axienet_start_xmit_done()
594 (cur_p->cntrl & XAXIDMA_BD_CTRL_LENGTH_MASK), in axienet_start_xmit_done()
596 if (cur_p->app4) in axienet_start_xmit_done()
597 dev_kfree_skb_irq((struct sk_buff *)cur_p->app4); in axienet_start_xmit_done()
599 cur_p->app0 = 0; in axienet_start_xmit_done()
600 cur_p->app1 = 0; in axienet_start_xmit_done()
601 cur_p->app2 = 0; in axienet_start_xmit_done()
602 cur_p->app4 = 0; in axienet_start_xmit_done()
603 cur_p->status = 0; in axienet_start_xmit_done()
610 cur_p = &lp->tx_bd_v[lp->tx_bd_ci]; in axienet_start_xmit_done()
611 status = cur_p->status; in axienet_start_xmit_done()
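
The axienet_start_xmit_done() references above show the TX completion pattern: read the descriptor at lp->tx_bd_ci, unmap the buffer whose length is encoded in cntrl, free the skb pointer that was stashed in app4 at transmit time, zero the bookkeeping fields, and advance the index with wraparound. Below is a minimal userspace model of that walk, not the driver code itself: struct axidma_bd is simplified (app4 and sw_id_offset are widened to pointer size), TX_BD_NUM and the mask values are illustrative, and unmap_buffer()/free_packet() stand in for dma_unmap_single()/dev_kfree_skb_irq().

#include <stdint.h>
#include <stddef.h>

#define TX_BD_NUM                    64          /* ring size, illustrative    */
#define XAXIDMA_BD_STS_COMPLETE_MASK 0x80000000U /* illustrative bit positions */
#define XAXIDMA_BD_CTRL_LENGTH_MASK  0x007FFFFFU

/* Simplified software view of one AXI DMA buffer descriptor.  app4 and
 * sw_id_offset hold host pointers in this model; in the real descriptor
 * they are 32-bit words. */
struct axidma_bd {
        uint32_t  phys;          /* bus address of the mapped buffer      */
        uint32_t  cntrl;         /* buffer length plus SOF/EOF control    */
        uint32_t  status;        /* completion flag / transferred length  */
        uint32_t  app0, app1, app2, app3;
        uintptr_t app4;          /* TX: packet pointer saved at xmit time */
        uintptr_t sw_id_offset;  /* RX: buffer pointer saved at arm time  */
};

/* Stand-ins for dma_unmap_single() and dev_kfree_skb_irq(). */
static void unmap_buffer(uint32_t phys, size_t len) { (void)phys; (void)len; }
static void free_packet(void *pkt) { (void)pkt; }

/* Reclaim every completed TX descriptor starting at *ci; returns the count. */
unsigned int tx_reclaim(struct axidma_bd *ring, unsigned int *ci)
{
        struct axidma_bd *cur_p = &ring[*ci];
        unsigned int done = 0;

        while (cur_p->status & XAXIDMA_BD_STS_COMPLETE_MASK) {
                unmap_buffer(cur_p->phys,
                             cur_p->cntrl & XAXIDMA_BD_CTRL_LENGTH_MASK);
                if (cur_p->app4)                 /* a packet was queued on this BD */
                        free_packet((void *)cur_p->app4);

                cur_p->app0 = cur_p->app1 = cur_p->app2 = 0;
                cur_p->app4 = 0;
                cur_p->status = 0;

                *ci = (*ci + 1) % TX_BD_NUM;     /* advance with wraparound */
                cur_p = &ring[*ci];
                done++;
        }
        return done;
}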
635 struct axidma_bd *cur_p; in axienet_check_tx_bd_space() local
636 cur_p = &lp->tx_bd_v[(lp->tx_bd_tail + num_frag) % TX_BD_NUM]; in axienet_check_tx_bd_space()
637 if (cur_p->status & XAXIDMA_BD_STS_ALL_MASK) in axienet_check_tx_bd_space()
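
axienet_check_tx_bd_space() probes the descriptor that would hold the last fragment of the next frame: if any status bit is still set, the hardware has not released it and the ring is treated as full. A hedged sketch of the same check, reusing the simplified struct axidma_bd and TX_BD_NUM from the sketch above (the XAXIDMA_BD_STS_ALL_MASK value is illustrative):

#define XAXIDMA_BD_STS_ALL_MASK 0xFC000000U  /* any status bit set: BD still in use */

/* Non-zero means the descriptor that would carry the last fragment of the
 * next frame is still owned by the hardware, i.e. there is no room yet. */
int tx_ring_full(const struct axidma_bd *ring, unsigned int tail,
                 unsigned int num_frag)
{
        const struct axidma_bd *cur_p = &ring[(tail + num_frag) % TX_BD_NUM];

        return (cur_p->status & XAXIDMA_BD_STS_ALL_MASK) != 0;
}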
664 struct axidma_bd *cur_p; in axienet_start_xmit() local
667 cur_p = &lp->tx_bd_v[lp->tx_bd_tail]; in axienet_start_xmit()
678 cur_p->app0 |= 2; in axienet_start_xmit()
683 cur_p->app0 |= 1; in axienet_start_xmit()
684 cur_p->app1 = (csum_start_off << 16) | csum_index_off; in axienet_start_xmit()
687 cur_p->app0 |= 2; /* Tx Full Checksum Offload Enabled */ in axienet_start_xmit()
690 cur_p->cntrl = skb_headlen(skb) | XAXIDMA_BD_CTRL_TXSOF_MASK; in axienet_start_xmit()
691 cur_p->phys = dma_map_single(ndev->dev.parent, skb->data, in axienet_start_xmit()
697 cur_p = &lp->tx_bd_v[lp->tx_bd_tail]; in axienet_start_xmit()
699 cur_p->phys = dma_map_single(ndev->dev.parent, in axienet_start_xmit()
703 cur_p->cntrl = skb_frag_size(frag); in axienet_start_xmit()
706 cur_p->cntrl |= XAXIDMA_BD_CTRL_TXEOF_MASK; in axienet_start_xmit()
707 cur_p->app4 = (unsigned long)skb; in axienet_start_xmit()
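
The axienet_start_xmit() references show how a frame becomes a descriptor chain: the head descriptor gets XAXIDMA_BD_CTRL_TXSOF_MASK plus the checksum hints (app0 |= 2 requests full checksum offload, app0 |= 1 with app1 = (csum_start_off << 16) | csum_index_off requests partial insertion), each page fragment gets its own descriptor, and the last descriptor gets XAXIDMA_BD_CTRL_TXEOF_MASK with the skb pointer parked in app4 for the completion handler. The sketch below models that chain building under the same assumptions as the first sketch; map_buffer() stands in for dma_map_single() and the SOF/EOF bit values are illustrative.

#define XAXIDMA_BD_CTRL_TXSOF_MASK 0x08000000U  /* illustrative bit positions */
#define XAXIDMA_BD_CTRL_TXEOF_MASK 0x04000000U

struct frag { void *data; size_t len; };

enum csum_mode { CSUM_NONE, CSUM_PARTIAL, CSUM_FULL };

/* Stand-in for dma_map_single(): returns a pretend bus address. */
static uint32_t map_buffer(void *buf, size_t len)
{
        (void)len;
        return (uint32_t)(uintptr_t)buf;
}

/* Queue one frame made of a head buffer plus nr_frags fragments.  The head
 * descriptor carries TXSOF and the checksum hints in app0/app1; the last
 * descriptor carries TXEOF and the packet pointer in app4 so the completion
 * path (tx_reclaim() above) can free it. */
void tx_queue_frame(struct axidma_bd *ring, unsigned int *tail,
                    void *head, size_t head_len,
                    const struct frag *frags, unsigned int nr_frags,
                    enum csum_mode csum,
                    uint32_t csum_start_off, uint32_t csum_index_off)
{
        struct axidma_bd *cur_p = &ring[*tail];
        unsigned int i;

        if (csum == CSUM_FULL) {
                cur_p->app0 |= 2;        /* full checksum offload             */
        } else if (csum == CSUM_PARTIAL) {
                cur_p->app0 |= 1;        /* partial: start/insert offsets     */
                cur_p->app1 = (csum_start_off << 16) | csum_index_off;
        }

        cur_p->cntrl = (uint32_t)head_len | XAXIDMA_BD_CTRL_TXSOF_MASK;
        cur_p->phys = map_buffer(head, head_len);

        for (i = 0; i < nr_frags; i++) {
                *tail = (*tail + 1) % TX_BD_NUM;
                cur_p = &ring[*tail];
                cur_p->phys = map_buffer(frags[i].data, frags[i].len);
                cur_p->cntrl = (uint32_t)frags[i].len;
        }

        cur_p->cntrl |= XAXIDMA_BD_CTRL_TXEOF_MASK;   /* mark end of frame    */
        cur_p->app4 = (uintptr_t)head;                /* freed by tx_reclaim()*/
        *tail = (*tail + 1) % TX_BD_NUM;
}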
736 struct axidma_bd *cur_p; in axienet_recv() local
738 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_recv()
740 while ((cur_p->status & XAXIDMA_BD_STS_COMPLETE_MASK)) { in axienet_recv()
742 skb = (struct sk_buff *) (cur_p->sw_id_offset); in axienet_recv()
743 length = cur_p->app4 & 0x0000FFFF; in axienet_recv()
745 dma_unmap_single(ndev->dev.parent, cur_p->phys, in axienet_recv()
756 csumstatus = (cur_p->app2 & in axienet_recv()
765 skb->csum = be32_to_cpu(cur_p->app3 & 0xFFFF); in axienet_recv()
778 cur_p->phys = dma_map_single(ndev->dev.parent, new_skb->data, in axienet_recv()
781 cur_p->cntrl = lp->max_frm_size; in axienet_recv()
782 cur_p->status = 0; in axienet_recv()
783 cur_p->sw_id_offset = (u32) new_skb; in axienet_recv()
787 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_recv()
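
axienet_recv() drains the RX ring while the COMPLETE bit is set: the skb pointer comes back out of sw_id_offset, the received length out of the low 16 bits of app4, and the checksum results out of app2/app3, after which the descriptor is re-armed with a freshly mapped buffer, cntrl reset to lp->max_frm_size, and status cleared. The model below reuses the earlier struct and map/unmap stand-ins; checksum handling and allocation-failure handling are omitted, and RX_BD_NUM plus alloc_rx_buffer()/deliver_packet() are illustrative stand-ins for the real skb allocation and netif delivery calls.

#include <stdlib.h>

#define RX_BD_NUM 128                 /* RX ring size, illustrative */

/* Stand-ins: hand the packet up (here it is simply freed) and allocate a
 * replacement receive buffer. */
static void deliver_packet(void *buf, size_t len) { (void)len; free(buf); }
static void *alloc_rx_buffer(size_t len) { return malloc(len); }

/* Process every completed RX descriptor starting at *ci and re-arm it. */
void rx_poll(struct axidma_bd *ring, unsigned int *ci, size_t max_frm_size)
{
        struct axidma_bd *cur_p = &ring[*ci];

        while (cur_p->status & XAXIDMA_BD_STS_COMPLETE_MASK) {
                void  *buf    = (void *)cur_p->sw_id_offset;  /* saved when armed */
                size_t length = cur_p->app4 & 0x0000FFFF;     /* bytes received   */
                void  *new_buf;

                unmap_buffer(cur_p->phys, max_frm_size);
                deliver_packet(buf, length);

                new_buf = alloc_rx_buffer(max_frm_size);      /* re-arm the BD    */
                cur_p->phys = map_buffer(new_buf, max_frm_size);
                cur_p->cntrl = (uint32_t)max_frm_size;
                cur_p->status = 0;
                cur_p->sw_id_offset = (uintptr_t)new_buf;

                *ci = (*ci + 1) % RX_BD_NUM;                  /* wraparound       */
                cur_p = &ring[*ci];
        }
}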
1373 struct axidma_bd *cur_p; in axienet_dma_err_handler() local
1394 cur_p = &lp->tx_bd_v[i]; in axienet_dma_err_handler()
1395 if (cur_p->phys) in axienet_dma_err_handler()
1396 dma_unmap_single(ndev->dev.parent, cur_p->phys, in axienet_dma_err_handler()
1397 (cur_p->cntrl & in axienet_dma_err_handler()
1400 if (cur_p->app4) in axienet_dma_err_handler()
1401 dev_kfree_skb_irq((struct sk_buff *) cur_p->app4); in axienet_dma_err_handler()
1402 cur_p->phys = 0; in axienet_dma_err_handler()
1403 cur_p->cntrl = 0; in axienet_dma_err_handler()
1404 cur_p->status = 0; in axienet_dma_err_handler()
1405 cur_p->app0 = 0; in axienet_dma_err_handler()
1406 cur_p->app1 = 0; in axienet_dma_err_handler()
1407 cur_p->app2 = 0; in axienet_dma_err_handler()
1408 cur_p->app3 = 0; in axienet_dma_err_handler()
1409 cur_p->app4 = 0; in axienet_dma_err_handler()
1410 cur_p->sw_id_offset = 0; in axienet_dma_err_handler()
1414 cur_p = &lp->rx_bd_v[i]; in axienet_dma_err_handler()
1415 cur_p->status = 0; in axienet_dma_err_handler()
1416 cur_p->app0 = 0; in axienet_dma_err_handler()
1417 cur_p->app1 = 0; in axienet_dma_err_handler()
1418 cur_p->app2 = 0; in axienet_dma_err_handler()
1419 cur_p->app3 = 0; in axienet_dma_err_handler()
1420 cur_p->app4 = 0; in axienet_dma_err_handler()
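
In axienet_dma_err_handler() the two loops above return both rings to a clean state before the DMA channels are reset: TX descriptors are unmapped and any packet still parked in app4 is freed before every field is cleared, while RX descriptors keep their mapped buffer (phys, cntrl and sw_id_offset are left alone) and only the hardware-written status and app words are zeroed. A compact sketch of that reset pass, reusing the pieces defined in the sketches above:

#include <string.h>

/* Reset the TX and RX rings after a DMA error, under the same simplified
 * descriptor model as the earlier sketches. */
void reset_rings(struct axidma_bd *tx_ring, unsigned int tx_num,
                 struct axidma_bd *rx_ring, unsigned int rx_num)
{
        unsigned int i;

        for (i = 0; i < tx_num; i++) {
                struct axidma_bd *cur_p = &tx_ring[i];

                if (cur_p->phys)            /* a buffer was still mapped here   */
                        unmap_buffer(cur_p->phys,
                                     cur_p->cntrl & XAXIDMA_BD_CTRL_LENGTH_MASK);
                if (cur_p->app4)            /* a queued packet never completed  */
                        free_packet((void *)cur_p->app4);

                memset(cur_p, 0, sizeof(*cur_p));  /* phys, cntrl, status, app*, sw_id */
        }

        for (i = 0; i < rx_num; i++) {
                struct axidma_bd *cur_p = &rx_ring[i];

                /* RX buffers stay mapped: clear only the hardware-written fields. */
                cur_p->status = 0;
                cur_p->app0 = cur_p->app1 = cur_p->app2 = cur_p->app3 = 0;
                cur_p->app4 = 0;
        }
}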