Lines Matching refs:next2fill
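(Orientation: next2fill is the producer index of a vmxnet3 descriptor ring, the next slot the driver will fill and hand to the device; next2comp is its consumer counterpart, the next slot to be completed. Judging by the function names, all hits fall in the Linux vmxnet3 driver, drivers/net/vmxnet3/vmxnet3_drv.c. The hits below are grouped by function.)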
400 while (tq->tx_ring.next2comp != tq->tx_ring.next2fill) { in vmxnet3_tq_cleanup()
420 tq->tx_ring.next2fill = tq->tx_ring.next2comp = 0; in vmxnet3_tq_cleanup()
478 tq->tx_ring.next2fill = tq->tx_ring.next2comp = 0; in vmxnet3_tq_init()
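(The teardown/bring-up pairing is visible here: vmxnet3_tq_cleanup() first drains the ring until next2comp catches next2fill (400), then resets both indices to zero (420), and vmxnet3_tq_init() starts from the same all-zero state (478).)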
573 rbi = rbi_base + ring->next2fill; in vmxnet3_rq_alloc_rx_buf()
574 gd = ring->base + ring->next2fill; in vmxnet3_rq_alloc_rx_buf()
631 num_allocated, ring->next2fill, ring->next2comp); in vmxnet3_rq_alloc_rx_buf()
634 BUG_ON(num_allocated != 0 && ring->next2fill == ring->next2comp); in vmxnet3_rq_alloc_rx_buf()
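The vmxnet3_rq_alloc_rx_buf() hits (573-634) show the fill loop claiming the buf_info entry and rx descriptor at next2fill, then advancing after each allocation. Below is a minimal user-space sketch of the bookkeeping behind the BUG_ON at 634, assuming the convention the indices imply (next2fill == next2comp means "empty", so one slot is always kept free to keep "full" distinguishable); the struct and helper names are mine, modeled on the driver's vmxnet3_cmd_ring:

        struct ring {
                unsigned int next2fill;  /* producer: next slot the driver fills */
                unsigned int next2comp;  /* consumer: next slot to be completed  */
                unsigned int size;       /* number of descriptors in the ring    */
                unsigned char gen;       /* generation bit, flipped on each wrap */
        };

        /* Free descriptors; one slot always stays empty so that
         * next2fill == next2comp can only ever mean "ring is empty". */
        static unsigned int ring_desc_avail(const struct ring *r)
        {
                return (r->next2comp > r->next2fill ? 0 : r->size) +
                       r->next2comp - r->next2fill - 1;
        }

A fill loop that only claims a slot while ring_desc_avail() is nonzero can never step next2fill onto next2comp, which is exactly what the BUG_ON at 634 asserts once at least one buffer has been allocated.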
674 ctx->sop_txd = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
680 tq->tx_ring.next2fill * in vmxnet3_map_pkt()
685 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
690 tq->tx_ring.next2fill, in vmxnet3_map_pkt()
713 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
721 gdesc = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
730 tq->tx_ring.next2fill, le64_to_cpu(gdesc->txd.addr), in vmxnet3_map_pkt()
746 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
761 gdesc = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
770 tq->tx_ring.next2fill, le64_to_cpu(gdesc->txd.addr), in vmxnet3_map_pkt()
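The vmxnet3_map_pkt() hits (674-770) repeat one pattern per buffer: pick up tq->buf_info[next2fill] and the tx descriptor at the same index, fill both, log the index and descriptor address, advance. A sketch of the advance step for the ring above, modeled on the driver's vmxnet3_cmd_ring_adv_next2fill(); the generation flip on wrap-around is how the device tells freshly written descriptors from stale ones:

        /* Advance the producer index, wrapping and flipping the generation
         * bit at the end of the ring (sketch; not the driver's exact code). */
        static void ring_adv_next2fill(struct ring *r)
        {
                if (++r->next2fill == r->size) {
                        r->next2fill = 0;
                        r->gen ^= 1;
                }
        }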
875 tdd = tq->data_ring.base + tq->tx_ring.next2fill; in vmxnet3_parse_and_copy_hdr()
880 ctx->copy_size, tq->tx_ring.next2fill); in vmxnet3_parse_and_copy_hdr()
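(Note the shared index at 875: the per-packet header area in tq->data_ring is addressed with tx_ring.next2fill, so each data-ring slot is paired with the tx descriptor currently being filled.)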
988 tq->tx_ring.next2comp, tq->tx_ring.next2fill); in vmxnet3_tq_xmit()
1081 tq->tx_ring.next2fill); in vmxnet3_tq_xmit()
1334 vmxnet3_getRxDesc(rxd, &ring->base[ring->next2fill].rxd, in vmxnet3_rq_rx_complete()
1348 ring->next2fill); in vmxnet3_rq_rx_complete()
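(The rx completion path doubles as the refill path: the loop around 1334 re-arms the descriptor at next2fill so the just-consumed buffer can be handed back to the device, and the fragment at 1348 shows the advanced index being reported afterwards, likely in a debug print.)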
1391 rq->rx_ring[ring_idx].next2fill = in vmxnet3_rq_cleanup()
1478 rq->rx_ring[i].next2fill = rq->rx_ring[i].next2comp = 0; in vmxnet3_rq_init()
2307 adapter->rx_queue[i].rx_ring[0].next2fill); in vmxnet3_activate_dev()
2310 adapter->rx_queue[i].rx_ring[1].next2fill); in vmxnet3_activate_dev()
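Read together, the hits trace one lifecycle: init and cleanup reset both indices to zero (420, 478, 1478), the tx and rx fill paths claim the slot at next2fill and advance (573-875), and activation apparently hands each rx ring's initial next2fill to the device (2307-2310). A self-contained demonstration using the sketches above (plain C, no kernel dependencies; the real rings also carry DMA addresses and generation-tagged descriptors, omitted here):

        #include <assert.h>
        #include <stdio.h>

        int main(void)
        {
                struct ring r = { .next2fill = 0, .next2comp = 0,
                                  .size = 8, .gen = 1 };

                /* Fill until no descriptors remain; one slot stays free. */
                while (ring_desc_avail(&r) > 0)
                        ring_adv_next2fill(&r);
                assert(r.next2fill == 7 && r.next2comp == 0);

                /* Complete everything: the consumer catches the producer. */
                while (r.next2comp != r.next2fill)
                        r.next2comp = (r.next2comp + 1) % r.size;

                /* Same emptiness test as the drain loop at line 400. */
                printf("ring empty: %s\n",
                       r.next2comp == r.next2fill ? "yes" : "no");
                return 0;
        }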