Lines matching refs:next2fill

400 while (tq->tx_ring.next2comp != tq->tx_ring.next2fill) { in vmxnet3_tq_cleanup()
420 tq->tx_ring.next2fill = tq->tx_ring.next2comp = 0; in vmxnet3_tq_cleanup()
478 tq->tx_ring.next2fill = tq->tx_ring.next2comp = 0; in vmxnet3_tq_init()
573 rbi = rbi_base + ring->next2fill; in vmxnet3_rq_alloc_rx_buf()
574 gd = ring->base + ring->next2fill; in vmxnet3_rq_alloc_rx_buf()
642 num_allocated, ring->next2fill, ring->next2comp); in vmxnet3_rq_alloc_rx_buf()
645 BUG_ON(num_allocated != 0 && ring->next2fill == ring->next2comp); in vmxnet3_rq_alloc_rx_buf()
685 ctx->sop_txd = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
691 tq->tx_ring.next2fill * in vmxnet3_map_pkt()
696 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
701 tq->tx_ring.next2fill, in vmxnet3_map_pkt()
724 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
734 gdesc = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
743 tq->tx_ring.next2fill, le64_to_cpu(gdesc->txd.addr), in vmxnet3_map_pkt()
759 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
776 gdesc = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
785 tq->tx_ring.next2fill, le64_to_cpu(gdesc->txd.addr), in vmxnet3_map_pkt()
895 tdd = tq->data_ring.base + tq->tx_ring.next2fill; in vmxnet3_parse_and_copy_hdr()
900 ctx->copy_size, tq->tx_ring.next2fill); in vmxnet3_parse_and_copy_hdr()
1008 tq->tx_ring.next2comp, tq->tx_ring.next2fill); in vmxnet3_tq_xmit()
1102 tq->tx_ring.next2fill); in vmxnet3_tq_xmit()
1466 vmxnet3_getRxDesc(rxd, &ring->base[ring->next2fill].rxd, in vmxnet3_rq_rx_complete()
1480 ring->next2fill); in vmxnet3_rq_rx_complete()
1523 rq->rx_ring[ring_idx].next2fill = in vmxnet3_rq_cleanup()
1610 rq->rx_ring[i].next2fill = rq->rx_ring[i].next2comp = 0; in vmxnet3_rq_init()
2441 adapter->rx_queue[i].rx_ring[0].next2fill); in vmxnet3_activate_dev()
2444 adapter->rx_queue[i].rx_ring[1].next2fill); in vmxnet3_activate_dev()
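
Taken together, the matches show a producer/consumer ring discipline: next2fill is the index of the next descriptor the driver will fill, next2comp the index of the next descriptor the device will complete; init paths reset both to 0 (lines 478, 1610), fill paths write the slot at next2fill and advance it, and cleanup drains by walking next2comp up to next2fill (line 400). Below is a minimal, self-contained sketch of that discipline. The demo_cmd_ring struct and the demo_* helpers are illustrative names invented for this sketch, not the driver's own; the upstream driver keeps similar inline helpers (e.g. vmxnet3_cmd_ring_adv_next2fill()) in vmxnet3_int.h, and its real rings also carry a generation bit that flips on wrap, which the sketch omits.

/*
 * Minimal sketch of the next2fill/next2comp ring pattern suggested by
 * the matches above. Names here are hypothetical stand-ins for the
 * driver's internal types and helpers.
 */
#include <assert.h>
#include <stdio.h>

struct demo_cmd_ring {
	unsigned int size;      /* number of descriptors in the ring */
	unsigned int next2fill; /* next slot the driver will fill (producer) */
	unsigned int next2comp; /* next slot the device will complete (consumer) */
};

/* Descriptors still free for filling. One slot is held back so that
 * next2fill == next2comp can unambiguously mean "nothing outstanding",
 * which is the invariant the BUG_ON() at line 645 above checks after
 * at least one buffer has been allocated. */
static unsigned int demo_ring_desc_avail(const struct demo_cmd_ring *ring)
{
	unsigned int used = (ring->next2fill >= ring->next2comp) ?
		ring->next2fill - ring->next2comp :
		ring->size - ring->next2comp + ring->next2fill;
	return ring->size - used - 1;
}

/* Producer side: claim the slot at next2fill, wrapping past the end. */
static void demo_ring_adv_next2fill(struct demo_cmd_ring *ring)
{
	if (++ring->next2fill == ring->size)
		ring->next2fill = 0;
}

/* Consumer side: retire the slot at next2comp, wrapping past the end. */
static void demo_ring_adv_next2comp(struct demo_cmd_ring *ring)
{
	if (++ring->next2comp == ring->size)
		ring->next2comp = 0;
}

int main(void)
{
	/* next2fill = next2comp = 0, as in vmxnet3_tq_init() (line 478). */
	struct demo_cmd_ring ring = { .size = 8 };
	unsigned int filled = 0;

	/* Fill until no descriptor is left, as vmxnet3_rq_alloc_rx_buf() does. */
	while (demo_ring_desc_avail(&ring) > 0) {
		demo_ring_adv_next2fill(&ring);
		filled++;
	}
	/* Mirrors the BUG_ON() at line 645: having filled something,
	 * the two indices must differ. */
	assert(filled != 0 && ring.next2fill != ring.next2comp);

	/* Drain everything outstanding, as vmxnet3_tq_cleanup() does (line 400). */
	while (ring.next2comp != ring.next2fill)
		demo_ring_adv_next2comp(&ring);

	printf("filled %u of %u slots, ring drained\n", filled, ring.size);
	return 0;
}

Compiled with any C compiler, the sketch fills seven of eight slots (one is reserved), asserts the line-645 invariant, and drains the ring with the same comparison used by the cleanup loop at line 400.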