Lines Matching refs:txq

202 struct qede_tx_queue *txq, in qede_free_tx_pkt() argument
205 u16 idx = txq->sw_tx_cons & NUM_TX_BDS_MAX; in qede_free_tx_pkt()
206 struct sk_buff *skb = txq->sw_tx_ring[idx].skb; in qede_free_tx_pkt()
211 bool data_split = txq->sw_tx_ring[idx].flags & QEDE_TSO_SPLIT_BD; in qede_free_tx_pkt()
217 idx, txq->sw_tx_cons, txq->sw_tx_prod); in qede_free_tx_pkt()
223 first_bd = (struct eth_tx_1st_bd *)qed_chain_consume(&txq->tx_pbl); in qede_free_tx_pkt()
231 qed_chain_consume(&txq->tx_pbl); in qede_free_tx_pkt()
241 qed_chain_consume(&txq->tx_pbl); in qede_free_tx_pkt()
247 qed_chain_consume(&txq->tx_pbl); in qede_free_tx_pkt()
251 txq->sw_tx_ring[idx].skb = NULL; in qede_free_tx_pkt()
252 txq->sw_tx_ring[idx].flags = 0; in qede_free_tx_pkt()
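
The qede_free_tx_pkt() hits above are the normal completion path: the free-running sw_tx_cons counter is masked with NUM_TX_BDS_MAX to pick the sw_tx_ring slot, the packet's BDs are consumed from the tx_pbl chain, and the slot's skb pointer and flags are cleared. A minimal user-space sketch of that masked-index bookkeeping, assuming a simplified ring (TX_RING_SIZE, sw_tx_bd and free_tx_pkt() here are illustrative stand-ins, not the driver's definitions):

#include <stdio.h>
#include <stdint.h>

#define TX_RING_SIZE   64                  /* stand-in; must be a power of two */
#define NUM_TX_BDS_MAX (TX_RING_SIZE - 1)  /* mask applied to the free-running indices */

struct sw_tx_bd {
	void *skb;      /* stand-in for struct sk_buff * */
	uint8_t flags;  /* e.g. a QEDE_TSO_SPLIT_BD-style flag */
};

static struct sw_tx_bd sw_tx_ring[TX_RING_SIZE];
static uint16_t sw_tx_prod, sw_tx_cons;

/* Model of the completion step: locate the slot, release it, advance cons. */
static void free_tx_pkt(void)
{
	uint16_t idx = sw_tx_cons & NUM_TX_BDS_MAX;

	printf("completing slot %u (skb=%p)\n", (unsigned)idx, sw_tx_ring[idx].skb);
	sw_tx_ring[idx].skb = NULL;
	sw_tx_ring[idx].flags = 0;
	sw_tx_cons++;            /* counters run free; only array accesses are masked */
}

int main(void)
{
	int dummy;

	/* Producer side: place an "skb" at the masked producer index. */
	sw_tx_ring[sw_tx_prod & NUM_TX_BDS_MAX].skb = &dummy;
	sw_tx_prod++;

	free_tx_pkt();
	return 0;
}
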
259 struct qede_tx_queue *txq, in qede_free_failed_tx_pkt() argument
264 u16 idx = txq->sw_tx_prod & NUM_TX_BDS_MAX; in qede_free_failed_tx_pkt()
265 struct sk_buff *skb = txq->sw_tx_ring[idx].skb; in qede_free_failed_tx_pkt()
270 qed_chain_set_prod(&txq->tx_pbl, in qede_free_failed_tx_pkt()
271 le16_to_cpu(txq->tx_db.data.bd_prod), in qede_free_failed_tx_pkt()
274 first_bd = (struct eth_tx_1st_bd *)qed_chain_produce(&txq->tx_pbl); in qede_free_failed_tx_pkt()
278 qed_chain_produce(&txq->tx_pbl); in qede_free_failed_tx_pkt()
289 qed_chain_produce(&txq->tx_pbl); in qede_free_failed_tx_pkt()
297 qed_chain_set_prod(&txq->tx_pbl, in qede_free_failed_tx_pkt()
298 le16_to_cpu(txq->tx_db.data.bd_prod), in qede_free_failed_tx_pkt()
303 txq->sw_tx_ring[idx].skb = NULL; in qede_free_failed_tx_pkt()
304 txq->sw_tx_ring[idx].flags = 0; in qede_free_failed_tx_pkt()
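
qede_free_failed_tx_pkt() is the unwind path for a failed transmit: the chain producer is temporarily set back to the saved bd_prod so the BDs that were just produced can be re-read and their mappings released, the producer is then restored, and the sw_tx_ring slot at the masked sw_tx_prod is cleared. A generic sketch of that save/rewind/restore idea over a plain array (bd_ring, unmap_bd() and free_failed_tx_pkt() are hypothetical names, not the qed chain API):

#include <stdio.h>
#include <stdint.h>

#define RING_SIZE 8

struct bd { uint64_t dma; uint32_t len; };

static struct bd bd_ring[RING_SIZE];
static uint16_t prod;

static void unmap_bd(struct bd *b)
{
	printf("unmapping dma=0x%llx len=%u\n",
	       (unsigned long long)b->dma, (unsigned)b->len);
	b->dma = 0;
	b->len = 0;
}

/* On a mapping failure, walk back over the nbds BDs already produced,
 * release their mappings, and restore the producer to where it started. */
static void free_failed_tx_pkt(uint16_t save_prod, int nbds)
{
	for (int i = 0; i < nbds; i++)
		unmap_bd(&bd_ring[(save_prod + i) % RING_SIZE]);
	prod = save_prod;	/* nothing was handed to the hardware */
}

int main(void)
{
	uint16_t save_prod = prod;

	/* Build two BDs, then pretend the third mapping failed. */
	bd_ring[prod++ % RING_SIZE] = (struct bd){ .dma = 0x1000, .len = 64 };
	bd_ring[prod++ % RING_SIZE] = (struct bd){ .dma = 0x2000, .len = 128 };

	free_failed_tx_pkt(save_prod, 2);
	printf("prod restored to %u\n", (unsigned)prod);
	return 0;
}
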
391 struct qede_tx_queue *txq; in qede_start_xmit() local
408 txq = QEDE_TX_QUEUE(edev, txq_index); in qede_start_xmit()
416 WARN_ON(qed_chain_get_elem_left(&txq->tx_pbl) < in qede_start_xmit()
422 idx = txq->sw_tx_prod & NUM_TX_BDS_MAX; in qede_start_xmit()
423 txq->sw_tx_ring[idx].skb = skb; in qede_start_xmit()
425 qed_chain_produce(&txq->tx_pbl); in qede_start_xmit()
435 qede_free_failed_tx_pkt(edev, txq, first_bd, 0, false); in qede_start_xmit()
446 qed_chain_produce(&txq->tx_pbl); in qede_start_xmit()
451 qed_chain_produce(&txq->tx_pbl); in qede_start_xmit()
516 txq->sw_tx_ring[idx].flags |= QEDE_TSO_SPLIT_BD; in qede_start_xmit()
532 qede_free_failed_tx_pkt(edev, txq, first_bd, nbd, in qede_start_xmit()
548 qed_chain_produce(&txq->tx_pbl); in qede_start_xmit()
556 qede_free_failed_tx_pkt(edev, txq, first_bd, nbd, in qede_start_xmit()
572 txq->sw_tx_prod++; in qede_start_xmit()
575 txq->tx_db.data.bd_prod = in qede_start_xmit()
576 cpu_to_le16(qed_chain_get_prod_idx(&txq->tx_pbl)); in qede_start_xmit()
583 writel(txq->tx_db.raw, txq->doorbell_addr); in qede_start_xmit()
593 if (unlikely(qed_chain_get_elem_left(&txq->tx_pbl) in qede_start_xmit()
604 if (qed_chain_get_elem_left(&txq->tx_pbl) in qede_start_xmit()
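
The qede_start_xmit() hits show the transmit order: claim the sw_tx_ring slot at the masked sw_tx_prod, produce the first and any extra BDs from tx_pbl (freeing the partially built packet on a mapping error), advance sw_tx_prod, latch the new producer into tx_db, ring the doorbell with writel(), and stop the queue when tx_pbl runs low on elements. A compressed user-space model of that sequence, assuming invented sizes and helpers (elems_left(), ring_doorbell() and MAX_BDS_PER_PKT are illustrative only):

#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>

#define RING_SIZE       16
#define MAX_BDS_PER_PKT 4   /* stand-in for the worst-case BD count per skb */

static uint16_t prod, cons;  /* free-running producer/consumer */
static bool queue_stopped;

static uint16_t elems_left(void)
{
	return RING_SIZE - (uint16_t)(prod - cons);
}

static void ring_doorbell(uint16_t bd_prod)
{
	/* The driver does this with writel(txq->tx_db.raw, txq->doorbell_addr). */
	printf("doorbell: bd_prod=%u\n", (unsigned)bd_prod);
}

static int start_xmit(int nbds)
{
	if (elems_left() < nbds) {
		queue_stopped = true;   /* netif_tx_stop_queue() in the driver */
		return -1;
	}

	prod += nbds;                   /* BDs produced for this packet */
	ring_doorbell(prod);

	if (elems_left() < MAX_BDS_PER_PKT)
		queue_stopped = true;   /* stop before the next packet could overflow */
	return 0;
}

int main(void)
{
	for (int i = 0; i < 6 && !queue_stopped; i++)
		start_xmit(3);
	printf("stopped=%d prod=%u\n", (int)queue_stopped, (unsigned)prod);
	return 0;
}
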
616 static int qede_txq_has_work(struct qede_tx_queue *txq) in qede_txq_has_work() argument
622 hw_bd_cons = le16_to_cpu(*txq->hw_cons_ptr); in qede_txq_has_work()
623 if (qed_chain_get_cons_idx(&txq->tx_pbl) == hw_bd_cons + 1) in qede_txq_has_work()
626 return hw_bd_cons != qed_chain_get_cons_idx(&txq->tx_pbl); in qede_txq_has_work()
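
qede_txq_has_work() reads the hardware consumer through hw_cons_ptr (le16_to_cpu) and reports pending work when it differs from the chain's software consumer, with a guard for the one-ahead case. A runnable model of that comparison, assuming plain u16 counters instead of the le16 completion pointer and qed chain state:

#include <stdio.h>
#include <stdint.h>

/* Completions are pending when the consumer index reported by hardware
 * differs from the software consumer, except for the one-ahead guard. */
static int txq_has_work(uint16_t hw_cons, uint16_t sw_cons)
{
	if (sw_cons == (uint16_t)(hw_cons + 1))
		return 0;

	return hw_cons != sw_cons;
}

int main(void)
{
	printf("%d\n", txq_has_work(10, 10));  /* 0: nothing pending */
	printf("%d\n", txq_has_work(12, 10));  /* 1: two BDs left to complete */
	return 0;
}
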
630 struct qede_tx_queue *txq) in qede_tx_int() argument
637 netdev_txq = netdev_get_tx_queue(edev->ndev, txq->index); in qede_tx_int()
639 hw_bd_cons = le16_to_cpu(*txq->hw_cons_ptr); in qede_tx_int()
642 while (hw_bd_cons != qed_chain_get_cons_idx(&txq->tx_pbl)) { in qede_tx_int()
645 rc = qede_free_tx_pkt(edev, txq, &len); in qede_tx_int()
649 qed_chain_get_cons_idx(&txq->tx_pbl)); in qede_tx_int()
655 txq->sw_tx_cons++; in qede_tx_int()
686 (qed_chain_get_elem_left(&txq->tx_pbl) in qede_tx_int()
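
qede_tx_int() is the completion loop: look up the netdev TX queue, re-read hw_cons_ptr, keep calling qede_free_tx_pkt() and advancing sw_tx_cons until the chain consumer catches up with the hardware consumer, then check whether enough tx_pbl elements freed up to wake the queue. A simplified model of the loop (it counts one BD per packet, unlike the real ring, and complete_one_pkt() is a hypothetical stand-in for qede_free_tx_pkt()):

#include <stdio.h>
#include <stdint.h>

static uint16_t sw_cons;

/* Stand-in for qede_free_tx_pkt(): release one packet's resources. */
static void complete_one_pkt(void)
{
	printf("freed packet at cons=%u\n", (unsigned)sw_cons);
}

/* Drain completions up to the consumer index the hardware last reported. */
static int tx_int(uint16_t hw_bd_cons)
{
	int pkts = 0;

	while (hw_bd_cons != sw_cons) {
		complete_one_pkt();
		sw_cons++;
		pkts++;
	}
	return pkts;
}

int main(void)
{
	printf("completed %d packets\n", tx_int(3));
	return 0;
}
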
1701 struct qede_tx_queue *txq) in qede_free_mem_txq() argument
1704 kfree(txq->sw_tx_ring); in qede_free_mem_txq()
1707 edev->ops->common->chain_free(edev->cdev, &txq->tx_pbl); in qede_free_mem_txq()
1712 struct qede_tx_queue *txq) in qede_alloc_mem_txq() argument
1717 txq->num_tx_buffers = edev->q_num_tx_buffers; in qede_alloc_mem_txq()
1720 size = sizeof(*txq->sw_tx_ring) * NUM_TX_BDS_MAX; in qede_alloc_mem_txq()
1721 txq->sw_tx_ring = kzalloc(size, GFP_KERNEL); in qede_alloc_mem_txq()
1722 if (!txq->sw_tx_ring) { in qede_alloc_mem_txq()
1732 &txq->tx_pbl); in qede_alloc_mem_txq()
1739 qede_free_mem_txq(edev, txq); in qede_alloc_mem_txq()
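
qede_alloc_mem_txq() sizes sw_tx_ring for NUM_TX_BDS_MAX entries with kzalloc() and allocates the hardware BD chain (tx_pbl) through the qed common ops, falling back to qede_free_mem_txq() if the chain allocation fails; the free path is the matching kfree() plus chain_free(). A small user-space sketch of that allocate/undo-on-failure pairing, with malloc/calloc standing in for the kernel allocators and the chain API:

#include <stdio.h>
#include <stdlib.h>

#define NUM_TX_BDS 64   /* illustrative ring size */

struct sw_tx_bd { void *skb; unsigned char flags; };

struct tx_queue {
	struct sw_tx_bd *sw_tx_ring;
	void *tx_pbl;                /* stand-in for the hardware BD chain */
};

static void free_mem_txq(struct tx_queue *txq)
{
	free(txq->sw_tx_ring);
	free(txq->tx_pbl);
	txq->sw_tx_ring = NULL;
	txq->tx_pbl = NULL;
}

static int alloc_mem_txq(struct tx_queue *txq)
{
	/* Zeroed software ring, one entry per BD slot (kzalloc in the driver). */
	txq->sw_tx_ring = calloc(NUM_TX_BDS, sizeof(*txq->sw_tx_ring));
	if (!txq->sw_tx_ring)
		goto err;

	/* Stand-in for edev->ops->common->chain_alloc(). */
	txq->tx_pbl = malloc(4096);
	if (!txq->tx_pbl)
		goto err;

	return 0;
err:
	free_mem_txq(txq);   /* undo whatever part succeeded */
	return -1;
}

int main(void)
{
	struct tx_queue txq = { 0 };

	if (alloc_mem_txq(&txq) == 0)
		printf("txq memory allocated\n");
	free_mem_txq(&txq);
	return 0;
}
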
1982 struct qede_tx_queue *txq, in qede_drain_txq() argument
1987 while (txq->sw_tx_cons != txq->sw_tx_prod) { in qede_drain_txq()
1992 txq->index); in qede_drain_txq()
1996 return qede_drain_txq(edev, txq, false); in qede_drain_txq()
2000 txq->index, txq->sw_tx_prod, in qede_drain_txq()
2001 txq->sw_tx_cons); in qede_drain_txq()
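
qede_drain_txq() polls until sw_tx_cons catches up with sw_tx_prod, retrying once via the recursive call with allow_drain=false and reporting the stuck producer/consumer pair when it gives up. A simplified polling model with an explicit retry budget (the driver sleeps between polls; simulate_completion() is purely illustrative):

#include <stdio.h>
#include <stdint.h>

static uint16_t sw_tx_prod = 4;
static uint16_t sw_tx_cons;

/* Stand-in for completions arriving from the hardware. */
static void simulate_completion(void)
{
	sw_tx_cons++;
}

/* Poll until the queue is empty or the retry budget runs out. */
static int drain_txq(int max_polls)
{
	while (sw_tx_cons != sw_tx_prod) {
		if (max_polls-- == 0) {
			fprintf(stderr, "drain timed out: prod=%u cons=%u\n",
				(unsigned)sw_tx_prod, (unsigned)sw_tx_cons);
			return -1;
		}
		simulate_completion();
	}
	return 0;
}

int main(void)
{
	printf("drain rc=%d\n", drain_txq(100));
	return 0;
}
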
2039 struct qede_tx_queue *txq = &fp->txqs[tc]; in qede_stop_queues() local
2041 rc = qede_drain_txq(edev, txq, true); in qede_stop_queues()
2141 struct qede_tx_queue *txq = &fp->txqs[tc]; in qede_start_queues() local
2152 txq->tx_pbl.pbl.p_phys_table, in qede_start_queues()
2153 txq->tx_pbl.page_cnt, in qede_start_queues()
2154 &txq->doorbell_addr); in qede_start_queues()
2161 txq->hw_cons_ptr = in qede_start_queues()
2163 SET_FIELD(txq->tx_db.data.params, in qede_start_queues()
2165 SET_FIELD(txq->tx_db.data.params, ETH_DB_DATA_AGG_CMD, in qede_start_queues()
2167 SET_FIELD(txq->tx_db.data.params, in qede_start_queues()
2171 txq->tx_db.data.agg_flags = DQ_XCM_ETH_DQ_CF_CMD; in qede_start_queues()
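
qede_start_queues() wires the queue into the device: the chain's p_phys_table and page_cnt go to the queue-start call, which returns doorbell_addr; hw_cons_ptr is pointed at the TX completion index; and the doorbell data word is packed with SET_FIELD() (destination, aggregation command, value selector) plus the DQ_XCM_ETH_DQ_CF_CMD aggregation flag. A stand-alone illustration of the SET_FIELD-style mask/shift packing; the field names, masks and shifts below are invented for the example, not the ETH_DB_DATA layout:

#include <stdio.h>
#include <stdint.h>

/* Invented field layout; the real masks/shifts live in the qed/qede headers. */
#define DB_DEST_MASK      0x3
#define DB_DEST_SHIFT     0
#define DB_AGG_CMD_MASK   0x3
#define DB_AGG_CMD_SHIFT  2
#define DB_VAL_SEL_MASK   0xF
#define DB_VAL_SEL_SHIFT  4

/* Same shape as the kernel's SET_FIELD(): clear the field, then OR in the
 * new value shifted into place. */
#define SET_FIELD(word, name, val)					       \
	do {								       \
		(word) &= ~((uint32_t)name##_MASK << name##_SHIFT);	       \
		(word) |= ((uint32_t)(val) & name##_MASK) << name##_SHIFT;    \
	} while (0)

int main(void)
{
	uint32_t params = 0;

	SET_FIELD(params, DB_DEST, 1);     /* e.g. doorbell destination */
	SET_FIELD(params, DB_AGG_CMD, 2);  /* e.g. aggregation command */
	SET_FIELD(params, DB_VAL_SEL, 3);  /* e.g. "TX BD producer" selector */

	printf("params = 0x%08x\n", (unsigned)params);
	return 0;
}
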