Lines matching refs:wr in drivers/infiniband/hw/mthca/mthca_qp.c (each entry shows the source line number, the matching line, and the enclosing function; "argument" marks the lines where wr is declared as a parameter)
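All of these matches dereference the work-request structure handed to the post verbs. For orientation, the following is an abridged sketch of the ib_send_wr layout this code assumes (the old single-struct layout with per-operation fields in the wr union, before the WR types were split into ib_ud_wr/ib_rdma_wr/ib_atomic_wr); fields the listing never touches are elided.

struct ib_send_wr {
	struct ib_send_wr      *next;
	u64			wr_id;
	struct ib_sge	       *sg_list;
	int			num_sge;
	enum ib_wr_opcode	opcode;
	int			send_flags;
	union {
		__be32		imm_data;
		/* ... */
	} ex;
	union {
		struct {
			u64	remote_addr;
			u32	rkey;
		} rdma;
		struct {
			u64	remote_addr;
			u64	compare_add;
			u64	swap;
			u32	rkey;
		} atomic;
		struct {
			struct ib_ah   *ah;
			u32		remote_qpn;
			u32		remote_qkey;
			u16		pkey_index;
			/* ... */
		} ud;
	} wr;
};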
1479 int ind, struct ib_send_wr *wr, in build_mlx_header() argument
1488 mthca_ah_grh_present(to_mah(wr->wr.ud.ah)), 0, in build_mlx_header()
1491 err = mthca_read_ah(dev, to_mah(wr->wr.ud.ah), &sqp->ud_header); in build_mlx_header()
1502 switch (wr->opcode) { in build_mlx_header()
1510 sqp->ud_header.immediate_data = wr->ex.imm_data; in build_mlx_header()
1519 sqp->ud_header.bth.solicited_event = !!(wr->send_flags & IB_SEND_SOLICITED); in build_mlx_header()
1525 wr->wr.ud.pkey_index, &pkey); in build_mlx_header()
1527 sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->wr.ud.remote_qpn); in build_mlx_header()
1529 sqp->ud_header.deth.qkey = cpu_to_be32(wr->wr.ud.remote_qkey & 0x80000000 ? in build_mlx_header()
1530 sqp->qkey : wr->wr.ud.remote_qkey); in build_mlx_header()
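The matches at lines 1502 and 1510 sit inside the opcode switch of build_mlx_header(). Reconstructed here as a sketch of that era's driver code (not a verbatim copy), the surrounding context shows how wr->opcode picks the BTH opcode and whether wr->ex.imm_data is carried in the UD header:

	switch (wr->opcode) {
	case IB_WR_SEND:
		sqp->ud_header.bth.opcode = IB_OPCODE_UD_SEND_ONLY;
		sqp->ud_header.immediate_present = 0;
		break;
	case IB_WR_SEND_WITH_IMM:
		sqp->ud_header.bth.opcode = IB_OPCODE_UD_SEND_ONLY_WITH_IMMEDIATE;
		sqp->ud_header.immediate_present = 1;
		sqp->ud_header.immediate_data = wr->ex.imm_data;
		break;
	default:
		return -EINVAL;
	}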
1572 struct ib_send_wr *wr) in set_atomic_seg() argument
1574 if (wr->opcode == IB_WR_ATOMIC_CMP_AND_SWP) { in set_atomic_seg()
1575 aseg->swap_add = cpu_to_be64(wr->wr.atomic.swap); in set_atomic_seg()
1576 aseg->compare = cpu_to_be64(wr->wr.atomic.compare_add); in set_atomic_seg()
1578 aseg->swap_add = cpu_to_be64(wr->wr.atomic.compare_add); in set_atomic_seg()
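Only the wr-bearing lines of set_atomic_seg() appear above; the whole helper is short enough to reconstruct, which makes the fetch-and-add fallback around line 1578 clearer (a sketch of the original, not a verbatim copy):

static void set_atomic_seg(struct mthca_atomic_seg *aseg,
			   struct ib_send_wr *wr)
{
	if (wr->opcode == IB_WR_ATOMIC_CMP_AND_SWP) {
		aseg->swap_add = cpu_to_be64(wr->wr.atomic.swap);
		aseg->compare  = cpu_to_be64(wr->wr.atomic.compare_add);
	} else {
		/* fetch-and-add: only the add operand is used */
		aseg->swap_add = cpu_to_be64(wr->wr.atomic.compare_add);
		aseg->compare  = 0;
	}
}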
1585 struct ib_send_wr *wr) in set_tavor_ud_seg() argument
1587 useg->lkey = cpu_to_be32(to_mah(wr->wr.ud.ah)->key); in set_tavor_ud_seg()
1588 useg->av_addr = cpu_to_be64(to_mah(wr->wr.ud.ah)->avdma); in set_tavor_ud_seg()
1589 useg->dqpn = cpu_to_be32(wr->wr.ud.remote_qpn); in set_tavor_ud_seg()
1590 useg->qkey = cpu_to_be32(wr->wr.ud.remote_qkey); in set_tavor_ud_seg()
1595 struct ib_send_wr *wr) in set_arbel_ud_seg() argument
1597 memcpy(useg->av, to_mah(wr->wr.ud.ah)->av, MTHCA_AV_SIZE); in set_arbel_ud_seg()
1598 useg->dqpn = cpu_to_be32(wr->wr.ud.remote_qpn); in set_arbel_ud_seg()
1599 useg->qkey = cpu_to_be32(wr->wr.ud.remote_qkey); in set_arbel_ud_seg()
1602 int mthca_tavor_post_send(struct ib_qp *ibqp, struct ib_send_wr *wr, in mthca_tavor_post_send() argument
1632 for (nreq = 0; wr; ++nreq, wr = wr->next) { in mthca_tavor_post_send()
1639 *bad_wr = wr; in mthca_tavor_post_send()
1650 ((wr->send_flags & IB_SEND_SIGNALED) ? in mthca_tavor_post_send()
1652 ((wr->send_flags & IB_SEND_SOLICITED) ? in mthca_tavor_post_send()
1655 if (wr->opcode == IB_WR_SEND_WITH_IMM || in mthca_tavor_post_send()
1656 wr->opcode == IB_WR_RDMA_WRITE_WITH_IMM) in mthca_tavor_post_send()
1657 ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; in mthca_tavor_post_send()
1664 switch (wr->opcode) { in mthca_tavor_post_send()
1667 set_raddr_seg(wqe, wr->wr.atomic.remote_addr, in mthca_tavor_post_send()
1668 wr->wr.atomic.rkey); in mthca_tavor_post_send()
1671 set_atomic_seg(wqe, wr); in mthca_tavor_post_send()
1680 set_raddr_seg(wqe, wr->wr.rdma.remote_addr, in mthca_tavor_post_send()
1681 wr->wr.rdma.rkey); in mthca_tavor_post_send()
1694 switch (wr->opcode) { in mthca_tavor_post_send()
1697 set_raddr_seg(wqe, wr->wr.rdma.remote_addr, in mthca_tavor_post_send()
1698 wr->wr.rdma.rkey); in mthca_tavor_post_send()
1711 set_tavor_ud_seg(wqe, wr); in mthca_tavor_post_send()
1717 err = build_mlx_header(dev, to_msqp(qp), ind, wr, in mthca_tavor_post_send()
1721 *bad_wr = wr; in mthca_tavor_post_send()
1729 if (wr->num_sge > qp->sq.max_gs) { in mthca_tavor_post_send()
1732 *bad_wr = wr; in mthca_tavor_post_send()
1736 for (i = 0; i < wr->num_sge; ++i) { in mthca_tavor_post_send()
1737 mthca_set_data_seg(wqe, wr->sg_list + i); in mthca_tavor_post_send()
1751 qp->wrid[ind + qp->rq.max] = wr->wr_id; in mthca_tavor_post_send()
1753 if (wr->opcode >= ARRAY_SIZE(mthca_opcode)) { in mthca_tavor_post_send()
1756 *bad_wr = wr; in mthca_tavor_post_send()
1763 mthca_opcode[wr->opcode]); in mthca_tavor_post_send()
1767 ((wr->send_flags & IB_SEND_FENCE) ? in mthca_tavor_post_send()
1772 op0 = mthca_opcode[wr->opcode]; in mthca_tavor_post_send()
1773 f0 = wr->send_flags & IB_SEND_FENCE ? in mthca_tavor_post_send()
1805 int mthca_tavor_post_receive(struct ib_qp *ibqp, struct ib_recv_wr *wr, in mthca_tavor_post_receive() argument
1833 for (nreq = 0; wr; wr = wr->next) { in mthca_tavor_post_receive()
1840 *bad_wr = wr; in mthca_tavor_post_receive()
1855 if (unlikely(wr->num_sge > qp->rq.max_gs)) { in mthca_tavor_post_receive()
1857 *bad_wr = wr; in mthca_tavor_post_receive()
1861 for (i = 0; i < wr->num_sge; ++i) { in mthca_tavor_post_receive()
1862 mthca_set_data_seg(wqe, wr->sg_list + i); in mthca_tavor_post_receive()
1867 qp->wrid[ind] = wr->wr_id; in mthca_tavor_post_receive()
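The receive-side matches read only four fields, which is essentially the whole ib_recv_wr structure as it looked at the time (sketched here for reference):

struct ib_recv_wr {
	struct ib_recv_wr      *next;
	u64			wr_id;
	struct ib_sge	       *sg_list;
	int			num_sge;
};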
1916 int mthca_arbel_post_send(struct ib_qp *ibqp, struct ib_send_wr *wr, in mthca_arbel_post_send() argument
1947 for (nreq = 0; wr; ++nreq, wr = wr->next) { in mthca_arbel_post_send()
1980 *bad_wr = wr; in mthca_arbel_post_send()
1989 ((wr->send_flags & IB_SEND_SIGNALED) ? in mthca_arbel_post_send()
1991 ((wr->send_flags & IB_SEND_SOLICITED) ? in mthca_arbel_post_send()
1993 ((wr->send_flags & IB_SEND_IP_CSUM) ? in mthca_arbel_post_send()
1996 if (wr->opcode == IB_WR_SEND_WITH_IMM || in mthca_arbel_post_send()
1997 wr->opcode == IB_WR_RDMA_WRITE_WITH_IMM) in mthca_arbel_post_send()
1998 ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; in mthca_arbel_post_send()
2005 switch (wr->opcode) { in mthca_arbel_post_send()
2008 set_raddr_seg(wqe, wr->wr.atomic.remote_addr, in mthca_arbel_post_send()
2009 wr->wr.atomic.rkey); in mthca_arbel_post_send()
2012 set_atomic_seg(wqe, wr); in mthca_arbel_post_send()
2021 set_raddr_seg(wqe, wr->wr.rdma.remote_addr, in mthca_arbel_post_send()
2022 wr->wr.rdma.rkey); in mthca_arbel_post_send()
2035 switch (wr->opcode) { in mthca_arbel_post_send()
2038 set_raddr_seg(wqe, wr->wr.rdma.remote_addr, in mthca_arbel_post_send()
2039 wr->wr.rdma.rkey); in mthca_arbel_post_send()
2052 set_arbel_ud_seg(wqe, wr); in mthca_arbel_post_send()
2058 err = build_mlx_header(dev, to_msqp(qp), ind, wr, in mthca_arbel_post_send()
2062 *bad_wr = wr; in mthca_arbel_post_send()
2070 if (wr->num_sge > qp->sq.max_gs) { in mthca_arbel_post_send()
2073 *bad_wr = wr; in mthca_arbel_post_send()
2077 for (i = 0; i < wr->num_sge; ++i) { in mthca_arbel_post_send()
2078 mthca_set_data_seg(wqe, wr->sg_list + i); in mthca_arbel_post_send()
2092 qp->wrid[ind + qp->rq.max] = wr->wr_id; in mthca_arbel_post_send()
2094 if (wr->opcode >= ARRAY_SIZE(mthca_opcode)) { in mthca_arbel_post_send()
2097 *bad_wr = wr; in mthca_arbel_post_send()
2104 mthca_opcode[wr->opcode]); in mthca_arbel_post_send()
2108 ((wr->send_flags & IB_SEND_FENCE) ? in mthca_arbel_post_send()
2113 op0 = mthca_opcode[wr->opcode]; in mthca_arbel_post_send()
2114 f0 = wr->send_flags & IB_SEND_FENCE ? in mthca_arbel_post_send()
2156 int mthca_arbel_post_receive(struct ib_qp *ibqp, struct ib_recv_wr *wr, in mthca_arbel_post_receive() argument
2174 for (nreq = 0; wr; ++nreq, wr = wr->next) { in mthca_arbel_post_receive()
2181 *bad_wr = wr; in mthca_arbel_post_receive()
2191 if (unlikely(wr->num_sge > qp->rq.max_gs)) { in mthca_arbel_post_receive()
2193 *bad_wr = wr; in mthca_arbel_post_receive()
2197 for (i = 0; i < wr->num_sge; ++i) { in mthca_arbel_post_receive()
2198 mthca_set_data_seg(wqe, wr->sg_list + i); in mthca_arbel_post_receive()
2205 qp->wrid[ind] = wr->wr_id; in mthca_arbel_post_receive()
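For completeness, a hypothetical caller-side sketch of how a kernel ULP of that era would fill such a work request and hand it to the post verb that the functions above implement. qp, ah, mr, buf_dma, buf_len, remote_qpn, and remote_qkey are placeholders, not names from this driver.

/* Hypothetical caller-side sketch; requires <rdma/ib_verbs.h>. */
static int example_post_ud_send(struct ib_qp *qp, struct ib_ah *ah,
				struct ib_mr *mr, u64 buf_dma, u32 buf_len,
				u32 remote_qpn, u32 remote_qkey)
{
	struct ib_send_wr swr, *bad_wr;
	struct ib_sge sge;

	sge.addr   = buf_dma;			/* DMA address of the payload */
	sge.length = buf_len;
	sge.lkey   = mr->lkey;			/* MR covering the buffer */

	memset(&swr, 0, sizeof(swr));
	swr.wr_id	      = 1;		/* echoed back in the completion */
	swr.sg_list	      = &sge;
	swr.num_sge	      = 1;
	swr.opcode	      = IB_WR_SEND;
	swr.send_flags	      = IB_SEND_SIGNALED;
	swr.wr.ud.ah	      = ah;
	swr.wr.ud.remote_qpn  = remote_qpn;
	swr.wr.ud.remote_qkey = remote_qkey;

	/* dispatches to mthca_tavor_post_send() or mthca_arbel_post_send() */
	return ib_post_send(qp, &swr, &bad_wr);
}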