Lines matching refs: wr
42 static int build_rdma_send(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_send() argument
48 switch (wr->opcode) { in build_rdma_send()
50 if (wr->send_flags & IB_SEND_SOLICITED) in build_rdma_send()
57 if (wr->send_flags & IB_SEND_SOLICITED) in build_rdma_send()
61 wqe->send.rem_stag = cpu_to_be32(wr->ex.invalidate_rkey); in build_rdma_send()
66 if (wr->num_sge > T3_MAX_SGE) in build_rdma_send()
72 for (i = 0; i < wr->num_sge; i++) { in build_rdma_send()
73 if ((plen + wr->sg_list[i].length) < plen) in build_rdma_send()
76 plen += wr->sg_list[i].length; in build_rdma_send()
77 wqe->send.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_send()
78 wqe->send.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_send()
79 wqe->send.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); in build_rdma_send()
81 wqe->send.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_send()
82 *flit_cnt = 4 + ((wr->num_sge) << 1); in build_rdma_send()
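
build_rdma_send() copies each SGE into the WQE while guarding the running payload length against u32 wrap-around, and sizes the WQE at four header flits plus two flits per SGE. A minimal standalone sketch of those two patterns, with stand-in types rather than the driver's union t3_wr / struct ib_send_wr:

    #include <stdint.h>

    struct sge { uint32_t lkey, length; uint64_t addr; };

    /* Stand-in for the overflow-checked SGE walk: returns 0 and the total
     * payload length, or -1 if the u32 sum wraps around. */
    static int sum_sge_len(const struct sge *sgl, int num_sge, uint32_t *plen)
    {
        uint32_t len = 0;
        for (int i = 0; i < num_sge; i++) {
            if (len + sgl[i].length < len)   /* unsigned wrap == overflow */
                return -1;
            len += sgl[i].length;
        }
        *plen = len;
        return 0;
    }

    /* Send WQE size in flits: 4 header flits + 2 per SGE, per the listing. */
    static inline unsigned int send_flit_cnt(int num_sge)
    {
        return 4 + (num_sge << 1);
    }

    int main(void)
    {
        struct sge sgl[2] = { { 1, 4096, 0 }, { 2, 8192, 0 } };
        uint32_t plen;
        if (sum_sge_len(sgl, 2, &plen))
            return 1;
        return (send_flit_cnt(2) == 8 && plen == 12288) ? 0 : 1;
    }
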
87 static int build_rdma_write(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_write() argument
92 if (wr->num_sge > T3_MAX_SGE) in build_rdma_write()
98 wqe->write.stag_sink = cpu_to_be32(rdma_wr(wr)->rkey); in build_rdma_write()
99 wqe->write.to_sink = cpu_to_be64(rdma_wr(wr)->remote_addr); in build_rdma_write()
101 if (wr->opcode == IB_WR_RDMA_WRITE_WITH_IMM) { in build_rdma_write()
103 wqe->write.sgl[0].stag = wr->ex.imm_data; in build_rdma_write()
109 for (i = 0; i < wr->num_sge; i++) { in build_rdma_write()
110 if ((plen + wr->sg_list[i].length) < plen) { in build_rdma_write()
113 plen += wr->sg_list[i].length; in build_rdma_write()
115 cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_write()
117 cpu_to_be32(wr->sg_list[i].length); in build_rdma_write()
119 cpu_to_be64(wr->sg_list[i].addr); in build_rdma_write()
121 wqe->write.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_write()
122 *flit_cnt = 5 + ((wr->num_sge) << 1); in build_rdma_write()
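
build_rdma_write() pulls the remote rkey and address through rdma_wr(wr), the verbs accessor that recovers the enclosing struct ib_rdma_wr from its embedded struct ib_send_wr; the immediate-data variant parks wr->ex.imm_data in sgl[0].stag, and the WQE is five header flits plus two per SGE. A hedged sketch of that container-style accessor with simplified stand-in structs (the names here are illustrative, not the kernel's):

    #include <stddef.h>
    #include <stdint.h>

    /* Simplified stand-ins for struct ib_send_wr / struct ib_rdma_wr. */
    struct send_wr { int opcode; };
    struct rdma_wr_s {
        struct send_wr wr;        /* embedded base WR, passed around by pointer */
        uint64_t remote_addr;
        uint32_t rkey;
    };

    /* container_of-style accessor, as rdma_wr() does for the real types. */
    static inline struct rdma_wr_s *to_rdma_wr(struct send_wr *wr)
    {
        return (struct rdma_wr_s *)((char *)wr - offsetof(struct rdma_wr_s, wr));
    }

    int main(void)
    {
        struct rdma_wr_s rw = { .remote_addr = 0x1000, .rkey = 42 };
        struct send_wr *base = &rw.wr;     /* what the post verb actually receives */
        return to_rdma_wr(base)->rkey == 42 ? 0 : 1;
    }
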
128 static int build_rdma_read(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_read() argument
131 if (wr->num_sge > 1) in build_rdma_read()
134 if (wr->opcode == IB_WR_RDMA_READ_WITH_INV) in build_rdma_read()
140 wqe->read.rem_stag = cpu_to_be32(rdma_wr(wr)->rkey); in build_rdma_read()
141 wqe->read.rem_to = cpu_to_be64(rdma_wr(wr)->remote_addr); in build_rdma_read()
142 wqe->read.local_stag = cpu_to_be32(wr->sg_list[0].lkey); in build_rdma_read()
143 wqe->read.local_len = cpu_to_be32(wr->sg_list[0].length); in build_rdma_read()
144 wqe->read.local_to = cpu_to_be64(wr->sg_list[0].addr); in build_rdma_read()
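
T3 limits an RDMA read to a single local SGE (num_sge > 1 is rejected), so a consumer that wants to scatter one remote region across several local buffers has to post one read per buffer. A hypothetical consumer-side sketch of that split (types and names are illustrative only, not part of the driver):

    #include <stdint.h>

    struct sge { uint32_t lkey, length; uint64_t addr; };

    /* Hypothetical: one logical read split into single-SGE reads, the remote
     * offset advancing by each local segment's length. */
    struct read_req { uint64_t remote_addr; uint32_t rkey; struct sge local; };

    static int split_read(uint64_t remote_addr, uint32_t rkey,
                          const struct sge *sgl, int num_sge,
                          struct read_req *out, int max_out)
    {
        if (num_sge > max_out)
            return -1;
        for (int i = 0; i < num_sge; i++) {
            out[i].remote_addr = remote_addr;
            out[i].rkey = rkey;
            out[i].local = sgl[i];
            remote_addr += sgl[i].length;
        }
        return num_sge;     /* number of single-SGE reads to post */
    }
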
149 static int build_memreg(union t3_wr *wqe, struct ib_reg_wr *wr, in build_memreg() argument
152 struct iwch_mr *mhp = to_iwch_mr(wr->mr); in build_memreg()
159 wqe->fastreg.stag = cpu_to_be32(wr->key); in build_memreg()
166 V_FR_PAGE_SIZE(ilog2(wr->mr->page_size) - 12) | in build_memreg()
168 V_FR_PERMS(iwch_ib_to_tpt_access(wr->access))); in build_memreg()
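
The fast-register WQE encodes the MR page size as ilog2(wr->mr->page_size) - 12, i.e. a shift relative to 4 KB pages. A tiny standalone check of that encoding (ilog2 modeled with a loop; the V_FR_PAGE_SIZE field packing itself is device-specific and not reproduced here):

    #include <assert.h>
    #include <stdint.h>

    /* Minimal ilog2 for power-of-two page sizes. */
    static unsigned int ilog2_u32(uint32_t v)
    {
        unsigned int r = 0;
        while (v >>= 1)
            r++;
        return r;
    }

    int main(void)
    {
        assert(ilog2_u32(4096)  - 12 == 0);  /* 4 KB pages  -> encoding 0 */
        assert(ilog2_u32(65536) - 12 == 4);  /* 64 KB pages -> encoding 4 */
        return 0;
    }
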
192 static int build_inv_stag(union t3_wr *wqe, struct ib_send_wr *wr, in build_inv_stag() argument
195 wqe->local_inv.stag = cpu_to_be32(wr->ex.invalidate_rkey); in build_inv_stag()
249 struct ib_recv_wr *wr) in build_rdma_recv() argument
255 err = iwch_sgl2pbl_map(qhp->rhp, wr->sg_list, wr->num_sge, pbl_addr, in build_rdma_recv()
263 wqe->recv.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_recv()
264 for (i = 0; i < wr->num_sge; i++) { in build_rdma_recv()
265 wqe->recv.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_recv()
266 wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_recv()
269 wqe->recv.sgl[i].to = cpu_to_be64(((u32)wr->sg_list[i].addr) & in build_rdma_recv()
282 qhp->wq.rq_size_log2)].wr_id = wr->wr_id; in build_rdma_recv()
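
build_rdma_recv() finishes by stashing wr_id in a software queue slot derived from the RQ write pointer and rq_size_log2, i.e. a power-of-two ring index. A minimal sketch of that indexing, assuming the ring holds 1 << size_log2 entries (the driver's own macro is not reproduced here):

    #include <stdint.h>

    /* Ring of 1 << size_log2 entries: a free-running pointer is masked
     * down to a slot, so wrap-around needs no modulo. */
    static inline uint32_t ptr2idx(uint32_t ptr, uint32_t size_log2)
    {
        return ptr & ((1u << size_log2) - 1);
    }

    int main(void)
    {
        /* e.g. a 64-entry RQ (size_log2 == 6): pointer 70 lands in slot 6 */
        return ptr2idx(70, 6) == 6 ? 0 : 1;
    }
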
289 struct ib_recv_wr *wr) in build_zero_stag_recv() argument
311 wqe->recv.num_sgle = cpu_to_be32(wr->num_sge); in build_zero_stag_recv()
313 for (i = 0; i < wr->num_sge; i++) { in build_zero_stag_recv()
322 if (wr->sg_list[i].length > T3_STAG0_MAX_PBE_LEN) in build_zero_stag_recv()
329 if (wr->sg_list[i].lkey != 0) in build_zero_stag_recv()
332 wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_zero_stag_recv()
333 wqe->recv.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); in build_zero_stag_recv()
345 qhp->wq.rq_size_log2)].wr_id = wr->wr_id; in build_zero_stag_recv()
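
The zero-STag path accepts only SGEs whose lkey is 0 and whose length stays within T3_STAG0_MAX_PBE_LEN. A simplified validation sketch under those two rules, taking the limit as a parameter since its actual value lives in the driver headers:

    #include <stdint.h>

    struct sge { uint32_t lkey, length; uint64_t addr; };

    /* Reject any SGE that is not zero-STag or exceeds the per-PBE limit. */
    static int check_stag0_sgl(const struct sge *sgl, int num_sge,
                               uint32_t max_pbe_len)
    {
        for (int i = 0; i < num_sge; i++) {
            if (sgl[i].lkey != 0)
                return -1;               /* only STag 0 is allowed on this path */
            if (sgl[i].length > max_pbe_len)
                return -1;               /* segment too long for one PBE */
        }
        return 0;
    }
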
351 int iwch_post_send(struct ib_qp *ibqp, struct ib_send_wr *wr, in iwch_post_send() argument
380 while (wr) { in iwch_post_send()
388 if (wr->send_flags & IB_SEND_SOLICITED) in iwch_post_send()
390 if (wr->send_flags & IB_SEND_SIGNALED) in iwch_post_send()
394 switch (wr->opcode) { in iwch_post_send()
397 if (wr->send_flags & IB_SEND_FENCE) in iwch_post_send()
400 err = build_rdma_send(wqe, wr, &t3_wr_flit_cnt); in iwch_post_send()
405 err = build_rdma_write(wqe, wr, &t3_wr_flit_cnt); in iwch_post_send()
411 err = build_rdma_read(wqe, wr, &t3_wr_flit_cnt); in iwch_post_send()
420 err = build_memreg(wqe, reg_wr(wr), &t3_wr_flit_cnt, in iwch_post_send()
424 if (wr->send_flags & IB_SEND_FENCE) in iwch_post_send()
427 err = build_inv_stag(wqe, wr, &t3_wr_flit_cnt); in iwch_post_send()
431 wr->opcode); in iwch_post_send()
437 sqp->wr_id = wr->wr_id; in iwch_post_send()
441 sqp->signaled = (wr->send_flags & IB_SEND_SIGNALED); in iwch_post_send()
448 __func__, (unsigned long long) wr->wr_id, idx, in iwch_post_send()
451 wr = wr->next; in iwch_post_send()
462 *bad_wr = wr; in iwch_post_send()
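
iwch_post_send() walks the wr->next chain, dispatches on wr->opcode to the builders above, records wr_id and the signaled flag per entry, and on failure reports the offending request through *bad_wr, following the usual verbs convention that everything before *bad_wr was posted and everything from it on was not. A simplified model of that loop shape with stand-in types (build_one() is a hypothetical placeholder for the per-opcode builders):

    #include <stdint.h>

    struct send_wr {
        struct send_wr *next;
        uint64_t wr_id;
        int opcode;
    };

    /* Hypothetical stand-in for build_rdma_send/_write/_read and friends. */
    static int build_one(const struct send_wr *wr) { (void)wr; return 0; }

    static int post_send(struct send_wr *wr, struct send_wr **bad_wr)
    {
        int err = 0;

        while (wr) {
            /* A WQE would be built here, then wr_id and the signaled
             * flag recorded for the matching software queue entry. */
            err = build_one(wr);
            if (err)
                break;
            wr = wr->next;
        }
        if (err)
            *bad_wr = wr;    /* first request that was NOT posted */
        return err;
    }
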
466 int iwch_post_receive(struct ib_qp *ibqp, struct ib_recv_wr *wr, in iwch_post_receive() argument
485 if (!wr) { in iwch_post_receive()
490 while (wr) { in iwch_post_receive()
491 if (wr->num_sge > T3_MAX_SGE) { in iwch_post_receive()
498 if (wr->sg_list[0].lkey) in iwch_post_receive()
499 err = build_rdma_recv(qhp, wqe, wr); in iwch_post_receive()
501 err = build_zero_stag_recv(qhp, wqe, wr); in iwch_post_receive()
512 "wqe %p \n", __func__, (unsigned long long) wr->wr_id, in iwch_post_receive()
516 wr = wr->next; in iwch_post_receive()
525 *bad_wr = wr; in iwch_post_receive()
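
iwch_post_receive() rejects anything above T3_MAX_SGE and then picks a builder per request: a non-zero lkey on the first SGE goes through build_rdma_recv() (SGL mapped to a PBL), while lkey == 0 selects build_zero_stag_recv(). A small dispatch sketch under those rules, with simplified types and hypothetical builder stubs:

    #include <stdint.h>

    struct sge { uint32_t lkey, length; uint64_t addr; };
    struct recv_wr {
        struct recv_wr *next;
        uint64_t wr_id;
        int num_sge;
        struct sge *sg_list;
    };

    /* Hypothetical stubs standing in for the driver's two receive builders. */
    static int build_mapped_recv(const struct recv_wr *wr) { (void)wr; return 0; }
    static int build_stag0_recv(const struct recv_wr *wr)  { (void)wr; return 0; }

    static int build_recv(const struct recv_wr *wr, int max_sge)
    {
        if (wr->num_sge > max_sge)
            return -1;                        /* too many SGEs for one WQE */
        if (wr->sg_list[0].lkey)
            return build_mapped_recv(wr);     /* lkey != 0: SGL mapped to a PBL */
        return build_stag0_recv(wr);          /* lkey == 0: zero-STag path */
    }
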