Lines Matching refs:wq — references to the wq work queue (struct t4_wq) in the cxgb4 iWARP QP code, listed by file line number and enclosing function.

149 static int destroy_qp(struct c4iw_rdev *rdev, struct t4_wq *wq,  in destroy_qp()  argument
157 wq->rq.memsize, wq->rq.queue, in destroy_qp()
158 dma_unmap_addr(&wq->rq, mapping)); in destroy_qp()
159 dealloc_sq(rdev, &wq->sq); in destroy_qp()
160 c4iw_rqtpool_free(rdev, wq->rq.rqt_hwaddr, wq->rq.rqt_size); in destroy_qp()
161 kfree(wq->rq.sw_rq); in destroy_qp()
162 kfree(wq->sq.sw_sq); in destroy_qp()
163 c4iw_put_qpid(rdev, wq->rq.qid, uctx); in destroy_qp()
164 c4iw_put_qpid(rdev, wq->sq.qid, uctx); in destroy_qp()
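Read together, the destroy_qp() matches above give the teardown order for a work queue. A minimal sketch of that order, reconstructed only from the fragments listed here (the dma_free_coherent() call wrapping lines 157-158 is an assumption, since the enclosing line does not itself reference wq):

    /* sketch of the teardown order implied by the destroy_qp() matches */
    dma_free_coherent(&rdev->lldi.pdev->dev,           /* assumed wrapper; only the   */
                      wq->rq.memsize, wq->rq.queue,    /* arguments appear at 157-158 */
                      dma_unmap_addr(&wq->rq, mapping));
    dealloc_sq(rdev, &wq->sq);                         /* release the send queue      */
    c4iw_rqtpool_free(rdev, wq->rq.rqt_hwaddr, wq->rq.rqt_size);
    kfree(wq->rq.sw_rq);                               /* software shadow rings       */
    kfree(wq->sq.sw_sq);
    c4iw_put_qpid(rdev, wq->rq.qid, uctx);             /* QP ids are released last    */
    c4iw_put_qpid(rdev, wq->sq.qid, uctx);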
168 static int create_qp(struct c4iw_rdev *rdev, struct t4_wq *wq, in create_qp() argument
181 wq->sq.qid = c4iw_get_qpid(rdev, uctx); in create_qp()
182 if (!wq->sq.qid) in create_qp()
185 wq->rq.qid = c4iw_get_qpid(rdev, uctx); in create_qp()
186 if (!wq->rq.qid) { in create_qp()
192 wq->sq.sw_sq = kzalloc(wq->sq.size * sizeof *wq->sq.sw_sq, in create_qp()
194 if (!wq->sq.sw_sq) { in create_qp()
199 wq->rq.sw_rq = kzalloc(wq->rq.size * sizeof *wq->rq.sw_rq, in create_qp()
201 if (!wq->rq.sw_rq) { in create_qp()
210 wq->rq.rqt_size = roundup_pow_of_two(max_t(u16, wq->rq.size, 16)); in create_qp()
211 wq->rq.rqt_hwaddr = c4iw_rqtpool_alloc(rdev, wq->rq.rqt_size); in create_qp()
212 if (!wq->rq.rqt_hwaddr) { in create_qp()
217 ret = alloc_sq(rdev, &wq->sq, user); in create_qp()
220 memset(wq->sq.queue, 0, wq->sq.memsize); in create_qp()
221 dma_unmap_addr_set(&wq->sq, mapping, wq->sq.dma_addr); in create_qp()
223 wq->rq.queue = dma_alloc_coherent(&(rdev->lldi.pdev->dev), in create_qp()
224 wq->rq.memsize, &(wq->rq.dma_addr), in create_qp()
226 if (!wq->rq.queue) { in create_qp()
231 __func__, wq->sq.queue, in create_qp()
232 (unsigned long long)virt_to_phys(wq->sq.queue), in create_qp()
233 wq->rq.queue, in create_qp()
234 (unsigned long long)virt_to_phys(wq->rq.queue)); in create_qp()
235 memset(wq->rq.queue, 0, wq->rq.memsize); in create_qp()
236 dma_unmap_addr_set(&wq->rq, mapping, wq->rq.dma_addr); in create_qp()
238 wq->db = rdev->lldi.db_reg; in create_qp()
239 wq->gts = rdev->lldi.gts_reg; in create_qp()
243 off = (wq->sq.qid << rdev->qpshift) & PAGE_MASK; in create_qp()
245 wq->sq.udb = (u64 __iomem *)(rdev->bar2_pa + off); in create_qp()
247 off += 128 * (wq->sq.qid & rdev->qpmask) + 8; in create_qp()
248 wq->sq.udb = (u64 __iomem *)(rdev->bar2_kva + off); in create_qp()
250 off = (wq->rq.qid << rdev->qpshift) & PAGE_MASK; in create_qp()
252 wq->rq.udb = (u64 __iomem *)(rdev->bar2_pa + off); in create_qp()
254 off += 128 * (wq->rq.qid & rdev->qpmask) + 8; in create_qp()
255 wq->rq.udb = (u64 __iomem *)(rdev->bar2_kva + off); in create_qp()
258 wq->rdev = rdev; in create_qp()
259 wq->rq.msn = 1; in create_qp()
286 eqsize = wq->sq.size * T4_SQ_NUM_SLOTS + in create_qp()
293 (t4_sq_onchip(&wq->sq) ? FW_RI_RES_WR_ONCHIP_F : 0) | in create_qp()
303 res->u.sqrq.eqid = cpu_to_be32(wq->sq.qid); in create_qp()
304 res->u.sqrq.eqaddr = cpu_to_be64(wq->sq.dma_addr); in create_qp()
312 eqsize = wq->rq.size * T4_RQ_NUM_SLOTS + in create_qp()
327 res->u.sqrq.eqid = cpu_to_be32(wq->rq.qid); in create_qp()
328 res->u.sqrq.eqaddr = cpu_to_be64(wq->rq.dma_addr); in create_qp()
335 ret = c4iw_wait_for_reply(rdev, &wr_wait, 0, wq->sq.qid, __func__); in create_qp()
340 __func__, wq->sq.qid, wq->rq.qid, wq->db, in create_qp()
341 (__force unsigned long) wq->sq.udb, in create_qp()
342 (__force unsigned long) wq->rq.udb); in create_qp()
347 wq->rq.memsize, wq->rq.queue, in create_qp()
348 dma_unmap_addr(&wq->rq, mapping)); in create_qp()
350 dealloc_sq(rdev, &wq->sq); in create_qp()
352 c4iw_rqtpool_free(rdev, wq->rq.rqt_hwaddr, wq->rq.rqt_size); in create_qp()
354 kfree(wq->rq.sw_rq); in create_qp()
356 kfree(wq->sq.sw_sq); in create_qp()
358 c4iw_put_qpid(rdev, wq->rq.qid, uctx); in create_qp()
360 c4iw_put_qpid(rdev, wq->sq.qid, uctx); in create_qp()
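The matches at 347-360 evidently form create_qp()'s error unwind: they repeat the destroy_qp() sequence, releasing resources in reverse allocation order. The matches at 243-255 are the part worth expanding: they derive the SQ and RQ user-doorbell addresses from the QP id. The branch structure is not visible in the matched lines; a plausible reconstruction, assuming the two udb assignments per queue sit on the user and kernel sides of a conditional on the existing "user" flag, is:

    /* BAR2 doorbell address for the SQ; the if/else is an assumption,
     * only the four expressions appear in the matches at 243-248 */
    off = (wq->sq.qid << rdev->qpshift) & PAGE_MASK;
    if (user) {
            wq->sq.udb = (u64 __iomem *)(rdev->bar2_pa + off);   /* physical address,    */
                                                                 /* handed out for mmap  */
    } else {
            off += 128 * (wq->sq.qid & rdev->qpmask) + 8;        /* per-QP doorbell slot */
            wq->sq.udb = (u64 __iomem *)(rdev->bar2_kva + off);  /* kernel-mapped view   */
    }
    /* lines 250-255 repeat the same pattern for the RQ with wq->rq.qid */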
578 ret = build_isgl((__be64 *)qhp->wq.rq.queue, in build_rdma_recv()
579 (__be64 *)&qhp->wq.rq.queue[qhp->wq.rq.size], in build_rdma_recv()
695 t4_ring_sq_db(&qhp->wq, inc, in ring_kernel_sq_db()
699 qhp->wq.sq.wq_pidx_inc += inc; in ring_kernel_sq_db()
713 t4_ring_rq_db(&qhp->wq, inc, in ring_kernel_rq_db()
717 qhp->wq.rq.wq_pidx_inc += inc; in ring_kernel_rq_db()
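Each ring_kernel_*_db() helper appears with a pair of matches: ring the hardware doorbell, or accumulate the pending producer-index increment in wq_pidx_inc. The condition that chooses between them and the trailing t4_ring_*_db() argument are not in the matched lines, so the following is only a hedged sketch (db_usable is a hypothetical stand-in, NULL is a placeholder):

    if (db_usable)                             /* hypothetical state check          */
            t4_ring_sq_db(&qhp->wq, inc, NULL);/* third argument not in the matches */
    else
            qhp->wq.sq.wq_pidx_inc += inc;     /* defer; flushed later              */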
740 if (t4_wq_in_error(&qhp->wq)) { in c4iw_post_send()
744 num_wrs = t4_sq_avail(&qhp->wq); in c4iw_post_send()
755 wqe = (union t4_wr *)((u8 *)qhp->wq.sq.queue + in c4iw_post_send()
756 qhp->wq.sq.wq_pidx * T4_EQ_ENTRY_SIZE); in c4iw_post_send()
763 swsqe = &qhp->wq.sq.sw_sq[qhp->wq.sq.pidx]; in c4iw_post_send()
774 err = build_rdma_send(&qhp->wq.sq, wqe, wr, &len16); in c4iw_post_send()
779 err = build_rdma_write(&qhp->wq.sq, wqe, wr, &len16); in c4iw_post_send()
793 if (!qhp->wq.sq.oldest_read) in c4iw_post_send()
794 qhp->wq.sq.oldest_read = swsqe; in c4iw_post_send()
799 err = build_fastreg(&qhp->wq.sq, wqe, wr, &len16, in c4iw_post_send()
820 swsqe->idx = qhp->wq.sq.pidx; in c4iw_post_send()
832 init_wr_hdr(wqe, qhp->wq.sq.pidx, fw_opcode, fw_flags, len16); in c4iw_post_send()
835 __func__, (unsigned long long)wr->wr_id, qhp->wq.sq.pidx, in c4iw_post_send()
839 t4_sq_produce(&qhp->wq, len16); in c4iw_post_send()
843 t4_ring_sq_db(&qhp->wq, idx, in c4iw_post_send()
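In sequence, the c4iw_post_send() matches sketch the per-work-request posting loop. The reconstruction below keeps only the wq-touching steps from the fragments; locking, the iteration over wr, opcode selection and error handling are omitted because they are not visible in the matches, and the goto label and the final NULL are placeholders:

    if (t4_wq_in_error(&qhp->wq))              /* refuse to post on a QP in error */
            goto out;                          /* hypothetical error path         */
    num_wrs = t4_sq_avail(&qhp->wq);           /* free SQ slots                   */

    /* per wr: next hardware WQE slot and its software shadow entry */
    wqe = (union t4_wr *)((u8 *)qhp->wq.sq.queue +
                          qhp->wq.sq.wq_pidx * T4_EQ_ENTRY_SIZE);
    swsqe = &qhp->wq.sq.sw_sq[qhp->wq.sq.pidx];
    err = build_rdma_send(&qhp->wq.sq, wqe, wr, &len16);  /* or build_rdma_write() / */
                                                          /* build_fastreg(), per op */
    swsqe->idx = qhp->wq.sq.pidx;
    init_wr_hdr(wqe, qhp->wq.sq.pidx, fw_opcode, fw_flags, len16);
    t4_sq_produce(&qhp->wq, len16);            /* advance the producer index      */

    t4_ring_sq_db(&qhp->wq, idx, NULL);        /* one doorbell after the loop; the  */
                                               /* third argument is a placeholder   */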
866 if (t4_wq_in_error(&qhp->wq)) { in c4iw_post_receive()
870 num_wrs = t4_rq_avail(&qhp->wq); in c4iw_post_receive()
881 wqe = (union t4_recv_wr *)((u8 *)qhp->wq.rq.queue + in c4iw_post_receive()
882 qhp->wq.rq.wq_pidx * in c4iw_post_receive()
893 qhp->wq.rq.sw_rq[qhp->wq.rq.pidx].wr_id = wr->wr_id; in c4iw_post_receive()
895 qhp->wq.rq.sw_rq[qhp->wq.rq.pidx].sge_ts = in c4iw_post_receive()
899 &qhp->wq.rq.sw_rq[qhp->wq.rq.pidx].host_ts); in c4iw_post_receive()
904 wqe->recv.wrid = qhp->wq.rq.pidx; in c4iw_post_receive()
910 (unsigned long long) wr->wr_id, qhp->wq.rq.pidx); in c4iw_post_receive()
911 t4_rq_produce(&qhp->wq, len16); in c4iw_post_receive()
917 t4_ring_rq_db(&qhp->wq, idx, in c4iw_post_receive()
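c4iw_post_receive() mirrors the send path on the receive queue: check the error state, check t4_rq_avail(), fill the next RQ WQE, record the wr_id (and a timestamp) in the software ring, then produce and ring. A compressed sketch from the fragments, with the same caveats as above (the entry-size macro and the placeholder arguments are assumptions):

    if (t4_wq_in_error(&qhp->wq))
            goto out;                                      /* hypothetical error path */
    num_wrs = t4_rq_avail(&qhp->wq);

    wqe = (union t4_recv_wr *)((u8 *)qhp->wq.rq.queue +
                               qhp->wq.rq.wq_pidx * T4_EQ_ENTRY_SIZE);
    qhp->wq.rq.sw_rq[qhp->wq.rq.pidx].wr_id = wr->wr_id;   /* for completion lookup   */
    wqe->recv.wrid = qhp->wq.rq.pidx;
    t4_rq_produce(&qhp->wq, len16);

    t4_ring_rq_db(&qhp->wq, idx, NULL);                    /* placeholder third arg   */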
1075 PDBG("%s qhp %p qid 0x%x tid %u\n", __func__, qhp, qhp->wq.sq.qid, in post_terminate()
1117 if (qhp->wq.flushed) { in __flush_qp()
1122 qhp->wq.flushed = 1; in __flush_qp()
1125 c4iw_count_rcqes(&rchp->cq, &qhp->wq, &count); in __flush_qp()
1126 rq_flushed = c4iw_flush_rq(&qhp->wq, &rchp->cq, count); in __flush_qp()
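The __flush_qp() matches show the flush being made idempotent: if wq.flushed is already set the function bails out early, otherwise it marks the queue flushed, counts the receive CQEs already in the completion queue and flushes the remainder of the RQ. A sketch, with the early-exit body assumed:

    if (qhp->wq.flushed) {
            /* already flushed; the actual early-exit code is not in the matches */
            return;
    }
    qhp->wq.flushed = 1;
    c4iw_count_rcqes(&rchp->cq, &qhp->wq, &count);
    rq_flushed = c4iw_flush_rq(&qhp->wq, &rchp->cq, count);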
1171 t4_set_wq_in_error(&qhp->wq); in flush_qp()
1196 PDBG("%s qhp %p qid 0x%x tid %u\n", __func__, qhp, qhp->wq.sq.qid, in rdma_fini()
1220 qhp->wq.sq.qid, __func__); in rdma_fini()
1258 qhp->wq.sq.qid, qhp->ep->hwtid, qhp->ep->ird, qhp->ep->ord); in rdma_init()
1302 wqe->u.init.nrqe = cpu_to_be16(t4_rqes_posted(&qhp->wq)); in rdma_init()
1304 wqe->u.init.qpid = cpu_to_be32(qhp->wq.sq.qid); in rdma_init()
1305 wqe->u.init.sq_eqid = cpu_to_be32(qhp->wq.sq.qid); in rdma_init()
1306 wqe->u.init.rq_eqid = cpu_to_be32(qhp->wq.rq.qid); in rdma_init()
1313 wqe->u.init.hwrqsize = cpu_to_be32(qhp->wq.rq.rqt_size); in rdma_init()
1314 wqe->u.init.hwrqaddr = cpu_to_be32(qhp->wq.rq.rqt_hwaddr - in rdma_init()
1324 qhp->ep->hwtid, qhp->wq.sq.qid, __func__); in rdma_init()
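The rdma_init() matches show which wq fields are copied into the "init" work request that hands the queues to firmware: the SQ qid doubles as the QP id, the SQ and RQ egress-queue ids, the number of RQEs already posted, and the size and location of the hardware RQT allocation. Collected from the fragments (surrounding wqe->u.init fields are not shown, and rqt_base is a hypothetical name for the subtrahend cut off at line 1314):

    wqe->u.init.qpid     = cpu_to_be32(qhp->wq.sq.qid);    /* QP id == SQ qid */
    wqe->u.init.sq_eqid  = cpu_to_be32(qhp->wq.sq.qid);
    wqe->u.init.rq_eqid  = cpu_to_be32(qhp->wq.rq.qid);
    wqe->u.init.nrqe     = cpu_to_be16(t4_rqes_posted(&qhp->wq));
    wqe->u.init.hwrqsize = cpu_to_be32(qhp->wq.rq.rqt_size);
    wqe->u.init.hwrqaddr = cpu_to_be32(qhp->wq.rq.rqt_hwaddr -
                                       rqt_base /* not shown in the matches */);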
1348 qhp, qhp->wq.sq.qid, qhp->wq.rq.qid, qhp->ep, qhp->attr.state, in c4iw_modify_qp()
1437 t4_set_wq_in_error(&qhp->wq); in c4iw_modify_qp()
1450 t4_set_wq_in_error(&qhp->wq); in c4iw_modify_qp()
1467 t4_set_wq_in_error(&qhp->wq); in c4iw_modify_qp()
1508 if (!t4_sq_empty(&qhp->wq) || !t4_rq_empty(&qhp->wq)) { in c4iw_modify_qp()
1531 qhp->wq.sq.qid); in c4iw_modify_qp()
1588 remove_handle(rhp, &rhp->qpidr, qhp->wq.sq.qid); in c4iw_destroy_qp()
1600 destroy_qp(&rhp->rdev, &qhp->wq, in c4iw_destroy_qp()
1603 PDBG("%s ib_qp %p qpid 0x%0x\n", __func__, ib_qp, qhp->wq.sq.qid); in c4iw_destroy_qp()
1654 qhp->wq.sq.size = sqsize; in c4iw_create_qp()
1655 qhp->wq.sq.memsize = in c4iw_create_qp()
1657 sizeof(*qhp->wq.sq.queue) + 16 * sizeof(__be64); in c4iw_create_qp()
1658 qhp->wq.sq.flush_cidx = -1; in c4iw_create_qp()
1659 qhp->wq.rq.size = rqsize; in c4iw_create_qp()
1660 qhp->wq.rq.memsize = in c4iw_create_qp()
1662 sizeof(*qhp->wq.rq.queue); in c4iw_create_qp()
1665 qhp->wq.sq.memsize = roundup(qhp->wq.sq.memsize, PAGE_SIZE); in c4iw_create_qp()
1666 qhp->wq.rq.memsize = roundup(qhp->wq.rq.memsize, PAGE_SIZE); in c4iw_create_qp()
1669 ret = create_qp(&rhp->rdev, &qhp->wq, &schp->cq, &rchp->cq, in c4iw_create_qp()
1700 ret = insert_handle(rhp, &rhp->qpidr, qhp, qhp->wq.sq.qid); in c4iw_create_qp()
1725 if (t4_sq_onchip(&qhp->wq.sq)) { in c4iw_create_qp()
1735 uresp.sqid = qhp->wq.sq.qid; in c4iw_create_qp()
1736 uresp.sq_size = qhp->wq.sq.size; in c4iw_create_qp()
1737 uresp.sq_memsize = qhp->wq.sq.memsize; in c4iw_create_qp()
1738 uresp.rqid = qhp->wq.rq.qid; in c4iw_create_qp()
1739 uresp.rq_size = qhp->wq.rq.size; in c4iw_create_qp()
1740 uresp.rq_memsize = qhp->wq.rq.memsize; in c4iw_create_qp()
1761 mm1->addr = qhp->wq.sq.phys_addr; in c4iw_create_qp()
1762 mm1->len = PAGE_ALIGN(qhp->wq.sq.memsize); in c4iw_create_qp()
1765 mm2->addr = virt_to_phys(qhp->wq.rq.queue); in c4iw_create_qp()
1766 mm2->len = PAGE_ALIGN(qhp->wq.rq.memsize); in c4iw_create_qp()
1769 mm3->addr = (__force unsigned long)qhp->wq.sq.udb; in c4iw_create_qp()
1773 mm4->addr = (__force unsigned long)qhp->wq.rq.udb; in c4iw_create_qp()
1784 qhp->ibqp.qp_num = qhp->wq.sq.qid; in c4iw_create_qp()
1789 qhp->wq.sq.qid, qhp->wq.sq.size, qhp->wq.sq.memsize, in c4iw_create_qp()
1790 attrs->cap.max_send_wr, qhp->wq.rq.qid, qhp->wq.rq.size, in c4iw_create_qp()
1791 qhp->wq.rq.memsize, attrs->cap.max_recv_wr); in c4iw_create_qp()
1804 remove_handle(rhp, &rhp->qpidr, qhp->wq.sq.qid); in c4iw_create_qp()
1806 destroy_qp(&rhp->rdev, &qhp->wq, in c4iw_create_qp()
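The tail of the c4iw_create_qp() matches (1735-1784) shows what a userspace verbs consumer gets back: the queue ids and sizes in the create response, plus four mmap entries covering the SQ memory, the RQ memory, and the two BAR2 doorbells. A sketch of those assignments, taken from the fragments (the key and length bookkeeping around mm1-mm4 is not shown):

    uresp.sqid       = qhp->wq.sq.qid;
    uresp.sq_size    = qhp->wq.sq.size;
    uresp.sq_memsize = qhp->wq.sq.memsize;
    uresp.rqid       = qhp->wq.rq.qid;
    uresp.rq_size    = qhp->wq.rq.size;
    uresp.rq_memsize = qhp->wq.rq.memsize;

    mm1->addr = qhp->wq.sq.phys_addr;                    /* SQ queue memory          */
    mm1->len  = PAGE_ALIGN(qhp->wq.sq.memsize);
    mm2->addr = virt_to_phys(qhp->wq.rq.queue);          /* RQ queue memory          */
    mm2->len  = PAGE_ALIGN(qhp->wq.rq.memsize);
    mm3->addr = (__force unsigned long)qhp->wq.sq.udb;   /* SQ doorbell              */
    mm4->addr = (__force unsigned long)qhp->wq.rq.udb;   /* RQ doorbell              */

    qhp->ibqp.qp_num = qhp->wq.sq.qid;                   /* SQ qid doubles as QP num */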