Lines Matching refs:wr (UMR work-request construction and posting in the mlx5 memory-registration code)
687 static void prep_umr_reg_wqe(struct ib_pd *pd, struct ib_send_wr *wr, in prep_umr_reg_wqe() argument
694 struct mlx5_umr_wr *umrwr = (struct mlx5_umr_wr *)&wr->wr.fast_reg; in prep_umr_reg_wqe()
700 wr->next = NULL; in prep_umr_reg_wqe()
701 wr->send_flags = 0; in prep_umr_reg_wqe()
702 wr->sg_list = sg; in prep_umr_reg_wqe()
704 wr->num_sge = 1; in prep_umr_reg_wqe()
706 wr->num_sge = 0; in prep_umr_reg_wqe()
708 wr->opcode = MLX5_IB_WR_UMR; in prep_umr_reg_wqe()
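
These references are prep_umr_reg_wqe() building the generic ib_send_wr that carries a UMR (user-mode memory registration) request: the driver-private mlx5_umr_wr is overlaid on the wr.fast_reg union member, a single SGE pointing at the DMA-mapped page list is attached when there are pages to write (num_sge stays 0 otherwise), and the opcode is set to the driver-internal MLX5_IB_WR_UMR. Below is a minimal sketch of that pattern; the mlx5_umr_wr field names (npages, page_shift, mkey, pd) and the SGE details are assumptions for illustration, not taken from the excerpt, and the sketch is not a drop-in copy of the driver function.

	/* Sketch only: fill a UMR registration WQE the way the excerpted lines do.
	 * struct ib_send_wr, the wr.fast_reg overlay and MLX5_IB_WR_UMR come from
	 * the excerpt; the umrwr field names below are assumed for illustration. */
	static void sketch_prep_umr_reg_wqe(struct ib_pd *pd, struct ib_send_wr *wr,
					    struct ib_sge *sg, u64 dma, int npages,
					    u32 key, int page_shift)
	{
		/* Driver-private UMR request shares storage with the fast_reg member. */
		struct mlx5_umr_wr *umrwr = (struct mlx5_umr_wr *)&wr->wr.fast_reg;

		sg->addr = dma;                  /* DMA address of the page/MTT list
						  * (length/lkey setup omitted here)      */
		wr->next = NULL;                 /* single, unchained work request        */
		wr->send_flags = 0;
		wr->sg_list = sg;
		wr->num_sge = npages ? 1 : 0;    /* no SGE when there is nothing to write */
		wr->opcode = MLX5_IB_WR_UMR;     /* driver-internal UMR opcode            */

		umrwr->npages = npages;          /* assumed field names from here on:     */
		umrwr->page_shift = page_shift;  /* describe the memory being registered  */
		umrwr->mkey = key;
		umrwr->pd = pd;
	}
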
720 struct ib_send_wr *wr, u32 key) in prep_umr_unreg_wqe() argument
722 struct mlx5_umr_wr *umrwr = (struct mlx5_umr_wr *)&wr->wr.fast_reg; in prep_umr_unreg_wqe()
724 wr->send_flags = MLX5_IB_SEND_UMR_UNREG | MLX5_IB_SEND_UMR_FAIL_IF_FREE; in prep_umr_unreg_wqe()
725 wr->opcode = MLX5_IB_WR_UMR; in prep_umr_unreg_wqe()
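
The unregister side is much thinner: prep_umr_unreg_wqe() only has to name the MKey and ask the hardware to invalidate it, so it sets the UNREG and FAIL_IF_FREE flags and the UMR opcode. A minimal sketch follows, assuming the umrwr->mkey field name; the flags and opcode are taken from the excerpt.

	/* Sketch: a UMR teardown WQE carries little more than the key to invalidate. */
	static void sketch_prep_umr_unreg_wqe(struct ib_send_wr *wr, u32 key)
	{
		struct mlx5_umr_wr *umrwr = (struct mlx5_umr_wr *)&wr->wr.fast_reg;

		wr->send_flags = MLX5_IB_SEND_UMR_UNREG |        /* invalidate the MKey       */
				 MLX5_IB_SEND_UMR_FAIL_IF_FREE;  /* error out if already free */
		wr->opcode = MLX5_IB_WR_UMR;
		umrwr->mkey = key;                               /* assumed field name        */
	}
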
759 struct ib_send_wr wr, *bad; in reg_umr() local
805 memset(&wr, 0, sizeof(wr)); in reg_umr()
806 wr.wr_id = (u64)(unsigned long)&umr_context; in reg_umr()
807 prep_umr_reg_wqe(pd, &wr, &sg, dma, npages, mr->mmr.key, page_shift, in reg_umr()
812 err = ib_post_send(umrc->qp, &wr, &bad); in reg_umr()
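
In reg_umr() the work request lives on the stack: it is zeroed, wr_id is pointed at a completion context so the UMR CQ handler can wake the caller, the WQE is filled by prep_umr_reg_wqe(), and the request is posted on the dedicated UMR QP (umrc->qp). A minimal sketch of that post-and-wait shape; the umr_context layout (done completion, status field) is an assumption, the trailing prep_umr_reg_wqe() arguments are left elided as in the excerpt, and the driver's locking and error handling around the post are omitted.

	/* Sketch of the post-and-wait pattern around ib_post_send() in reg_umr(). */
	struct ib_send_wr wr, *bad;
	struct mlx5_ib_umr_context umr_context;   /* assumed: holds a struct completion */
	int err;

	memset(&wr, 0, sizeof(wr));
	/* The CQ handler recovers the context from wr_id and completes it. */
	wr.wr_id = (u64)(unsigned long)&umr_context;
	prep_umr_reg_wqe(pd, &wr, &sg, dma, npages, mr->mmr.key,
			 page_shift /* , ... remaining arguments elided in the excerpt */);

	err = ib_post_send(umrc->qp, &wr, &bad);  /* umrc->qp: the dedicated UMR QP */
	if (!err) {
		wait_for_completion(&umr_context.done);      /* assumed field */
		if (umr_context.status != IB_WC_SUCCESS)     /* assumed field */
			err = -EFAULT;
	}
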
858 struct ib_send_wr wr, *bad; in mlx5_ib_update_mtt() local
859 struct mlx5_umr_wr *umrwr = (struct mlx5_umr_wr *)&wr.wr.fast_reg; in mlx5_ib_update_mtt()
923 memset(&wr, 0, sizeof(wr)); in mlx5_ib_update_mtt()
924 wr.wr_id = (u64)(unsigned long)&umr_context; in mlx5_ib_update_mtt()
931 wr.send_flags = MLX5_IB_SEND_UMR_FAIL_IF_FREE | in mlx5_ib_update_mtt()
933 wr.sg_list = &sg; in mlx5_ib_update_mtt()
934 wr.num_sge = 1; in mlx5_ib_update_mtt()
935 wr.opcode = MLX5_IB_WR_UMR; in mlx5_ib_update_mtt()
943 err = ib_post_send(umrc->qp, &wr, &bad); in mlx5_ib_update_mtt()
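
mlx5_ib_update_mtt() builds its UMR work request inline instead of going through a prep helper: it overlays mlx5_umr_wr on wr.wr.fast_reg as before, keys the completion on wr_id, sets MLX5_IB_SEND_UMR_FAIL_IF_FREE plus a second flag that is cut off in the excerpt, attaches one SGE covering the chunk of translation entries to rewrite, and posts on the UMR QP. A minimal sketch; the umr_context and SGE contents are assumptions and the second send flag stays elided as in the excerpt.

	/* Sketch of the hand-rolled UMR update WQE in mlx5_ib_update_mtt(). */
	memset(&wr, 0, sizeof(wr));
	wr.wr_id = (u64)(unsigned long)&umr_context;   /* completion context, as in reg_umr() */

	wr.send_flags = MLX5_IB_SEND_UMR_FAIL_IF_FREE  /* second flag not shown in excerpt */;
	wr.sg_list = &sg;                              /* one SGE covering the MTT chunk   */
	wr.num_sge = 1;
	wr.opcode = MLX5_IB_WR_UMR;

	err = ib_post_send(umrc->qp, &wr, &bad);
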
1130 struct ib_send_wr wr, *bad; in unreg_umr() local
1133 memset(&wr, 0, sizeof(wr)); in unreg_umr()
1134 wr.wr_id = (u64)(unsigned long)&umr_context; in unreg_umr()
1135 prep_umr_unreg_wqe(dev, &wr, mr->mmr.key); in unreg_umr()
1139 err = ib_post_send(umrc->qp, &wr, &bad); in unreg_umr()
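
unreg_umr() follows the same shape as reg_umr(): zero the stack WR, point wr_id at a completion context, let prep_umr_unreg_wqe() fill in the MKey to invalidate, and post on the UMR QP. A minimal sketch, again assuming the umr_context completion field; locking and error paths are omitted.

	/* Sketch of the unregister post in unreg_umr(). */
	memset(&wr, 0, sizeof(wr));
	wr.wr_id = (u64)(unsigned long)&umr_context;
	prep_umr_unreg_wqe(dev, &wr, mr->mmr.key);     /* signature as shown in the excerpt */

	err = ib_post_send(umrc->qp, &wr, &bad);
	if (!err)
		wait_for_completion(&umr_context.done);  /* assumed completion field */
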