Lines matching refs:tun_qp — each entry gives the source line number, the matching source line, and the enclosing function ("local"/"argument" marks how tun_qp is declared there).
468 struct mlx4_ib_demux_pv_qp *tun_qp; in mlx4_ib_send_to_slave() local
490 tun_qp = &tun_ctx->qp[0]; in mlx4_ib_send_to_slave()
492 tun_qp = &tun_ctx->qp[1]; in mlx4_ib_send_to_slave()
511 src_qp = tun_qp->qp; in mlx4_ib_send_to_slave()
526 spin_lock(&tun_qp->tx_lock); in mlx4_ib_send_to_slave()
527 if (tun_qp->tx_ix_head - tun_qp->tx_ix_tail >= in mlx4_ib_send_to_slave()
531 tun_tx_ix = (++tun_qp->tx_ix_head) & (MLX4_NUM_TUNNEL_BUFS - 1); in mlx4_ib_send_to_slave()
532 spin_unlock(&tun_qp->tx_lock); in mlx4_ib_send_to_slave()
536 tun_mad = (struct mlx4_rcv_tunnel_mad *) (tun_qp->tx_ring[tun_tx_ix].buf.addr); in mlx4_ib_send_to_slave()
537 if (tun_qp->tx_ring[tun_tx_ix].ah) in mlx4_ib_send_to_slave()
538 ib_destroy_ah(tun_qp->tx_ring[tun_tx_ix].ah); in mlx4_ib_send_to_slave()
539 tun_qp->tx_ring[tun_tx_ix].ah = ah; in mlx4_ib_send_to_slave()
541 tun_qp->tx_ring[tun_tx_ix].buf.map, in mlx4_ib_send_to_slave()
583 tun_qp->tx_ring[tun_tx_ix].buf.map, in mlx4_ib_send_to_slave()
587 list.addr = tun_qp->tx_ring[tun_tx_ix].buf.map; in mlx4_ib_send_to_slave()
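
The tx_ix_head/tx_ix_tail pair visible above (lines 526-532) implements a small power-of-two ring of send slots: a sender claims a slot by bumping tx_ix_head under tx_lock, failing when head runs MLX4_NUM_TUNNEL_BUFS ahead of tail, and the completion worker later retires the slot by bumping tx_ix_tail. A minimal user-space sketch of the same indexing scheme, with names only loosely mirroring the driver's fields:

#include <stdbool.h>
#include <stdio.h>

#define NUM_TUNNEL_BUFS 16                        /* power of two, like MLX4_NUM_TUNNEL_BUFS */

struct tx_ring {
	unsigned int head;                        /* bumped by senders (tx_ix_head) */
	unsigned int tail;                        /* bumped by the completion path (tx_ix_tail) */
};

/* Claim one send slot; fails when all NUM_TUNNEL_BUFS slots are in flight.
 * In the driver this runs under tun_qp->tx_lock; locking is omitted here. */
static bool claim_tx_slot(struct tx_ring *r, unsigned int *slot)
{
	if (r->head - r->tail >= NUM_TUNNEL_BUFS)
		return false;                             /* ring full */
	*slot = (++r->head) & (NUM_TUNNEL_BUFS - 1);      /* wrap with a mask */
	return true;
}

/* Completion path: retire the oldest in-flight slot. */
static void release_tx_slot(struct tx_ring *r)
{
	r->tail++;
}

int main(void)
{
	struct tx_ring r = { 0, 0 };
	unsigned int slot;

	while (claim_tx_slot(&r, &slot))
		printf("claimed slot %u\n", slot);        /* 16 claims succeed, then the ring is full */
	release_tx_slot(&r);
	printf("after one completion, claim %s\n",
	       claim_tx_slot(&r, &slot) ? "succeeds" : "fails");
	return 0;
}

Because head and tail are free-running unsigned counters, the "head - tail" test keeps working across wraparound, and the mask only ever touches the low bits used to index the ring.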
1118 struct mlx4_ib_demux_pv_qp *tun_qp, in mlx4_ib_post_pv_qp_buf() argument
1125 size = (tun_qp->qp->qp_type == IB_QPT_UD) ? in mlx4_ib_post_pv_qp_buf()
1128 sg_list.addr = tun_qp->ring[index].map; in mlx4_ib_post_pv_qp_buf()
1136 MLX4_TUN_SET_WRID_QPN(tun_qp->proxy_qpt); in mlx4_ib_post_pv_qp_buf()
1137 ib_dma_sync_single_for_device(ctx->ib_dev, tun_qp->ring[index].map, in mlx4_ib_post_pv_qp_buf()
1139 return ib_post_recv(tun_qp->qp, &recv_wr, &bad_recv_wr); in mlx4_ib_post_pv_qp_buf()
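
mlx4_ib_post_pv_qp_buf() stamps each posted receive with a wr_id that carries both the proxy QP type (MLX4_TUN_SET_WRID_QPN above) and the ring slot, so the completion side can recover which tunnel QP and which buffer a completion refers to (MLX4_TUN_WRID_QPN and the low-bit mask in the other functions). A rough, self-contained sketch of that kind of packing; the field layout here is invented, not the driver's exact macros:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define NUM_TUNNEL_BUFS 16   /* power of two so the slot index fits under a mask */

/* Hypothetical packing: the low bits carry the ring slot, the bits above carry
 * the proxy QP number. The real MLX4_TUN_* macros pick their own layout. */
static uint64_t make_wr_id(unsigned int qpn, unsigned int slot)
{
	return (uint64_t)qpn * NUM_TUNNEL_BUFS + slot;
}

static unsigned int wr_id_qpn(uint64_t wr_id)
{
	return (unsigned int)(wr_id / NUM_TUNNEL_BUFS);
}

static unsigned int wr_id_slot(uint64_t wr_id)
{
	return (unsigned int)(wr_id & (NUM_TUNNEL_BUFS - 1));
}

int main(void)
{
	uint64_t id = make_wr_id(1, 5);

	assert(wr_id_qpn(id) == 1 && wr_id_slot(id) == 5);
	printf("wr_id %llu -> qp %u, slot %u\n",
	       (unsigned long long)id, wr_id_qpn(id), wr_id_slot(id));
	return 0;
}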
1284 struct mlx4_ib_demux_pv_qp *tun_qp = &ctx->qp[MLX4_TUN_WRID_QPN(wc->wr_id)]; in mlx4_ib_multiplex_mad() local
1286 struct mlx4_tunnel_mad *tunnel = tun_qp->ring[wr_ix].addr; in mlx4_ib_multiplex_mad()
1309 ib_dma_sync_single_for_cpu(ctx->ib_dev, tun_qp->ring[wr_ix].map, in mlx4_ib_multiplex_mad()
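
Before the multiplex path reads the tunneled MAD out of tun_qp->ring[wr_ix].addr it syncs the streaming DMA mapping back to the CPU; the repost path in mlx4_ib_post_pv_qp_buf() syncs it toward the device again before ib_post_recv(). A kernel-style illustration of that pairing, not driver code; "dev", "map" and "len" stand in for ctx->ib_dev, tun_qp->ring[wr_ix].map and the receive buffer size:

#include <rdma/ib_verbs.h>

static void sync_rx_buf_around_cpu_access(struct ib_device *dev, u64 map, size_t len)
{
	/* Give the CPU a coherent view before parsing the tunneled MAD. */
	ib_dma_sync_single_for_cpu(dev, map, len, DMA_FROM_DEVICE);

	/* ... read the struct mlx4_tunnel_mad payload here ... */

	/* Hand the buffer back toward the device before it is reposted. */
	ib_dma_sync_single_for_device(dev, map, len, DMA_FROM_DEVICE);
}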
1395 struct mlx4_ib_demux_pv_qp *tun_qp; in mlx4_ib_alloc_pv_bufs() local
1401 tun_qp = &ctx->qp[qp_type]; in mlx4_ib_alloc_pv_bufs()
1403 tun_qp->ring = kzalloc(sizeof (struct mlx4_ib_buf) * MLX4_NUM_TUNNEL_BUFS, in mlx4_ib_alloc_pv_bufs()
1405 if (!tun_qp->ring) in mlx4_ib_alloc_pv_bufs()
1408 tun_qp->tx_ring = kcalloc(MLX4_NUM_TUNNEL_BUFS, in mlx4_ib_alloc_pv_bufs()
1411 if (!tun_qp->tx_ring) { in mlx4_ib_alloc_pv_bufs()
1412 kfree(tun_qp->ring); in mlx4_ib_alloc_pv_bufs()
1413 tun_qp->ring = NULL; in mlx4_ib_alloc_pv_bufs()
1426 tun_qp->ring[i].addr = kmalloc(rx_buf_size, GFP_KERNEL); in mlx4_ib_alloc_pv_bufs()
1427 if (!tun_qp->ring[i].addr) in mlx4_ib_alloc_pv_bufs()
1429 tun_qp->ring[i].map = ib_dma_map_single(ctx->ib_dev, in mlx4_ib_alloc_pv_bufs()
1430 tun_qp->ring[i].addr, in mlx4_ib_alloc_pv_bufs()
1433 if (ib_dma_mapping_error(ctx->ib_dev, tun_qp->ring[i].map)) { in mlx4_ib_alloc_pv_bufs()
1434 kfree(tun_qp->ring[i].addr); in mlx4_ib_alloc_pv_bufs()
1440 tun_qp->tx_ring[i].buf.addr = in mlx4_ib_alloc_pv_bufs()
1442 if (!tun_qp->tx_ring[i].buf.addr) in mlx4_ib_alloc_pv_bufs()
1444 tun_qp->tx_ring[i].buf.map = in mlx4_ib_alloc_pv_bufs()
1446 tun_qp->tx_ring[i].buf.addr, in mlx4_ib_alloc_pv_bufs()
1450 tun_qp->tx_ring[i].buf.map)) { in mlx4_ib_alloc_pv_bufs()
1451 kfree(tun_qp->tx_ring[i].buf.addr); in mlx4_ib_alloc_pv_bufs()
1454 tun_qp->tx_ring[i].ah = NULL; in mlx4_ib_alloc_pv_bufs()
1456 spin_lock_init(&tun_qp->tx_lock); in mlx4_ib_alloc_pv_bufs()
1457 tun_qp->tx_ix_head = 0; in mlx4_ib_alloc_pv_bufs()
1458 tun_qp->tx_ix_tail = 0; in mlx4_ib_alloc_pv_bufs()
1459 tun_qp->proxy_qpt = qp_type; in mlx4_ib_alloc_pv_bufs()
1466 ib_dma_unmap_single(ctx->ib_dev, tun_qp->tx_ring[i].buf.map, in mlx4_ib_alloc_pv_bufs()
1468 kfree(tun_qp->tx_ring[i].buf.addr); in mlx4_ib_alloc_pv_bufs()
1470 kfree(tun_qp->tx_ring); in mlx4_ib_alloc_pv_bufs()
1471 tun_qp->tx_ring = NULL; in mlx4_ib_alloc_pv_bufs()
1476 ib_dma_unmap_single(ctx->ib_dev, tun_qp->ring[i].map, in mlx4_ib_alloc_pv_bufs()
1478 kfree(tun_qp->ring[i].addr); in mlx4_ib_alloc_pv_bufs()
1480 kfree(tun_qp->ring); in mlx4_ib_alloc_pv_bufs()
1481 tun_qp->ring = NULL; in mlx4_ib_alloc_pv_bufs()
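
mlx4_ib_alloc_pv_bufs() follows the usual allocate-then-map loop with goto-style unwinding: on any failure it unmaps and frees every earlier slot before releasing the ring arrays themselves (the error labels around lines 1466-1481). A compact user-space sketch of the same unwind pattern, with malloc/free standing in for the kmalloc and ib_dma_map_single calls:

#include <stdlib.h>

#define NUM_BUFS 16

struct buf { void *addr; };

/* Allocate NUM_BUFS buffers; on failure, unwind everything allocated so far.
 * The driver does the same per slot, additionally unmapping each DMA mapping
 * in its error path. */
static int alloc_ring(struct buf ring[NUM_BUFS], size_t buf_size)
{
	int i;

	for (i = 0; i < NUM_BUFS; i++) {
		ring[i].addr = malloc(buf_size);
		if (!ring[i].addr)
			goto err;
	}
	return 0;

err:
	while (--i >= 0) {               /* frees slots 0 .. i-1, newest first */
		free(ring[i].addr);
		ring[i].addr = NULL;
	}
	return -1;
}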
1489 struct mlx4_ib_demux_pv_qp *tun_qp; in mlx4_ib_free_pv_qp_bufs() local
1495 tun_qp = &ctx->qp[qp_type]; in mlx4_ib_free_pv_qp_bufs()
1506 ib_dma_unmap_single(ctx->ib_dev, tun_qp->ring[i].map, in mlx4_ib_free_pv_qp_bufs()
1508 kfree(tun_qp->ring[i].addr); in mlx4_ib_free_pv_qp_bufs()
1512 ib_dma_unmap_single(ctx->ib_dev, tun_qp->tx_ring[i].buf.map, in mlx4_ib_free_pv_qp_bufs()
1514 kfree(tun_qp->tx_ring[i].buf.addr); in mlx4_ib_free_pv_qp_bufs()
1515 if (tun_qp->tx_ring[i].ah) in mlx4_ib_free_pv_qp_bufs()
1516 ib_destroy_ah(tun_qp->tx_ring[i].ah); in mlx4_ib_free_pv_qp_bufs()
1518 kfree(tun_qp->tx_ring); in mlx4_ib_free_pv_qp_bufs()
1519 kfree(tun_qp->ring); in mlx4_ib_free_pv_qp_bufs()
1525 struct mlx4_ib_demux_pv_qp *tun_qp; in mlx4_ib_tunnel_comp_worker() local
1532 tun_qp = &ctx->qp[MLX4_TUN_WRID_QPN(wc.wr_id)]; in mlx4_ib_tunnel_comp_worker()
1537 ret = mlx4_ib_post_pv_qp_buf(ctx, tun_qp, in mlx4_ib_tunnel_comp_worker()
1548 ib_destroy_ah(tun_qp->tx_ring[wc.wr_id & in mlx4_ib_tunnel_comp_worker()
1550 tun_qp->tx_ring[wc.wr_id & (MLX4_NUM_TUNNEL_BUFS - 1)].ah in mlx4_ib_tunnel_comp_worker()
1552 spin_lock(&tun_qp->tx_lock); in mlx4_ib_tunnel_comp_worker()
1553 tun_qp->tx_ix_tail++; in mlx4_ib_tunnel_comp_worker()
1554 spin_unlock(&tun_qp->tx_lock); in mlx4_ib_tunnel_comp_worker()
1565 ib_destroy_ah(tun_qp->tx_ring[wc.wr_id & in mlx4_ib_tunnel_comp_worker()
1567 tun_qp->tx_ring[wc.wr_id & (MLX4_NUM_TUNNEL_BUFS - 1)].ah in mlx4_ib_tunnel_comp_worker()
1569 spin_lock(&tun_qp->tx_lock); in mlx4_ib_tunnel_comp_worker()
1570 tun_qp->tx_ix_tail++; in mlx4_ib_tunnel_comp_worker()
1571 spin_unlock(&tun_qp->tx_lock); in mlx4_ib_tunnel_comp_worker()
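
The completion worker distinguishes receive completions (demultiplex the MAD, then repost the ring buffer) from send completions (destroy the address handle attached to the slot and advance tx_ix_tail so the slot can be reclaimed). A kernel-style outline of that shape only, not the driver's exact code: demux_mad() and repost_buf() are placeholders for mlx4_ib_multiplex_mad() and mlx4_ib_post_pv_qp_buf(), and the CQ is passed in rather than taken from the context.

#include <rdma/ib_verbs.h>
#include "mlx4_ib.h"   /* mlx4_ib_demux_pv_* types and MLX4_TUN_* macros */

static void drain_tunnel_cq(struct mlx4_ib_demux_pv_ctx *ctx, struct ib_cq *cq)
{
	struct ib_wc wc;

	while (ib_poll_cq(cq, 1, &wc) == 1) {
		struct mlx4_ib_demux_pv_qp *tun_qp =
			&ctx->qp[MLX4_TUN_WRID_QPN(wc.wr_id)];
		unsigned int slot = wc.wr_id & (MLX4_NUM_TUNNEL_BUFS - 1);

		if (wc.status == IB_WC_SUCCESS && wc.opcode == IB_WC_RECV) {
			/* Receive: forward the tunneled MAD to the slave,
			 * then give the ring buffer back to the hardware. */
			demux_mad(ctx, &wc);
			repost_buf(ctx, tun_qp, slot);
		} else if (wc.status == IB_WC_SUCCESS && wc.opcode == IB_WC_SEND) {
			/* Send done: drop the AH tied to the slot and retire
			 * the slot by advancing the tail (lines 1548-1554). */
			ib_destroy_ah(tun_qp->tx_ring[slot].ah);
			tun_qp->tx_ring[slot].ah = NULL;
			spin_lock(&tun_qp->tx_lock);
			tun_qp->tx_ix_tail++;
			spin_unlock(&tun_qp->tx_lock);
		}
		/* Error completions on send slots retire them the same way
		 * (lines 1565-1571 above). */
	}
}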
1590 struct mlx4_ib_demux_pv_qp *tun_qp; in create_pv_sqp() local
1598 tun_qp = &ctx->qp[qp_type]; in create_pv_sqp()
1624 tun_qp->qp = ib_create_qp(ctx->pd, &qp_init_attr.init_attr); in create_pv_sqp()
1625 if (IS_ERR(tun_qp->qp)) { in create_pv_sqp()
1626 ret = PTR_ERR(tun_qp->qp); in create_pv_sqp()
1627 tun_qp->qp = NULL; in create_pv_sqp()
1645 ret = ib_modify_qp(tun_qp->qp, &attr, qp_attr_mask_INIT); in create_pv_sqp()
1652 ret = ib_modify_qp(tun_qp->qp, &attr, IB_QP_STATE); in create_pv_sqp()
1660 ret = ib_modify_qp(tun_qp->qp, &attr, IB_QP_STATE | IB_QP_SQ_PSN); in create_pv_sqp()
1668 ret = mlx4_ib_post_pv_qp_buf(ctx, tun_qp, i); in create_pv_sqp()
1678 ib_destroy_qp(tun_qp->qp); in create_pv_sqp()
1679 tun_qp->qp = NULL; in create_pv_sqp()
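
create_pv_sqp() creates the tunnel/proxy QP and then walks it through the usual RESET -> INIT -> RTR -> RTS sequence with ib_modify_qp() before posting the receive ring (lines 1645-1668). A hedged sketch of that transition sequence for a UD QP; the pkey index, port and qkey values below are typical examples, not necessarily the driver's:

#include <rdma/ib_verbs.h>
#include <linux/string.h>

static int bring_up_ud_qp(struct ib_qp *qp, u32 qkey, u8 port)
{
	struct ib_qp_attr attr;
	int ret;

	memset(&attr, 0, sizeof(attr));
	attr.qp_state = IB_QPS_INIT;
	attr.pkey_index = 0;
	attr.port_num = port;
	attr.qkey = qkey;
	ret = ib_modify_qp(qp, &attr, IB_QP_STATE | IB_QP_PKEY_INDEX |
					IB_QP_PORT | IB_QP_QKEY);
	if (ret)
		return ret;

	memset(&attr, 0, sizeof(attr));
	attr.qp_state = IB_QPS_RTR;
	ret = ib_modify_qp(qp, &attr, IB_QP_STATE);
	if (ret)
		return ret;

	memset(&attr, 0, sizeof(attr));
	attr.qp_state = IB_QPS_RTS;
	attr.sq_psn = 0;
	return ib_modify_qp(qp, &attr, IB_QP_STATE | IB_QP_SQ_PSN);
}

Once the QP reaches RTS, create_pv_sqp() posts one receive per ring slot via mlx4_ib_post_pv_qp_buf() (line 1668) so the tunnel QP can start accepting MADs; on any failure it destroys the QP and clears tun_qp->qp, as in lines 1678-1679.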