Lines Matching refs:cq

650 struct mthca_cq *cq; in mthca_create_cq() local
674 cq = kmalloc(sizeof *cq, GFP_KERNEL); in mthca_create_cq()
675 if (!cq) { in mthca_create_cq()
681 cq->buf.mr.ibmr.lkey = ucmd.lkey; in mthca_create_cq()
682 cq->set_ci_db_index = ucmd.set_db_index; in mthca_create_cq()
683 cq->arm_db_index = ucmd.arm_db_index; in mthca_create_cq()
692 cq); in mthca_create_cq()
696 if (context && ib_copy_to_udata(udata, &cq->cqn, sizeof (__u32))) { in mthca_create_cq()
697 mthca_free_cq(to_mdev(ibdev), cq); in mthca_create_cq()
702 cq->resize_buf = NULL; in mthca_create_cq()
704 return &cq->ibcq; in mthca_create_cq()
707 kfree(cq); in mthca_create_cq()
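
The fragments above are the CQ creation path: the mthca_cq struct is kmalloc'd, the doorbell indices and lkey from the userspace command are recorded when a user context is present, the new CQ number is copied back to userspace, and any failure after that point unwinds through mthca_free_cq() before the error exit frees the struct. A minimal userspace sketch of what drives this path through libibverbs (device choice and CQ depth are illustrative; the verbs calls themselves are the standard libibverbs API):

#include <stdio.h>
#include <infiniband/verbs.h>

int main(void)
{
	struct ibv_device **dev_list = ibv_get_device_list(NULL);
	struct ibv_context *ctx;
	struct ibv_cq *cq;

	if (!dev_list || !dev_list[0]) {
		fprintf(stderr, "no RDMA devices\n");
		return 1;
	}

	ctx = ibv_open_device(dev_list[0]);	/* first device; illustrative */
	if (!ctx) {
		ibv_free_device_list(dev_list);
		return 1;
	}

	/* 256 entries, no completion channel, comp vector 0.  On an
	 * mthca (InfiniHost) device this request ends up in
	 * mthca_create_cq(). */
	cq = ibv_create_cq(ctx, 256, NULL, NULL, 0);
	if (!cq)
		perror("ibv_create_cq");
	else
		ibv_destroy_cq(cq);	/* reaches mthca_destroy_cq() */

	ibv_close_device(ctx);
	ibv_free_device_list(dev_list);
	return 0;
}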
722 static int mthca_alloc_resize_buf(struct mthca_dev *dev, struct mthca_cq *cq, in mthca_alloc_resize_buf() argument
727 spin_lock_irq(&cq->lock); in mthca_alloc_resize_buf()
728 if (cq->resize_buf) { in mthca_alloc_resize_buf()
733 cq->resize_buf = kmalloc(sizeof *cq->resize_buf, GFP_ATOMIC); in mthca_alloc_resize_buf()
734 if (!cq->resize_buf) { in mthca_alloc_resize_buf()
739 cq->resize_buf->state = CQ_RESIZE_ALLOC; in mthca_alloc_resize_buf()
744 spin_unlock_irq(&cq->lock); in mthca_alloc_resize_buf()
749 ret = mthca_alloc_cq_buf(dev, &cq->resize_buf->buf, entries); in mthca_alloc_resize_buf()
751 spin_lock_irq(&cq->lock); in mthca_alloc_resize_buf()
752 kfree(cq->resize_buf); in mthca_alloc_resize_buf()
753 cq->resize_buf = NULL; in mthca_alloc_resize_buf()
754 spin_unlock_irq(&cq->lock); in mthca_alloc_resize_buf()
758 cq->resize_buf->cqe = entries - 1; in mthca_alloc_resize_buf()
760 spin_lock_irq(&cq->lock); in mthca_alloc_resize_buf()
761 cq->resize_buf->state = CQ_RESIZE_READY; in mthca_alloc_resize_buf()
762 spin_unlock_irq(&cq->lock); in mthca_alloc_resize_buf()
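
Pieced together, these fragments show the locking discipline in mthca_alloc_resize_buf(): the small resize_buf bookkeeping is allocated with GFP_ATOMIC while cq->lock is held and marked CQ_RESIZE_ALLOC, the large CQE buffer is then allocated outside the lock, and the state only moves to CQ_RESIZE_READY (or the bookkeeping is discarded) once that allocation has resolved. A condensed reconstruction as it would sit in mthca_provider.c, relying on the driver's own types; the error codes and goto structure are filled in where the listing omits them, so treat those lines as an approximation rather than a verbatim quote:

static int mthca_alloc_resize_buf(struct mthca_dev *dev, struct mthca_cq *cq,
				  int entries)
{
	int ret;

	/* Only one resize may be in flight; the bookkeeping struct is
	 * small, so it can be allocated atomically under the CQ lock. */
	spin_lock_irq(&cq->lock);
	if (cq->resize_buf) {
		ret = -EBUSY;
		goto unlock;
	}

	cq->resize_buf = kmalloc(sizeof *cq->resize_buf, GFP_ATOMIC);
	if (!cq->resize_buf) {
		ret = -ENOMEM;
		goto unlock;
	}

	cq->resize_buf->state = CQ_RESIZE_ALLOC;

	ret = 0;

unlock:
	spin_unlock_irq(&cq->lock);

	if (ret)
		return ret;

	/* The actual CQE buffer may be large, so allocate it with the
	 * lock dropped; on failure, undo the bookkeeping under the lock. */
	ret = mthca_alloc_cq_buf(dev, &cq->resize_buf->buf, entries);
	if (ret) {
		spin_lock_irq(&cq->lock);
		kfree(cq->resize_buf);
		cq->resize_buf = NULL;
		spin_unlock_irq(&cq->lock);
		return ret;
	}

	cq->resize_buf->cqe = entries - 1;

	spin_lock_irq(&cq->lock);
	cq->resize_buf->state = CQ_RESIZE_READY;
	spin_unlock_irq(&cq->lock);

	return 0;
}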
770 struct mthca_cq *cq = to_mcq(ibcq); in mthca_resize_cq() local
778 mutex_lock(&cq->mutex); in mthca_resize_cq()
786 if (cq->is_kernel) { in mthca_resize_cq()
787 ret = mthca_alloc_resize_buf(dev, cq, entries); in mthca_resize_cq()
790 lkey = cq->resize_buf->buf.mr.ibmr.lkey; in mthca_resize_cq()
799 ret = mthca_RESIZE_CQ(dev, cq->cqn, lkey, ilog2(entries)); in mthca_resize_cq()
802 if (cq->resize_buf) { in mthca_resize_cq()
803 mthca_free_cq_buf(dev, &cq->resize_buf->buf, in mthca_resize_cq()
804 cq->resize_buf->cqe); in mthca_resize_cq()
805 kfree(cq->resize_buf); in mthca_resize_cq()
806 spin_lock_irq(&cq->lock); in mthca_resize_cq()
807 cq->resize_buf = NULL; in mthca_resize_cq()
808 spin_unlock_irq(&cq->lock); in mthca_resize_cq()
813 if (cq->is_kernel) { in mthca_resize_cq()
817 spin_lock_irq(&cq->lock); in mthca_resize_cq()
818 if (cq->resize_buf->state == CQ_RESIZE_READY) { in mthca_resize_cq()
819 mthca_cq_resize_copy_cqes(cq); in mthca_resize_cq()
820 tbuf = cq->buf; in mthca_resize_cq()
821 tcqe = cq->ibcq.cqe; in mthca_resize_cq()
822 cq->buf = cq->resize_buf->buf; in mthca_resize_cq()
823 cq->ibcq.cqe = cq->resize_buf->cqe; in mthca_resize_cq()
825 tbuf = cq->resize_buf->buf; in mthca_resize_cq()
826 tcqe = cq->resize_buf->cqe; in mthca_resize_cq()
829 kfree(cq->resize_buf); in mthca_resize_cq()
830 cq->resize_buf = NULL; in mthca_resize_cq()
831 spin_unlock_irq(&cq->lock); in mthca_resize_cq()
838 mutex_unlock(&cq->mutex); in mthca_resize_cq()
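
mthca_resize_cq() itself holds cq->mutex for the whole operation: it allocates the resize buffer for kernel CQs, issues the RESIZE_CQ firmware command with ilog2(entries) (the entry count has been rounded up to a power of two earlier in the function, which the listing does not show), and then, under cq->lock, either swaps the new buffer in when the state is still CQ_RESIZE_READY or frees whichever buffer lost the race. Continuing the userspace sketch above, a resize request is just (CQ depth is arbitrary here):

	/* For kernel CQs this goes through mthca_alloc_resize_buf();
	 * for a userspace CQ the library registers the new buffer and
	 * passes its lkey to the driver instead. */
	if (ibv_resize_cq(cq, 1024))
		perror("ibv_resize_cq");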
843 static int mthca_destroy_cq(struct ib_cq *cq) in mthca_destroy_cq() argument
845 if (cq->uobject) { in mthca_destroy_cq()
846 mthca_unmap_user_db(to_mdev(cq->device), in mthca_destroy_cq()
847 &to_mucontext(cq->uobject->context)->uar, in mthca_destroy_cq()
848 to_mucontext(cq->uobject->context)->db_tab, in mthca_destroy_cq()
849 to_mcq(cq)->arm_db_index); in mthca_destroy_cq()
850 mthca_unmap_user_db(to_mdev(cq->device), in mthca_destroy_cq()
851 &to_mucontext(cq->uobject->context)->uar, in mthca_destroy_cq()
852 to_mucontext(cq->uobject->context)->db_tab, in mthca_destroy_cq()
853 to_mcq(cq)->set_ci_db_index); in mthca_destroy_cq()
855 mthca_free_cq(to_mdev(cq->device), to_mcq(cq)); in mthca_destroy_cq()
856 kfree(cq); in mthca_destroy_cq()
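
The destroy path distinguishes user CQs from kernel CQs by cq->uobject: for a user CQ the two doorbell records registered at create time (arm and consumer index) are unmapped again, then the hardware CQ and the host structure are released. Reassembled into one piece from the fragments above, with only the braces and the trailing return filled in:

static int mthca_destroy_cq(struct ib_cq *cq)
{
	if (cq->uobject) {
		/* User CQ: drop the mappings of the userspace doorbell
		 * records set up at create time, arm doorbell first,
		 * then the consumer-index (set_ci) doorbell. */
		mthca_unmap_user_db(to_mdev(cq->device),
				    &to_mucontext(cq->uobject->context)->uar,
				    to_mucontext(cq->uobject->context)->db_tab,
				    to_mcq(cq)->arm_db_index);
		mthca_unmap_user_db(to_mdev(cq->device),
				    &to_mucontext(cq->uobject->context)->uar,
				    to_mucontext(cq->uobject->context)->db_tab,
				    to_mcq(cq)->set_ci_db_index);
	}
	mthca_free_cq(to_mdev(cq->device), to_mcq(cq));
	kfree(cq);

	return 0;
}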