Lines matching refs:cq — the inline completion-queue (CQ) helpers operating on struct t4_cq, from the cxgb4 iWARP driver header t4.h.
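/*
 * Completion-queue (CQ) helpers.  Hardware DMAs CQEs into cq->queue,
 * with ownership tracked by a generation bit.  The driver returns
 * consumer-index credits and (re)arms interrupts via GTS doorbell
 * writes: to the queue's BAR2 user doorbell region when one is mapped,
 * otherwise to the legacy GTS register.  A parallel software queue
 * carries driver-synthesized CQEs.
 */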

static inline void write_gts(struct t4_cq *cq, u32 val)
{
        if (cq->bar2_va)
                writel(val | INGRESSQID_V(cq->bar2_qid),
                       cq->bar2_va + SGE_UDB_GTS);
        else
                writel(val | INGRESSQID_V(cq->cqid), cq->gts);
}

static inline int t4_clear_cq_armed(struct t4_cq *cq)
{
        return test_and_clear_bit(CQ_ARMED, &cq->flags);
}

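/*
 * Arm the CQ for notification.  Credits that exceed what one doorbell
 * write can carry (CIDXINC_M) are returned first without arming; the
 * final write returns the remainder and arms the interrupt, with a
 * nonzero @se requesting solicited-event-only notification.  Per the
 * cxgb4 NIC driver's SGE timer definitions, timer index 7 appears to
 * request a CIDX-only update and 6 a pacing-counter restart.
 */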
static inline int t4_arm_cq(struct t4_cq *cq, int se)
{
        u32 val;

        set_bit(CQ_ARMED, &cq->flags);
        while (cq->cidx_inc > CIDXINC_M) {
                val = SEINTARM_V(0) | CIDXINC_V(CIDXINC_M) | TIMERREG_V(7);
                write_gts(cq, val);
                cq->cidx_inc -= CIDXINC_M;
        }
        val = SEINTARM_V(se) | CIDXINC_V(cq->cidx_inc) | TIMERREG_V(6);
        write_gts(cq, val);
        cq->cidx_inc = 0;
        return 0;
}

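/*
 * The software CQ (sw_queue, sw_pidx, sw_cidx) holds completions the
 * driver synthesizes itself, for example when flushing a QP, as
 * opposed to CQEs DMA'd by hardware into cq->queue.  Producing into a
 * full software queue is treated as a fatal driver bug.
 */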
static inline void t4_swcq_produce(struct t4_cq *cq)
{
        cq->sw_in_use++;
        if (cq->sw_in_use == cq->size) {
                PDBG("%s cxgb4 sw cq overflow cqid %u\n", __func__, cq->cqid);
                cq->error = 1;
                BUG_ON(1);
        }
        if (++cq->sw_pidx == cq->size)
                cq->sw_pidx = 0;
}

static inline void t4_swcq_consume(struct t4_cq *cq)
{
        BUG_ON(cq->sw_in_use < 1);
        cq->sw_in_use--;
        if (++cq->sw_cidx == cq->size)
                cq->sw_cidx = 0;
}

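/*
 * Consume one hardware CQE: cache its bits_type_ts word (used by
 * t4_next_hw_cqe() for overflow detection) and batch credit returns,
 * issuing one unarmed GTS write per size/16 entries consumed, or
 * sooner if the increment field would saturate at CIDXINC_M.
 */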
static inline void t4_hwcq_consume(struct t4_cq *cq)
{
        cq->bits_type_ts = cq->queue[cq->cidx].bits_type_ts;
        if (++cq->cidx_inc == (cq->size >> 4) || cq->cidx_inc == CIDXINC_M) {
                u32 val;

                val = SEINTARM_V(0) | CIDXINC_V(cq->cidx_inc) | TIMERREG_V(7);
                write_gts(cq, val);
                cq->cidx_inc = 0;
        }
        if (++cq->cidx == cq->size) {
                cq->cidx = 0;
                cq->gen ^= 1;
        }
}

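/*
 * A CQE belongs to software when its generation bit matches cq->gen;
 * hardware stamps each CQE with the current generation, and cq->gen
 * toggles every time the consumer index wraps.
 */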
static inline int t4_valid_cqe(struct t4_cq *cq, struct t4_cqe *cqe)
{
        return (CQE_GENBIT(cqe) == cq->gen);
}

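/*
 * Peek at the next hardware CQE without consuming it.  Overflow is
 * detected by re-reading the entry just behind cidx: if its
 * bits_type_ts no longer matches the value cached at consume time,
 * the hardware producer has lapped the software consumer.
 */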
static inline int t4_next_hw_cqe(struct t4_cq *cq, struct t4_cqe **cqe)
{
        int ret;
        u16 prev_cidx;

        if (cq->cidx == 0)
                prev_cidx = cq->size - 1;
        else
                prev_cidx = cq->cidx - 1;

        if (cq->queue[prev_cidx].bits_type_ts != cq->bits_type_ts) {
                ret = -EOVERFLOW;
                cq->error = 1;
                printk(KERN_ERR MOD "cq overflow cqid %u\n", cq->cqid);
                BUG_ON(1);
        } else if (t4_valid_cqe(cq, &cq->queue[cq->cidx])) {
                /* Ensure CQE is flushed to memory */
                rmb();
                *cqe = &cq->queue[cq->cidx];
                ret = 0;
        } else
                ret = -ENODATA;
        return ret;
}

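/*
 * Return the next software-queue CQE, or NULL when the software queue
 * is empty; a full queue is flagged the same way as in
 * t4_swcq_produce().
 */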
static inline struct t4_cqe *t4_next_sw_cqe(struct t4_cq *cq)
{
        if (cq->sw_in_use == cq->size) {
                PDBG("%s cxgb4 sw cq overflow cqid %u\n", __func__, cq->cqid);
                cq->error = 1;
                BUG_ON(1);
                return NULL;
        }
        if (cq->sw_in_use)
                return &cq->sw_queue[cq->sw_cidx];
        return NULL;
}

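/*
 * Fetch the next CQE to report: a CQ already in error yields
 * -ENODATA, software-queue entries drain ahead of new hardware CQEs,
 * and only then is the hardware queue peeked.
 */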
static inline int t4_next_cqe(struct t4_cq *cq, struct t4_cqe **cqe)
{
        int ret = 0;

        if (cq->error)
                ret = -ENODATA;
        else if (cq->sw_in_use)
                *cqe = &cq->sw_queue[cq->sw_cidx];
        else
                ret = t4_next_hw_cqe(cq, cqe);
        return ret;
}

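/*
 * A minimal poll-loop sketch, not part of t4.h, showing how the
 * helpers above compose.  The real consumer, c4iw_poll_cq() in cq.c,
 * additionally holds the CQ lock and translates each CQE into an
 * ib_wc; poll_one_sketch() is a hypothetical name for illustration.
 */
static inline int poll_one_sketch(struct t4_cq *cq)
{
        struct t4_cqe *cqe;
        int ret = t4_next_cqe(cq, &cqe);

        if (ret)
                return ret;     /* CQ in error, or nothing to poll */
        /* ... a real caller would build a work completion from *cqe ... */
        if (cq->sw_in_use)
                t4_swcq_consume(cq);    /* the CQE came from sw_queue */
        else
                t4_hwcq_consume(cq);    /* advance the hw consumer index */
        return 0;
}
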
/*
 * A t4_status_page lives just past the last CQE (cq->queue[cq->size]);
 * its qp_err field records that the CQ's queue state has been pushed
 * into error, e.g. by t4_set_cq_in_error() below.
 */
static inline int t4_cq_in_error(struct t4_cq *cq)
{
        return ((struct t4_status_page *)&cq->queue[cq->size])->qp_err;
}

static inline void t4_set_cq_in_error(struct t4_cq *cq)
{
        ((struct t4_status_page *)&cq->queue[cq->size])->qp_err = 1;
}