Lines matching refs: cq
77 static inline void comp_event_callback(struct ehca_cq *cq) in comp_event_callback() argument
79 if (!cq->ib_cq.comp_handler) in comp_event_callback()
82 spin_lock(&cq->cb_lock); in comp_event_callback()
83 cq->ib_cq.comp_handler(&cq->ib_cq, cq->ib_cq.cq_context); in comp_event_callback()
84 spin_unlock(&cq->cb_lock); in comp_event_callback()
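
The first cluster (lines 77-84) is the completion dispatch helper: fire the consumer's completion handler, if one is registered, under cq->cb_lock. A minimal reconstruction from the fragments above; only the guard condition at line 79 is shown, so the early return is an assumption:

    static inline void comp_event_callback(struct ehca_cq *cq)
    {
        /* No consumer handler registered for this CQ: nothing to do. */
        if (!cq->ib_cq.comp_handler)
            return;

        /* cb_lock serializes invocations of the consumer's handler. */
        spin_lock(&cq->cb_lock);
        cq->ib_cq.comp_handler(&cq->ib_cq, cq->ib_cq.cq_context);
        spin_unlock(&cq->cb_lock);
    }
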
111 struct ehca_cq *cq = (struct ehca_cq *)data; in print_error_data() local
115 cq->cq_number, resource); in print_error_data()
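
Lines 111-115 are the CQ branch of the error-data printer: the opaque resource pointer is cast back to struct ehca_cq so the CQ number can be logged. A sketch of that branch; the ehca_err() call and its wording are assumptions, only the cast and the argument list appear in the listing:

    struct ehca_cq *cq = (struct ehca_cq *)data;  /* opaque resource -> CQ */

    ehca_err(&shca->ib_device, "CQ 0x%x (resource=%llx) has errors.",
             cq->cq_number, resource);
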
243 struct ehca_cq *cq; in cq_event_callback() local
247 cq = idr_find(&ehca_cq_idr, token); in cq_event_callback()
248 if (cq) in cq_event_callback()
249 atomic_inc(&cq->nr_events); in cq_event_callback()
252 if (!cq) in cq_event_callback()
255 ehca_error_data(shca, cq, cq->ipz_cq_handle.handle); in cq_event_callback()
257 if (atomic_dec_and_test(&cq->nr_events)) in cq_event_callback()
258 wake_up(&cq->wait_completion); in cq_event_callback()
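
Lines 243-258 show the reference-counting pattern this file uses everywhere a CQ is resolved from an event: look the token up in ehca_cq_idr, pin the CQ by bumping nr_events before the lookup lock is dropped, and after processing drop the count and wake wait_completion so the destroy path can wait out in-flight events. A sketch, assuming the token extraction and the ehca_cq_idr_lock read lock that the listing omits:

    static void cq_event_callback(struct ehca_shca *shca, u64 eqe)
    {
        struct ehca_cq *cq;
        u32 token = EHCA_BMASK_GET(EQE_CQ_TOKEN, eqe); /* assumed mask helper */

        read_lock(&ehca_cq_idr_lock);        /* assumed lookup lock */
        cq = idr_find(&ehca_cq_idr, token);
        if (cq)
            atomic_inc(&cq->nr_events);      /* pin: one more in-flight event */
        read_unlock(&ehca_cq_idr_lock);

        if (!cq)
            return;                          /* CQ already destroyed */

        ehca_error_data(shca, cq, cq->ipz_cq_handle.handle);

        /* Drop the pin; a waiter in the destroy path sleeps on
         * wait_completion until nr_events reaches zero. */
        if (atomic_dec_and_test(&cq->nr_events))
            wake_up(&cq->wait_completion);
    }
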
459 static inline void reset_eq_pending(struct ehca_cq *cq) in reset_eq_pending() argument
462 struct h_galpa gal = cq->galpas.kernel; in reset_eq_pending()
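
Lines 459-462 fetch the CQ's kernel GAL page (galpas.kernel) in order to clear its event-pending state in adapter MMIO space. The store accessor and register name below are assumptions about the hipz layer, not taken from the listing:

    static inline void reset_eq_pending(struct ehca_cq *cq)
    {
        struct h_galpa gal = cq->galpas.kernel;  /* kernel-mapped GAL page */

        /* Assumed accessor: zero the CQx_EP (event pending) register so
         * the adapter can signal the next event for this CQ. */
        hipz_galpa_store_cq(gal, cqx_ep, 0x0);
    }
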
517 struct ehca_cq *cq; in process_eqe() local
525 cq = idr_find(&ehca_cq_idr, token); in process_eqe()
526 if (cq) in process_eqe()
527 atomic_inc(&cq->nr_events); in process_eqe()
529 if (cq == NULL) { in process_eqe()
535 reset_eq_pending(cq); in process_eqe()
537 queue_comp_task(cq); in process_eqe()
539 comp_event_callback(cq); in process_eqe()
540 if (atomic_dec_and_test(&cq->nr_events)) in process_eqe()
541 wake_up(&cq->wait_completion); in process_eqe()
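
Lines 517-541 are the per-EQE completion path: resolve and pin the CQ exactly as above, clear its pending bit, then either hand it to the per-CPU completion thread (queue_comp_task(), which inherits the nr_events reference) or run the callback inline and drop the reference immediately. A sketch of that branch; the ehca_scaling_code switch is an assumption:

    /* Completion event: token resolved to cq and pinned via nr_events. */
    reset_eq_pending(cq);
    if (ehca_scaling_code)
        queue_comp_task(cq);      /* comp thread drops nr_events later */
    else {
        comp_event_callback(cq);  /* dispatch inline */
        if (atomic_dec_and_test(&cq->nr_events))
            wake_up(&cq->wait_completion);
    }
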
584 eqe_cache[eqe_cnt].cq = idr_find(&ehca_cq_idr, token); in ehca_process_eq()
585 if (eqe_cache[eqe_cnt].cq) in ehca_process_eq()
586 atomic_inc(&eqe_cache[eqe_cnt].cq->nr_events); in ehca_process_eq()
588 if (!eqe_cache[eqe_cnt].cq) { in ehca_process_eq()
595 eqe_cache[eqe_cnt].cq = NULL; in ehca_process_eq()
614 if (eq->eqe_cache[i].cq) in ehca_process_eq()
615 reset_eq_pending(eq->eqe_cache[i].cq); in ehca_process_eq()
623 if (eq->eqe_cache[i].cq) { in ehca_process_eq()
625 queue_comp_task(eq->eqe_cache[i].cq); in ehca_process_eq()
627 struct ehca_cq *cq = eq->eqe_cache[i].cq; in ehca_process_eq() local
628 comp_event_callback(cq); in ehca_process_eq()
629 if (atomic_dec_and_test(&cq->nr_events)) in ehca_process_eq()
630 wake_up(&cq->wait_completion); in ehca_process_eq()
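
Lines 584-630 show ehca_process_eq() doing the same work in two phases over eq->eqe_cache[]: first, with the EQ locked, resolve and pin each completion event's CQ (a NULL cq marks non-completion entries, line 595) and clear every pinned CQ's pending bit (lines 614-615); then, after unlocking, dispatch each cached entry just as process_eqe() does. A condensed sketch of the dispatch phase, assuming the same ehca_scaling_code switch:

    for (i = 0; i < eqe_cnt; i++) {
        struct ehca_cq *cq = eq->eqe_cache[i].cq;

        if (!cq)
            continue;             /* non-completion event, handled elsewhere */

        if (ehca_scaling_code)
            queue_comp_task(cq);  /* reference handed to comp thread */
        else {
            comp_event_callback(cq);
            if (atomic_dec_and_test(&cq->nr_events))
                wake_up(&cq->wait_completion);
        }
    }
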
727 struct ehca_cq *cq; in run_comp_task() local
730 cq = list_entry(cct->cq_list.next, struct ehca_cq, entry); in run_comp_task()
733 comp_event_callback(cq); in run_comp_task()
734 if (atomic_dec_and_test(&cq->nr_events)) in run_comp_task()
735 wake_up(&cq->wait_completion); in run_comp_task()
738 spin_lock(&cq->task_lock); in run_comp_task()
739 cq->nr_callbacks--; in run_comp_task()
740 if (!cq->nr_callbacks) { in run_comp_task()
744 spin_unlock(&cq->task_lock); in run_comp_task()
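
Lines 727-744 are the per-CPU completion worker draining its cq_list. Each pass runs the callback, drops the event reference, then decrements nr_callbacks under cq->task_lock; the CQ is only unlinked once no queued callbacks remain, so a CQ queued several times stays on the list until every pending event is delivered. A sketch, assuming the surrounding cct->task_lock handling and cq_jobs bookkeeping that the listing omits:

    while (!list_empty(&cct->cq_list)) {
        struct ehca_cq *cq = list_entry(cct->cq_list.next,
                                        struct ehca_cq, entry);

        spin_unlock_irq(&cct->task_lock);  /* assumed: drop list lock to call out */
        comp_event_callback(cq);
        if (atomic_dec_and_test(&cq->nr_events))
            wake_up(&cq->wait_completion);
        spin_lock_irq(&cct->task_lock);

        spin_lock(&cq->task_lock);
        cq->nr_callbacks--;
        if (!cq->nr_callbacks) {
            list_del_init(cct->cq_list.next);  /* last callback: dequeue CQ */
            cct->cq_jobs--;                    /* assumed bookkeeping */
        }
        spin_unlock(&cq->task_lock);
    }
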
753 struct ehca_cq *cq, *tmp; in comp_task_park() local
766 list_for_each_entry_safe(cq, tmp, &list, entry) { in comp_task_park()
767 list_del(&cq->entry); in comp_task_park()
768 __queue_comp_task(cq, target, thread); in comp_task_park()
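
Lines 753-768 are the CPU-hotplug park handler: the dying CPU's pending CQs are spliced onto a private list and then requeued, one by one, onto a surviving CPU's completion task. A sketch, where pool, find_next_online_cpu(), and the per-CPU lookups are assumptions about the surrounding comp-pool code:

    static void comp_task_park(unsigned int cpu)
    {
        struct ehca_cpu_comp_task *cct = per_cpu_ptr(pool->cpu_comp_tasks, cpu);
        struct ehca_cpu_comp_task *target;
        struct task_struct *thread;
        struct ehca_cq *cq, *tmp;
        LIST_HEAD(list);

        /* Detach all pending CQs from the CPU going down. */
        spin_lock_irq(&cct->task_lock);
        cct->cq_jobs = 0;
        cct->active = 0;
        list_splice_init(&cct->cq_list, &list);
        spin_unlock_irq(&cct->task_lock);

        /* Requeue them on a CPU that is still online (helpers assumed). */
        cpu = find_next_online_cpu(pool);
        target = per_cpu_ptr(pool->cpu_comp_tasks, cpu);
        thread = *per_cpu_ptr(pool->cpu_comp_threads, cpu);
        spin_lock_irq(&target->task_lock);
        list_for_each_entry_safe(cq, tmp, &list, entry) {
            list_del(&cq->entry);
            __queue_comp_task(cq, target, thread);
        }
        spin_unlock_irq(&target->task_lock);
    }
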