Lines Matching refs: hba

34 	if (test_bit(BNX2I_NX2_DEV_57710, &ep->hba->cnic_dev_type))  in bnx2i_get_cid_num()
48 static void bnx2i_adjust_qp_size(struct bnx2i_hba *hba) in bnx2i_adjust_qp_size() argument
52 if (test_bit(BNX2I_NX2_DEV_5706, &hba->cnic_dev_type) || in bnx2i_adjust_qp_size()
53 test_bit(BNX2I_NX2_DEV_5708, &hba->cnic_dev_type) || in bnx2i_adjust_qp_size()
54 test_bit(BNX2I_NX2_DEV_5709, &hba->cnic_dev_type)) { in bnx2i_adjust_qp_size()
55 if (!is_power_of_2(hba->max_sqes)) in bnx2i_adjust_qp_size()
56 hba->max_sqes = rounddown_pow_of_two(hba->max_sqes); in bnx2i_adjust_qp_size()
58 if (!is_power_of_2(hba->max_rqes)) in bnx2i_adjust_qp_size()
59 hba->max_rqes = rounddown_pow_of_two(hba->max_rqes); in bnx2i_adjust_qp_size()
67 if (hba->max_sqes < num_elements_per_pg) in bnx2i_adjust_qp_size()
68 hba->max_sqes = num_elements_per_pg; in bnx2i_adjust_qp_size()
69 else if (hba->max_sqes % num_elements_per_pg) in bnx2i_adjust_qp_size()
70 hba->max_sqes = (hba->max_sqes + num_elements_per_pg - 1) & in bnx2i_adjust_qp_size()
75 if (hba->max_cqes < num_elements_per_pg) in bnx2i_adjust_qp_size()
76 hba->max_cqes = num_elements_per_pg; in bnx2i_adjust_qp_size()
77 else if (hba->max_cqes % num_elements_per_pg) in bnx2i_adjust_qp_size()
78 hba->max_cqes = (hba->max_cqes + num_elements_per_pg - 1) & in bnx2i_adjust_qp_size()
83 if (hba->max_rqes < num_elements_per_pg) in bnx2i_adjust_qp_size()
84 hba->max_rqes = num_elements_per_pg; in bnx2i_adjust_qp_size()
85 else if (hba->max_rqes % num_elements_per_pg) in bnx2i_adjust_qp_size()
86 hba->max_rqes = (hba->max_rqes + num_elements_per_pg - 1) & in bnx2i_adjust_qp_size()
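Taken together, the bnx2i_adjust_qp_size() matches show two adjustments: on 5706/5708/5709 parts the SQ/RQ depths are rounded down to a power of two, and every queue depth is then padded out to a whole number of queue-element pages. A minimal standalone sketch of that arithmetic follows; the names "qes" and "per_pg" are invented here (standing in for hba->max_*qes and num_elements_per_pg) and this is not driver code.

#include <stdio.h>

/* Illustrative sketch of the queue-size adjustment implied by the matches above. */
static unsigned int adjust_queue_size(unsigned int qes, unsigned int per_pg,
				      int limit_to_pow2)
{
	/* 5706/5708/5709-style limit: round down to a power of two */
	if (limit_to_pow2)
		while (qes & (qes - 1))
			qes &= qes - 1;	/* drop lowest set bit until one remains */

	if (qes < per_pg)
		qes = per_pg;		/* never fewer entries than one page holds */
	else if (qes % per_pg)		/* per_pg is a power of two */
		qes = (qes + per_pg - 1) & ~(per_pg - 1);

	return qes;
}

int main(void)
{
	/* 100 entries, 32 per page -> 64 (power of two) or 128 (page multiple) */
	printf("%u %u\n", adjust_queue_size(100, 32, 1), adjust_queue_size(100, 32, 0));
	return 0;
}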
97 static void bnx2i_get_link_state(struct bnx2i_hba *hba) in bnx2i_get_link_state() argument
99 if (test_bit(__LINK_STATE_NOCARRIER, &hba->netdev->state)) in bnx2i_get_link_state()
100 set_bit(ADAPTER_STATE_LINK_DOWN, &hba->adapter_state); in bnx2i_get_link_state()
102 clear_bit(ADAPTER_STATE_LINK_DOWN, &hba->adapter_state); in bnx2i_get_link_state()
114 static void bnx2i_iscsi_license_error(struct bnx2i_hba *hba, u32 error_code) in bnx2i_iscsi_license_error() argument
119 hba->netdev->name); in bnx2i_iscsi_license_error()
124 hba->netdev->name); in bnx2i_iscsi_license_error()
125 set_bit(ADAPTER_STATE_INIT_FAILED, &hba->adapter_state); in bnx2i_iscsi_license_error()
146 if (!test_bit(BNX2I_NX2_DEV_57710, &ep->hba->cnic_dev_type)) in bnx2i_arm_cq_event_coalescing()
241 if (ep->qp.rq_prod_idx > bnx2i_conn->hba->max_rqes) { in bnx2i_put_rq_buf()
242 ep->qp.rq_prod_idx %= bnx2i_conn->hba->max_rqes; in bnx2i_put_rq_buf()
248 if (test_bit(BNX2I_NX2_DEV_57710, &ep->hba->cnic_dev_type)) { in bnx2i_put_rq_buf()
276 if (test_bit(BNX2I_NX2_DEV_57710, &ep->hba->cnic_dev_type)) { in bnx2i_ring_sq_dbell()
445 tmfabort_wqe->bd_list_addr_lo = (u32) bnx2i_conn->hba->mp_bd_dma; in bnx2i_send_iscsi_tmf()
447 ((u64) bnx2i_conn->hba->mp_bd_dma >> 32); in bnx2i_send_iscsi_tmf()
558 if (test_bit(BNX2I_NX2_DEV_57710, &ep->hba->cnic_dev_type)) { in bnx2i_send_iscsi_nopout()
582 bnx2i_conn->hba->mp_bd_dma; in bnx2i_send_iscsi_nopout()
584 (u32) ((u64) bnx2i_conn->hba->mp_bd_dma >> 32); in bnx2i_send_iscsi_nopout()
626 logout_wqe->bd_list_addr_lo = (u32) bnx2i_conn->hba->mp_bd_dma; in bnx2i_send_iscsi_logout()
628 ((u64) bnx2i_conn->hba->mp_bd_dma >> 32); in bnx2i_send_iscsi_logout()
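bnx2i_send_iscsi_tmf(), bnx2i_send_iscsi_nopout() and bnx2i_send_iscsi_logout() all point their WQE at the same hba->mp_bd_dma buffer-descriptor list, splitting the 64-bit DMA address into two 32-bit fields. A hedged standalone sketch of that split; the structure and field names are placeholders, not the firmware WQE layouts.

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

/* Placeholder for the lo/hi address pair carried by the firmware WQEs. */
struct wqe_addr {
	uint32_t bd_list_addr_lo;
	uint32_t bd_list_addr_hi;
};

static void set_bd_list_addr(struct wqe_addr *w, uint64_t mp_bd_dma)
{
	w->bd_list_addr_lo = (uint32_t)mp_bd_dma;		/* low 32 bits  */
	w->bd_list_addr_hi = (uint32_t)(mp_bd_dma >> 32);	/* high 32 bits */
}

int main(void)
{
	struct wqe_addr w;

	set_bd_list_addr(&w, 0x12345678abcdef00ULL);
	printf("lo=0x%08" PRIx32 " hi=0x%08" PRIx32 "\n",
	       w.bd_list_addr_lo, w.bd_list_addr_hi);
	return 0;
}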
648 struct bnx2i_hba *hba = bnx2i_conn->hba; in bnx2i_update_iscsi_conn() local
660 if (test_bit(BNX2I_NX2_DEV_57710, &bnx2i_conn->ep->hba->cnic_dev_type)) in bnx2i_update_iscsi_conn()
690 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2i_update_iscsi_conn()
691 hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, 1); in bnx2i_update_iscsi_conn()
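bnx2i_update_iscsi_conn(), bnx2i_send_conn_destroy(), both offload-request variants and bnx2i_send_fw_iscsi_init_msg() all guard submission with "if (hba->cnic && hba->cnic->submit_kwqes)" so a half-torn-down cnic device is never dereferenced. A hedged kernel-style sketch of that pattern; bnx2i_submit_one_kwqe() is a hypothetical helper, not a driver function, and it assumes the definitions from bnx2i.h and cnic_if.h.

#include <linux/errno.h>
#include "bnx2i.h"

/* Hypothetical helper illustrating the guarded-submit pattern above. */
static int bnx2i_submit_one_kwqe(struct bnx2i_hba *hba, struct kwqe *kwqe)
{
	struct kwqe *kwqe_arr[1] = { kwqe };
	int rc = -ENODEV;	/* assumed fallback; the driver's choice may differ */

	/* cnic (or its submit hook) can go away while the adapter tears down */
	if (hba->cnic && hba->cnic->submit_kwqes)
		rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, 1);

	return rc;
}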
742 void bnx2i_send_cmd_cleanup_req(struct bnx2i_hba *hba, struct bnx2i_cmd *cmd) in bnx2i_send_cmd_cleanup_req() argument
766 int bnx2i_send_conn_destroy(struct bnx2i_hba *hba, struct bnx2i_endpoint *ep) in bnx2i_send_conn_destroy() argument
778 if (test_bit(BNX2I_NX2_DEV_57710, &ep->hba->cnic_dev_type)) in bnx2i_send_conn_destroy()
786 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2i_send_conn_destroy()
787 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, 1); in bnx2i_send_conn_destroy()
800 static int bnx2i_570x_send_conn_ofld_req(struct bnx2i_hba *hba, in bnx2i_570x_send_conn_ofld_req() argument
846 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2i_570x_send_conn_ofld_req()
847 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2i_570x_send_conn_ofld_req()
860 static int bnx2i_5771x_send_conn_ofld_req(struct bnx2i_hba *hba, in bnx2i_5771x_send_conn_ofld_req() argument
915 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2i_5771x_send_conn_ofld_req()
916 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2i_5771x_send_conn_ofld_req()
929 int bnx2i_send_conn_ofld_req(struct bnx2i_hba *hba, struct bnx2i_endpoint *ep) in bnx2i_send_conn_ofld_req() argument
933 if (test_bit(BNX2I_NX2_DEV_57710, &hba->cnic_dev_type)) in bnx2i_send_conn_ofld_req()
934 rc = bnx2i_5771x_send_conn_ofld_req(hba, ep); in bnx2i_send_conn_ofld_req()
936 rc = bnx2i_570x_send_conn_ofld_req(hba, ep); in bnx2i_send_conn_ofld_req()
957 if (test_bit(BNX2I_NX2_DEV_57710, &ep->hba->cnic_dev_type)) in setup_qp_page_tables()
1058 int bnx2i_alloc_qp_resc(struct bnx2i_hba *hba, struct bnx2i_endpoint *ep) in bnx2i_alloc_qp_resc() argument
1062 ep->hba = hba; in bnx2i_alloc_qp_resc()
1067 ep->qp.sq_mem_size = hba->max_sqes * BNX2I_SQ_WQE_SIZE; in bnx2i_alloc_qp_resc()
1076 dma_alloc_coherent(&hba->pcidev->dev, ep->qp.sq_pgtbl_size, in bnx2i_alloc_qp_resc()
1086 dma_alloc_coherent(&hba->pcidev->dev, ep->qp.sq_mem_size, in bnx2i_alloc_qp_resc()
1098 ep->qp.sq_last_qe = &ep->qp.sq_first_qe[hba->max_sqes - 1]; in bnx2i_alloc_qp_resc()
1101 ep->qp.sqe_left = hba->max_sqes; in bnx2i_alloc_qp_resc()
1104 ep->qp.cq_mem_size = hba->max_cqes * BNX2I_CQE_SIZE; in bnx2i_alloc_qp_resc()
1113 dma_alloc_coherent(&hba->pcidev->dev, ep->qp.cq_pgtbl_size, in bnx2i_alloc_qp_resc()
1123 dma_alloc_coherent(&hba->pcidev->dev, ep->qp.cq_mem_size, in bnx2i_alloc_qp_resc()
1135 ep->qp.cq_last_qe = &ep->qp.cq_first_qe[hba->max_cqes - 1]; in bnx2i_alloc_qp_resc()
1138 ep->qp.cqe_left = hba->max_cqes; in bnx2i_alloc_qp_resc()
1140 ep->qp.cqe_size = hba->max_cqes; in bnx2i_alloc_qp_resc()
1147 ep->qp.rq_mem_size = hba->max_rqes * BNX2I_RQ_WQE_SIZE; in bnx2i_alloc_qp_resc()
1156 dma_alloc_coherent(&hba->pcidev->dev, ep->qp.rq_pgtbl_size, in bnx2i_alloc_qp_resc()
1166 dma_alloc_coherent(&hba->pcidev->dev, ep->qp.rq_mem_size, in bnx2i_alloc_qp_resc()
1177 ep->qp.rq_last_qe = &ep->qp.rq_first_qe[hba->max_rqes - 1]; in bnx2i_alloc_qp_resc()
1180 ep->qp.rqe_left = hba->max_rqes; in bnx2i_alloc_qp_resc()
1187 bnx2i_free_qp_resc(hba, ep); in bnx2i_alloc_qp_resc()
1200 void bnx2i_free_qp_resc(struct bnx2i_hba *hba, struct bnx2i_endpoint *ep) in bnx2i_free_qp_resc() argument
1208 dma_free_coherent(&hba->pcidev->dev, ep->qp.sq_pgtbl_size, in bnx2i_free_qp_resc()
1214 dma_free_coherent(&hba->pcidev->dev, ep->qp.sq_mem_size, in bnx2i_free_qp_resc()
1222 dma_free_coherent(&hba->pcidev->dev, ep->qp.rq_pgtbl_size, in bnx2i_free_qp_resc()
1228 dma_free_coherent(&hba->pcidev->dev, ep->qp.rq_mem_size, in bnx2i_free_qp_resc()
1236 dma_free_coherent(&hba->pcidev->dev, ep->qp.cq_pgtbl_size, in bnx2i_free_qp_resc()
1242 dma_free_coherent(&hba->pcidev->dev, ep->qp.cq_mem_size, in bnx2i_free_qp_resc()
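bnx2i_alloc_qp_resc() allocates each queue's page table and queue memory with dma_alloc_coherent() against &hba->pcidev->dev and unwinds through bnx2i_free_qp_resc() on failure; bnx2i_free_qp_resc() releases the same regions with matching dma_free_coherent() calls. A hedged sketch of one such allocate/free pair; "struct qmem" and its fields are invented for illustration and do not mirror the driver's ep->qp layout.

#include <linux/dma-mapping.h>
#include <linux/pci.h>

/* Illustrative bookkeeping for one coherent DMA region. */
struct qmem {
	void		*virt;
	dma_addr_t	phys;
	u32		size;
};

static int qmem_alloc(struct pci_dev *pdev, struct qmem *q, u32 size)
{
	q->size = size;
	q->virt = dma_alloc_coherent(&pdev->dev, q->size, &q->phys, GFP_KERNEL);
	return q->virt ? 0 : -ENOMEM;	/* caller unwinds earlier regions on error */
}

static void qmem_free(struct pci_dev *pdev, struct qmem *q)
{
	if (q->virt) {
		dma_free_coherent(&pdev->dev, q->size, q->virt, q->phys);
		q->virt = NULL;
	}
}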
1261 int bnx2i_send_fw_iscsi_init_msg(struct bnx2i_hba *hba) in bnx2i_send_fw_iscsi_init_msg() argument
1272 bnx2i_adjust_qp_size(hba); in bnx2i_send_fw_iscsi_init_msg()
1284 iscsi_init.dummy_buffer_addr_lo = (u32) hba->dummy_buf_dma; in bnx2i_send_fw_iscsi_init_msg()
1286 (u32) ((u64) hba->dummy_buf_dma >> 32); in bnx2i_send_fw_iscsi_init_msg()
1288 hba->num_ccell = hba->max_sqes >> 1; in bnx2i_send_fw_iscsi_init_msg()
1289 hba->ctx_ccell_tasks = in bnx2i_send_fw_iscsi_init_msg()
1290 ((hba->num_ccell & 0xFFFF) | (hba->max_sqes << 16)); in bnx2i_send_fw_iscsi_init_msg()
1291 iscsi_init.num_ccells_per_conn = hba->num_ccell; in bnx2i_send_fw_iscsi_init_msg()
1292 iscsi_init.num_tasks_per_conn = hba->max_sqes; in bnx2i_send_fw_iscsi_init_msg()
1294 iscsi_init.sq_num_wqes = hba->max_sqes; in bnx2i_send_fw_iscsi_init_msg()
1297 iscsi_init.cq_num_wqes = hba->max_cqes; in bnx2i_send_fw_iscsi_init_msg()
1298 iscsi_init.cq_num_pages = (hba->max_cqes * BNX2I_CQE_SIZE + in bnx2i_send_fw_iscsi_init_msg()
1300 iscsi_init.sq_num_pages = (hba->max_sqes * BNX2I_SQ_WQE_SIZE + in bnx2i_send_fw_iscsi_init_msg()
1303 iscsi_init.rq_num_wqes = hba->max_rqes; in bnx2i_send_fw_iscsi_init_msg()
1309 iscsi_init2.max_cq_sqn = hba->max_cqes * 2 + 1; in bnx2i_send_fw_iscsi_init_msg()
1339 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2i_send_fw_iscsi_init_msg()
1340 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, 2); in bnx2i_send_fw_iscsi_init_msg()
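bnx2i_send_fw_iscsi_init_msg() sizes the init KWQE from the adjusted queue depths: half of max_sqes becomes num_ccell, the ccell/task pair is packed into one 32-bit word, and the SQ/CQ page counts are a round-up of bytes to pages. A standalone sketch of that arithmetic; the entry size and page size are arbitrary parameters here, not the driver's BNX2I_*_SIZE constants.

#include <stdio.h>

/* Bytes rounded up to whole pages, as in the sq/cq_num_pages matches above. */
static unsigned int num_pages(unsigned int entries, unsigned int entry_size,
			      unsigned int page_size)
{
	return (entries * entry_size + page_size - 1) / page_size;
}

int main(void)
{
	unsigned int max_sqes = 128, max_cqes = 128;
	unsigned int num_ccell = max_sqes >> 1;		/* half the SQ depth */
	unsigned int ctx_ccell_tasks = (num_ccell & 0xFFFF) | (max_sqes << 16);

	printf("ccells=%u ctx=0x%08x cq_pages=%u\n", num_ccell, ctx_ccell_tasks,
	       num_pages(max_cqes, 16 /* arbitrary entry size */, 4096));
	return 0;
}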
1358 struct bnx2i_hba *hba = bnx2i_conn->hba; in bnx2i_process_scsi_cmd_resp() local
1379 ADD_STATS_64(hba, rx_pdus, in bnx2i_process_scsi_cmd_resp()
1381 ADD_STATS_64(hba, rx_bytes, in bnx2i_process_scsi_cmd_resp()
1390 ADD_STATS_64(hba, tx_pdus, in bnx2i_process_scsi_cmd_resp()
1392 ADD_STATS_64(hba, tx_bytes, in bnx2i_process_scsi_cmd_resp()
1394 ADD_STATS_64(hba, rx_pdus, in bnx2i_process_scsi_cmd_resp()
1980 struct bnx2i_hba *hba = bnx2i_conn->hba; in bnx2i_process_new_cqes() local
1993 hba->netdev->name); in bnx2i_process_new_cqes()
2007 hba->netdev->name); in bnx2i_process_new_cqes()
2065 ADD_STATS_64(hba, rx_pdus, 1); in bnx2i_process_new_cqes()
2066 ADD_STATS_64(hba, rx_bytes, nopin->data_length); in bnx2i_process_new_cqes()
2072 hba->netdev->name, in bnx2i_process_new_cqes()
2107 static void bnx2i_fastpath_notification(struct bnx2i_hba *hba, in bnx2i_fastpath_notification() argument
2115 bnx2i_conn = bnx2i_get_conn_from_id(hba, iscsi_cid); in bnx2i_fastpath_notification()
2141 static void bnx2i_process_update_conn_cmpl(struct bnx2i_hba *hba, in bnx2i_process_update_conn_cmpl() argument
2148 conn = bnx2i_get_conn_from_id(hba, iscsi_cid); in bnx2i_process_update_conn_cmpl()
2176 static void bnx2i_recovery_que_add_conn(struct bnx2i_hba *hba, in bnx2i_recovery_que_add_conn() argument
2192 static void bnx2i_process_tcp_error(struct bnx2i_hba *hba, in bnx2i_process_tcp_error() argument
2199 bnx2i_conn = bnx2i_get_conn_from_id(hba, iscsi_cid); in bnx2i_process_tcp_error()
2208 bnx2i_recovery_que_add_conn(bnx2i_conn->hba, bnx2i_conn); in bnx2i_process_tcp_error()
2225 static void bnx2i_process_iscsi_error(struct bnx2i_hba *hba, in bnx2i_process_iscsi_error() argument
2238 bnx2i_conn = bnx2i_get_conn_from_id(hba, iscsi_cid); in bnx2i_process_iscsi_error()
2391 bnx2i_conn->hba->shost->host_no, in bnx2i_process_iscsi_error()
2394 bnx2i_recovery_que_add_conn(bnx2i_conn->hba, bnx2i_conn); in bnx2i_process_iscsi_error()
2412 static void bnx2i_process_conn_destroy_cmpl(struct bnx2i_hba *hba, in bnx2i_process_conn_destroy_cmpl() argument
2417 ep = bnx2i_find_ep_in_destroy_list(hba, conn_destroy->iscsi_conn_id); in bnx2i_process_conn_destroy_cmpl()
2424 if (hba != ep->hba) { in bnx2i_process_conn_destroy_cmpl()
2446 static void bnx2i_process_ofld_cmpl(struct bnx2i_hba *hba, in bnx2i_process_ofld_cmpl() argument
2453 ep = bnx2i_find_ep_in_ofld_list(hba, ofld_kcqe->iscsi_conn_id); in bnx2i_process_ofld_cmpl()
2459 if (hba != ep->hba) { in bnx2i_process_ofld_cmpl()
2470 hba->netdev->name); in bnx2i_process_ofld_cmpl()
2474 "opcode\n", hba->netdev->name); in bnx2i_process_ofld_cmpl()
2481 "error code %d\n", hba->netdev->name, in bnx2i_process_ofld_cmpl()
2503 struct bnx2i_hba *hba = context; in bnx2i_indicate_kcqe() local
2512 bnx2i_fastpath_notification(hba, ikcqe); in bnx2i_indicate_kcqe()
2514 bnx2i_process_ofld_cmpl(hba, ikcqe); in bnx2i_indicate_kcqe()
2516 bnx2i_process_update_conn_cmpl(hba, ikcqe); in bnx2i_indicate_kcqe()
2520 bnx2i_iscsi_license_error(hba, ikcqe->\ in bnx2i_indicate_kcqe()
2523 set_bit(ADAPTER_STATE_UP, &hba->adapter_state); in bnx2i_indicate_kcqe()
2524 bnx2i_get_link_state(hba); in bnx2i_indicate_kcqe()
2527 (u8)hba->pcidev->bus->number, in bnx2i_indicate_kcqe()
2528 hba->pci_devno, in bnx2i_indicate_kcqe()
2529 (u8)hba->pci_func); in bnx2i_indicate_kcqe()
2534 bnx2i_process_conn_destroy_cmpl(hba, ikcqe); in bnx2i_indicate_kcqe()
2536 bnx2i_process_iscsi_error(hba, ikcqe); in bnx2i_indicate_kcqe()
2538 bnx2i_process_tcp_error(hba, ikcqe); in bnx2i_indicate_kcqe()
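bnx2i_indicate_kcqe() is the single entry point for these completions: it recovers the hba from the cnic context pointer and fans out to the per-opcode handlers matched above. A heavily hedged sketch of that fan-out; the wrapper name, the 'op' parameter and the opcode constants are placeholders invented here, and only the handler names and their (hba, kcqe) signatures come from the listing.

/* Placeholder opcode values; the real constants live in the firmware headers. */
enum { KCQE_OP_CQ_EVENT, KCQE_OP_OFLD_CONN, KCQE_OP_UPDATE_CONN,
       KCQE_OP_DESTROY_CONN, KCQE_OP_ISCSI_ERROR, KCQE_OP_TCP_ERROR };

static void indicate_one_kcqe(struct bnx2i_hba *hba, struct iscsi_kcqe *ikcqe,
			      unsigned int op)
{
	switch (op) {
	case KCQE_OP_CQ_EVENT:
		bnx2i_fastpath_notification(hba, ikcqe);
		break;
	case KCQE_OP_OFLD_CONN:
		bnx2i_process_ofld_cmpl(hba, ikcqe);
		break;
	case KCQE_OP_UPDATE_CONN:
		bnx2i_process_update_conn_cmpl(hba, ikcqe);
		break;
	case KCQE_OP_DESTROY_CONN:
		bnx2i_process_conn_destroy_cmpl(hba, ikcqe);
		break;
	case KCQE_OP_ISCSI_ERROR:
		bnx2i_process_iscsi_error(hba, ikcqe);
		break;
	case KCQE_OP_TCP_ERROR:
		bnx2i_process_tcp_error(hba, ikcqe);
		break;
	}
}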
2558 struct bnx2i_hba *hba = context; in bnx2i_indicate_netevent() local
2566 if (!test_bit(ADAPTER_STATE_UP, &hba->adapter_state)) in bnx2i_indicate_netevent()
2567 bnx2i_send_fw_iscsi_init_msg(hba); in bnx2i_indicate_netevent()
2570 clear_bit(ADAPTER_STATE_GOING_DOWN, &hba->adapter_state); in bnx2i_indicate_netevent()
2571 clear_bit(ADAPTER_STATE_UP, &hba->adapter_state); in bnx2i_indicate_netevent()
2574 set_bit(ADAPTER_STATE_GOING_DOWN, &hba->adapter_state); in bnx2i_indicate_netevent()
2575 iscsi_host_for_each_session(hba->shost, in bnx2i_indicate_netevent()
2579 bnx2i_get_link_state(hba); in bnx2i_indicate_netevent()
2598 if (test_bit(ADAPTER_STATE_GOING_DOWN, &ep->hba->adapter_state)) in bnx2i_cm_connect_cmpl()
2655 bnx2i_recovery_que_add_conn(ep->hba, ep->conn); in bnx2i_cm_remote_close()
2676 bnx2i_recovery_que_add_conn(ep->hba, ep->conn); in bnx2i_cm_remote_abort()
2683 struct bnx2i_hba *hba = context; in bnx2i_send_nl_mesg() local
2686 if (!hba) in bnx2i_send_nl_mesg()
2689 rc = iscsi_offload_mesg(hba->shost, &bnx2i_iscsi_transport, in bnx2i_send_nl_mesg()
2740 if (test_bit(BNX2I_NX2_DEV_57710, &ep->hba->cnic_dev_type)) { in bnx2i_map_ep_dbell_regs()
2741 reg_base = pci_resource_start(ep->hba->pcidev, in bnx2i_map_ep_dbell_regs()
2748 if ((test_bit(BNX2I_NX2_DEV_5709, &ep->hba->cnic_dev_type)) && in bnx2i_map_ep_dbell_regs()
2749 (ep->hba->mail_queue_access == BNX2I_MQ_BIN_MODE)) { in bnx2i_map_ep_dbell_regs()
2750 config2 = REG_RD(ep->hba, BNX2_MQ_CONFIG2); in bnx2i_map_ep_dbell_regs()
2763 ep->qp.ctx_base = ioremap_nocache(ep->hba->reg_base + reg_off, in bnx2i_map_ep_dbell_regs()
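bnx2i_map_ep_dbell_regs() picks the doorbell window per chip: 57710 parts take the offset from BAR 0 via pci_resource_start(), 5709 in MQ bin mode consults BNX2_MQ_CONFIG2 first, and the resulting address is ioremap'd into ep->qp.ctx_base. A hedged sketch of the 57710-style mapping; the helper name, offset and size are placeholders, and ioremap_nocache() matches the kernel era of this listing (newer kernels use plain ioremap()).

#include <linux/io.h>
#include <linux/pci.h>
#include "bnx2i.h"

/* Hypothetical helper showing the BAR-relative doorbell mapping above. */
static int map_dbell_57710(struct bnx2i_endpoint *ep, unsigned long db_off,
			   unsigned long db_size)
{
	resource_size_t reg_base = pci_resource_start(ep->hba->pcidev, 0);

	ep->qp.ctx_base = ioremap_nocache(reg_base + db_off, db_size);
	return ep->qp.ctx_base ? 0 : -ENOMEM;
}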