Lines matching refs:cd in the GenWQE accelerator driver helpers. Each match shows the source line number, the matching line, and the enclosing function; matches where cd appears as a function parameter are tagged "argument". cd is the struct genwqe_dev * card descriptor passed to every helper.
54 int __genwqe_writeq(struct genwqe_dev *cd, u64 byte_offs, u64 val) in __genwqe_writeq() argument
56 struct pci_dev *pci_dev = cd->pci_dev; in __genwqe_writeq()
58 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_writeq()
61 if (cd->mmio == NULL) in __genwqe_writeq()
67 __raw_writeq((__force u64)cpu_to_be64(val), cd->mmio + byte_offs); in __genwqe_writeq()
78 u64 __genwqe_readq(struct genwqe_dev *cd, u64 byte_offs) in __genwqe_readq() argument
80 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_readq()
83 if ((cd->err_inject & GENWQE_INJECT_GFIR_FATAL) && in __genwqe_readq()
87 if ((cd->err_inject & GENWQE_INJECT_GFIR_INFO) && in __genwqe_readq()
91 if (cd->mmio == NULL) in __genwqe_readq()
94 return be64_to_cpu((__force __be64)__raw_readq(cd->mmio + byte_offs)); in __genwqe_readq()
105 int __genwqe_writel(struct genwqe_dev *cd, u64 byte_offs, u32 val) in __genwqe_writel() argument
107 struct pci_dev *pci_dev = cd->pci_dev; in __genwqe_writel()
109 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_writel()
112 if (cd->mmio == NULL) in __genwqe_writel()
118 __raw_writel((__force u32)cpu_to_be32(val), cd->mmio + byte_offs); in __genwqe_writel()
129 u32 __genwqe_readl(struct genwqe_dev *cd, u64 byte_offs) in __genwqe_readl() argument
131 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_readl()
134 if (cd->mmio == NULL) in __genwqe_readl()
137 return be32_to_cpu((__force __be32)__raw_readl(cd->mmio + byte_offs)); in __genwqe_readl()
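The four accessors above wrap __raw_writeq()/__raw_readq() and their 32-bit counterparts with big-endian conversion, an mmio NULL check, and error injection. A minimal caller sketch, not taken from this listing: it assumes the driver's card_base.h declarations and that a failed read comes back as an all-ones IO_ILLEGAL_VALUE marker (both assumptions).

#include <linux/errno.h>
#include "card_base.h"	/* struct genwqe_dev, IO_* register offsets (assumed header) */

static int example_soft_reset_if_readable(struct genwqe_dev *cd)
{
	u64 gfir;

	/* Read the global FIR; a bad read (no mmio mapping or injected
	 * error) is assumed to return IO_ILLEGAL_VALUE (all ones). */
	gfir = __genwqe_readq(cd, IO_SLC_CFGREG_GFIR);
	if (gfir == IO_ILLEGAL_VALUE)
		return -EIO;

	/* The write helpers return 0 on success or a negative errno. */
	return __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, 0x1ull);
}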
145 int genwqe_read_app_id(struct genwqe_dev *cd, char *app_name, int len) in genwqe_read_app_id() argument
148 u32 app_id = (u32)cd->app_unitcfg; in genwqe_read_app_id()
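genwqe_read_app_id() decodes the application id packed into the low 32 bits of cd->app_unitcfg into a printable string. A hypothetical caller, using the same assumed headers as the sketch above; the 5-byte buffer (four characters plus NUL) is an assumption.

static void example_log_app_id(struct genwqe_dev *cd)
{
	char app_name[5];	/* four ASCII characters plus NUL (assumption) */

	genwqe_read_app_id(cd, app_name, sizeof(app_name));
	dev_info(&cd->pci_dev->dev, "application id: %s\n", app_name);
}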
217 void *__genwqe_alloc_consistent(struct genwqe_dev *cd, size_t size, in __genwqe_alloc_consistent() argument
223 return pci_alloc_consistent(cd->pci_dev, size, dma_handle); in __genwqe_alloc_consistent()
226 void __genwqe_free_consistent(struct genwqe_dev *cd, size_t size, in __genwqe_free_consistent() argument
232 pci_free_consistent(cd->pci_dev, size, vaddr, dma_handle); in __genwqe_free_consistent()
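__genwqe_alloc_consistent()/__genwqe_free_consistent() are thin wrappers around the PCI consistent-DMA API; the dma_addr_t handle filled in on allocation must be handed back on free. A hypothetical pairing (same assumed headers as above, PAGE_SIZE chosen only because the SGL code below uses it):

static int example_alloc_dma_buffer(struct genwqe_dev *cd)
{
	dma_addr_t dma_handle;
	void *vaddr;

	vaddr = __genwqe_alloc_consistent(cd, PAGE_SIZE, &dma_handle);
	if (vaddr == NULL)
		return -ENOMEM;

	/* ... program dma_handle into the card, access vaddr from the CPU ... */

	__genwqe_free_consistent(cd, PAGE_SIZE, vaddr, dma_handle);
	return 0;
}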
235 static void genwqe_unmap_pages(struct genwqe_dev *cd, dma_addr_t *dma_list, in genwqe_unmap_pages() argument
239 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_unmap_pages()
248 static int genwqe_map_pages(struct genwqe_dev *cd, in genwqe_map_pages() argument
253 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_map_pages()
277 genwqe_unmap_pages(cd, dma_list, num_pages); in genwqe_map_pages()
297 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_alloc_sync_sgl() argument
301 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_alloc_sync_sgl()
322 sgl->sgl = __genwqe_alloc_consistent(cd, sgl->sgl_size, in genwqe_alloc_sync_sgl()
332 sgl->fpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, in genwqe_alloc_sync_sgl()
345 sgl->lpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, in genwqe_alloc_sync_sgl()
360 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, in genwqe_alloc_sync_sgl()
363 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, in genwqe_alloc_sync_sgl()
368 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_setup_sgl() argument
465 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl) in genwqe_free_sync_sgl() argument
468 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_free_sync_sgl()
477 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, in genwqe_free_sync_sgl()
490 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->lpage, in genwqe_free_sync_sgl()
495 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, in genwqe_free_sync_sgl()
557 int genwqe_user_vmap(struct genwqe_dev *cd, struct dma_mapping *m, void *uaddr, in genwqe_user_vmap() argument
562 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_user_vmap()
603 rc = genwqe_map_pages(cd, m->page_list, m->nr_pages, m->dma_list); in genwqe_user_vmap()
628 int genwqe_user_vunmap(struct genwqe_dev *cd, struct dma_mapping *m, in genwqe_user_vunmap() argument
631 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_user_vunmap()
640 genwqe_unmap_pages(cd, m->dma_list, m->nr_pages); in genwqe_user_vunmap()
664 u8 genwqe_card_type(struct genwqe_dev *cd) in genwqe_card_type() argument
666 u64 card_type = cd->slu_unitcfg; in genwqe_card_type()
675 int genwqe_card_reset(struct genwqe_dev *cd) in genwqe_card_reset() argument
678 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_card_reset()
680 if (!genwqe_is_privileged(cd)) in genwqe_card_reset()
684 __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, 0x1ull); in genwqe_card_reset()
686 __genwqe_readq(cd, IO_HSU_FIR_CLR); in genwqe_card_reset()
687 __genwqe_readq(cd, IO_APP_FIR_CLR); in genwqe_card_reset()
688 __genwqe_readq(cd, IO_SLU_FIR_CLR); in genwqe_card_reset()
698 softrst = __genwqe_readq(cd, IO_SLC_CFGREG_SOFTRESET) & 0x3cull; in genwqe_card_reset()
699 __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, softrst | 0x2ull); in genwqe_card_reset()
704 if (genwqe_need_err_masking(cd)) { in genwqe_card_reset()
707 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull); in genwqe_card_reset()
712 int genwqe_read_softreset(struct genwqe_dev *cd) in genwqe_read_softreset() argument
716 if (!genwqe_is_privileged(cd)) in genwqe_read_softreset()
719 bitstream = __genwqe_readq(cd, IO_SLU_BITSTREAM) & 0x1; in genwqe_read_softreset()
720 cd->softreset = (bitstream == 0) ? 0x8ull : 0xcull; in genwqe_read_softreset()
729 int genwqe_set_interrupt_capability(struct genwqe_dev *cd, int count) in genwqe_set_interrupt_capability() argument
732 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_set_interrupt_capability()
738 cd->flags |= GENWQE_FLAG_MSI_ENABLED; in genwqe_set_interrupt_capability()
746 void genwqe_reset_interrupt_capability(struct genwqe_dev *cd) in genwqe_reset_interrupt_capability() argument
748 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_reset_interrupt_capability()
750 if (cd->flags & GENWQE_FLAG_MSI_ENABLED) { in genwqe_reset_interrupt_capability()
752 cd->flags &= ~GENWQE_FLAG_MSI_ENABLED; in genwqe_reset_interrupt_capability()
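genwqe_set_interrupt_capability()/genwqe_reset_interrupt_capability() enable and tear down MSI for the card and track the state in cd->flags. A hypothetical probe/teardown pairing, assuming the usual 0-on-success convention; the vector count of 4 is arbitrary.

static int example_enable_irqs(struct genwqe_dev *cd)
{
	int rc;

	rc = genwqe_set_interrupt_capability(cd, 4);	/* request 4 MSI vectors (arbitrary) */
	if (rc)
		return rc;

	/* ... request_irq() handlers for the vectors here ... */

	genwqe_reset_interrupt_capability(cd);		/* undo on teardown */
	return 0;
}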
766 static int set_reg_idx(struct genwqe_dev *cd, struct genwqe_reg *r, in set_reg_idx() argument
780 static int set_reg(struct genwqe_dev *cd, struct genwqe_reg *r, in set_reg() argument
783 return set_reg_idx(cd, r, i, m, addr, 0, val); in set_reg()
786 int genwqe_read_ffdc_regs(struct genwqe_dev *cd, struct genwqe_reg *regs, in genwqe_read_ffdc_regs() argument
794 gfir = __genwqe_readq(cd, IO_SLC_CFGREG_GFIR); in genwqe_read_ffdc_regs()
795 set_reg(cd, regs, &idx, max_regs, IO_SLC_CFGREG_GFIR, gfir); in genwqe_read_ffdc_regs()
798 sluid = __genwqe_readq(cd, IO_SLU_UNITCFG); /* 0x00000000 */ in genwqe_read_ffdc_regs()
799 set_reg(cd, regs, &idx, max_regs, IO_SLU_UNITCFG, sluid); in genwqe_read_ffdc_regs()
802 appid = __genwqe_readq(cd, IO_APP_UNITCFG); /* 0x02000000 */ in genwqe_read_ffdc_regs()
803 set_reg(cd, regs, &idx, max_regs, IO_APP_UNITCFG, appid); in genwqe_read_ffdc_regs()
810 ufir = __genwqe_readq(cd, ufir_addr); in genwqe_read_ffdc_regs()
811 set_reg(cd, regs, &idx, max_regs, ufir_addr, ufir); in genwqe_read_ffdc_regs()
815 ufec = __genwqe_readq(cd, ufec_addr); in genwqe_read_ffdc_regs()
816 set_reg(cd, regs, &idx, max_regs, ufec_addr, ufec); in genwqe_read_ffdc_regs()
824 sfir = __genwqe_readq(cd, sfir_addr); in genwqe_read_ffdc_regs()
825 set_reg(cd, regs, &idx, max_regs, sfir_addr, sfir); in genwqe_read_ffdc_regs()
828 sfec = __genwqe_readq(cd, sfec_addr); in genwqe_read_ffdc_regs()
829 set_reg(cd, regs, &idx, max_regs, sfec_addr, sfec); in genwqe_read_ffdc_regs()
844 int genwqe_ffdc_buff_size(struct genwqe_dev *cd, int uid) in genwqe_ffdc_buff_size() argument
851 eevptr = __genwqe_readq(cd, eevptr_addr); in genwqe_ffdc_buff_size()
857 val = __genwqe_readq(cd, l_addr); in genwqe_ffdc_buff_size()
880 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_size()
897 int genwqe_ffdc_buff_read(struct genwqe_dev *cd, int uid, in genwqe_ffdc_buff_read() argument
906 eevptr = __genwqe_readq(cd, eevptr_addr); in genwqe_ffdc_buff_read()
911 e = __genwqe_readq(cd, l_addr); in genwqe_ffdc_buff_read()
922 val = __genwqe_readq(cd, d_addr); in genwqe_ffdc_buff_read()
923 set_reg_idx(cd, regs, &idx, max_regs, in genwqe_ffdc_buff_read()
929 val = __genwqe_readq(cd, d_addr); in genwqe_ffdc_buff_read()
930 set_reg_idx(cd, regs, &idx, max_regs, in genwqe_ffdc_buff_read()
945 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_read()
963 __genwqe_writeq(cd, addr, diag_sel); in genwqe_ffdc_buff_read()
970 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_read()
971 set_reg_idx(cd, regs, &idx, max_regs, addr, in genwqe_ffdc_buff_read()
985 int genwqe_write_vreg(struct genwqe_dev *cd, u32 reg, u64 val, int func) in genwqe_write_vreg() argument
987 __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf); in genwqe_write_vreg()
988 __genwqe_writeq(cd, reg, val); in genwqe_write_vreg()
998 u64 genwqe_read_vreg(struct genwqe_dev *cd, u32 reg, int func) in genwqe_read_vreg() argument
1000 __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf); in genwqe_read_vreg()
1001 return __genwqe_readq(cd, reg); in genwqe_read_vreg()
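genwqe_write_vreg()/genwqe_read_vreg() select a PCI function through IO_PF_SLC_VIRTUAL_WINDOW before touching the register itself, so a caller only supplies the register offset and function number. A hypothetical per-VF read; the IO_SLC_VF_APPJOB_TIMEOUT offset and the "VFs start at function 1" encoding are assumptions about the register map, suggested only by the 0xf mask above.

static u64 example_read_vf_register(struct genwqe_dev *cd, int vf_num)
{
	/* The helper programs the virtual window for us; pass the
	 * function number of the VF of interest (assumed vf_num + 1). */
	return genwqe_read_vreg(cd, IO_SLC_VF_APPJOB_TIMEOUT, vf_num + 1);
}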
1016 int genwqe_base_clock_frequency(struct genwqe_dev *cd) in genwqe_base_clock_frequency() argument
1021 speed = (u16)((cd->slu_unitcfg >> 28) & 0x0full); in genwqe_base_clock_frequency()
1033 void genwqe_stop_traps(struct genwqe_dev *cd) in genwqe_stop_traps() argument
1035 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG_SET, 0xcull); in genwqe_stop_traps()
1043 void genwqe_start_traps(struct genwqe_dev *cd) in genwqe_start_traps() argument
1045 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG_CLR, 0xcull); in genwqe_start_traps()
1047 if (genwqe_need_err_masking(cd)) in genwqe_start_traps()
1048 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull); in genwqe_start_traps()
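genwqe_stop_traps()/genwqe_start_traps() set and clear debug bits around diagnostic readout. A plausible FFDC-sizing outline follows; the pairing around the capture is an assumption, and only genwqe_ffdc_buff_size() is called because its full parameter list is visible above, so the actual read step stays a comment.

static int example_ffdc_capture_size(struct genwqe_dev *cd, int uid)
{
	int entries;

	genwqe_stop_traps(cd);
	entries = genwqe_ffdc_buff_size(cd, uid);
	/* ... allocate 'entries' struct genwqe_reg and pass them to
	 *     genwqe_ffdc_buff_read() here ... */
	genwqe_start_traps(cd);

	return entries;
}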