Lines Matching refs:pvt
91 static void f15h_select_dct(struct amd64_pvt *pvt, u8 dct) in f15h_select_dct() argument
95 amd64_read_pci_cfg(pvt->F1, DCT_CFG_SEL, &reg); in f15h_select_dct()
96 reg &= (pvt->model == 0x30) ? ~3 : ~1; in f15h_select_dct()
98 amd64_write_pci_cfg(pvt->F1, DCT_CFG_SEL, reg); in f15h_select_dct()
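The three lines above form a read-modify-write of the F15h DCT configuration select register: clear the selector field (two bits wide on model 0x30, one bit otherwise), then OR in the target DCT. A minimal user-space model of just that bit manipulation; anything beyond what the listing shows is assumed:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t select_dct(uint32_t reg, uint8_t dct, uint8_t model)
    {
        reg &= (model == 0x30) ? ~3u : ~1u;  /* clear the DctCfgSel field */
        reg |= dct;                          /* select the target DCT    */
        return reg;
    }

    int main(void)
    {
        printf("0x%08x\n", select_dct(0xdeadbee0, 3, 0x30)); /* 0xdeadbee3 */
        return 0;
    }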
115 static inline int amd64_read_dct_pci_cfg(struct amd64_pvt *pvt, u8 dct, in amd64_read_dct_pci_cfg() argument
118 switch (pvt->fam) { in amd64_read_dct_pci_cfg()
131 if (dct_ganging_enabled(pvt)) in amd64_read_dct_pci_cfg()
143 dct = (dct && pvt->model == 0x30) ? 3 : dct; in amd64_read_dct_pci_cfg()
144 f15h_select_dct(pvt, dct); in amd64_read_dct_pci_cfg()
155 return amd64_read_pci_cfg(pvt->F2, offset, val); in amd64_read_dct_pci_cfg()
214 struct amd64_pvt *pvt = mci->pvt_info; in set_scrub_rate() local
217 if (pvt->fam == 0xf) in set_scrub_rate()
221 if (pvt->fam == 0x15 && pvt->model < 0x10) in set_scrub_rate()
222 f15h_select_dct(pvt, 0); in set_scrub_rate()
224 return __set_scrub_rate(pvt->F3, bw, min_scrubrate); in set_scrub_rate()
229 struct amd64_pvt *pvt = mci->pvt_info; in get_scrub_rate() local
234 if (pvt->fam == 0x15 && pvt->model < 0x10) in get_scrub_rate()
235 f15h_select_dct(pvt, 0); in get_scrub_rate()
237 amd64_read_pci_cfg(pvt->F3, SCRCTRL, &scrubval); in get_scrub_rate()
254 static bool base_limit_match(struct amd64_pvt *pvt, u64 sys_addr, u8 nid) in base_limit_match() argument
266 return ((addr >= get_dram_base(pvt, nid)) && in base_limit_match()
267 (addr <= get_dram_limit(pvt, nid))); in base_limit_match()
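base_limit_match() is an inclusive containment test of a masked system address against a node's DRAM base/limit pair. A self-contained sketch; the 40-bit address mask is an assumption about the DRAM address width, not visible in the listing:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static bool base_limit_match(uint64_t base, uint64_t limit, uint64_t sys_addr)
    {
        uint64_t addr = sys_addr & ((1ULL << 40) - 1);  /* keep bits [39:0] */

        return addr >= base && addr <= limit;
    }

    int main(void)
    {
        /* example map: node 0 owns [0, 4GB), node 1 owns [4GB, 8GB) */
        printf("%d\n", base_limit_match(0, (1ULL << 32) - 1, 0x12345678));      /* 1 */
        printf("%d\n", base_limit_match(1ULL << 32, (1ULL << 33) - 1, 0x1000)); /* 0 */
        return 0;
    }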
279 struct amd64_pvt *pvt; in find_mc_by_sys_addr() local
287 pvt = mci->pvt_info; in find_mc_by_sys_addr()
294 intlv_en = dram_intlv_en(pvt, 0); in find_mc_by_sys_addr()
298 if (base_limit_match(pvt, sys_addr, node_id)) in find_mc_by_sys_addr()
314 if ((dram_intlv_sel(pvt, node_id) & intlv_en) == bits) in find_mc_by_sys_addr()
322 if (unlikely(!base_limit_match(pvt, sys_addr, node_id))) { in find_mc_by_sys_addr()
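The loop in find_mc_by_sys_addr() picks the owning node by comparing the enabled interleave bits of the address (taken here from addr[14:12], an assumption about these parts) against each node's IntlvSel field, as on line 314. A user-space sketch of that probe:

    #include <stdint.h>
    #include <stdio.h>

    static int node_matches(uint64_t sys_addr, uint8_t intlv_en, uint8_t intlv_sel)
    {
        uint8_t bits = ((uint32_t)sys_addr >> 12) & intlv_en;

        return (intlv_sel & intlv_en) == bits;
    }

    int main(void)
    {
        /* two-node interleave on address bit 12 (intlv_en == 1) */
        printf("%d\n", node_matches(0x1000, 1, 1)); /* 1: the IntlvSel==1 node owns it */
        printf("%d\n", node_matches(0x2000, 1, 0)); /* 1: the IntlvSel==0 node owns it */
        return 0;
    }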
343 static void get_cs_base_and_mask(struct amd64_pvt *pvt, int csrow, u8 dct, in get_cs_base_and_mask() argument
349 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_F) { in get_cs_base_and_mask()
350 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
351 csmask = pvt->csels[dct].csmasks[csrow]; in get_cs_base_and_mask()
360 } else if (pvt->fam == 0x16 || in get_cs_base_and_mask()
361 (pvt->fam == 0x15 && pvt->model >= 0x30)) { in get_cs_base_and_mask()
362 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
363 csmask = pvt->csels[dct].csmasks[csrow >> 1]; in get_cs_base_and_mask()
378 csbase = pvt->csels[dct].csbases[csrow]; in get_cs_base_and_mask()
379 csmask = pvt->csels[dct].csmasks[csrow >> 1]; in get_cs_base_and_mask()
382 if (pvt->fam == 0x15) in get_cs_base_and_mask()
399 #define for_each_chip_select(i, dct, pvt) \ argument
400 for (i = 0; i < pvt->csels[dct].b_cnt; i++)
402 #define chip_select_base(i, dct, pvt) \ argument
403 pvt->csels[dct].csbases[i]
405 #define for_each_chip_select_mask(i, dct, pvt) \ argument
406 for (i = 0; i < pvt->csels[dct].m_cnt; i++)
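The macros above iterate a DCT's chip-select base and mask arrays. A toy reimplementation showing the shape of the csels structure and a typical enabled-row scan; field names follow the listing, while the array sizes and the enable bit are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    struct chip_select {
        uint32_t csbases[8];
        uint32_t csmasks[8];
        uint8_t b_cnt;   /* number of base registers in use */
        uint8_t m_cnt;   /* number of mask registers in use */
    };

    #define for_each_chip_select(i, cs) \
        for ((i) = 0; (i) < (cs)->b_cnt; (i)++)

    int main(void)
    {
        struct chip_select cs = { .b_cnt = 4 };
        int i;

        cs.csbases[2] = 0x00200001;   /* bit 0 modeled as a CSEnable flag */

        for_each_chip_select(i, &cs)
            if (cs.csbases[i] & 1)
                printf("csrow %d enabled, base 0x%08x\n", i, cs.csbases[i]);
        return 0;
    }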
414 struct amd64_pvt *pvt; in input_addr_to_csrow() local
418 pvt = mci->pvt_info; in input_addr_to_csrow()
420 for_each_chip_select(csrow, 0, pvt) { in input_addr_to_csrow()
421 if (!csrow_enabled(csrow, 0, pvt)) in input_addr_to_csrow()
424 get_cs_base_and_mask(pvt, csrow, 0, &base, &mask); in input_addr_to_csrow()
431 pvt->mc_node_id); in input_addr_to_csrow()
437 (unsigned long)input_addr, pvt->mc_node_id); in input_addr_to_csrow()
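The match inside input_addr_to_csrow() (and f1x_lookup_addr_in_dct() further down) is: an address hits a csrow when it agrees with the base on every bit outside the mask. A compact model, assuming the mask returned by get_cs_base_and_mask() marks the don't-care bits:

    #include <stdint.h>
    #include <stdio.h>

    static int addr_matches_csrow(uint64_t input_addr, uint64_t base, uint64_t mask)
    {
        return (input_addr & ~mask) == (base & ~mask);
    }

    int main(void)
    {
        /* a 256MB row at 0x10000000: the low 28 bits are don't-care */
        uint64_t base = 0x10000000, mask = (1ULL << 28) - 1;

        printf("%d\n", addr_matches_csrow(0x1abcdef0, base, mask)); /* 1 */
        printf("%d\n", addr_matches_csrow(0x2abcdef0, base, mask)); /* 0 */
        return 0;
    }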
461 struct amd64_pvt *pvt = mci->pvt_info; in amd64_get_dram_hole_info() local
464 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_E) { in amd64_get_dram_hole_info()
466 pvt->ext_model, pvt->mc_node_id); in amd64_get_dram_hole_info()
471 if (pvt->fam >= 0x10 && !dhar_mem_hoist_valid(pvt)) { in amd64_get_dram_hole_info()
476 if (!dhar_valid(pvt)) { in amd64_get_dram_hole_info()
478 pvt->mc_node_id); in amd64_get_dram_hole_info()
500 *hole_base = dhar_base(pvt); in amd64_get_dram_hole_info()
503 *hole_offset = (pvt->fam > 0xf) ? f10_dhar_offset(pvt) in amd64_get_dram_hole_info()
504 : k8_dhar_offset(pvt); in amd64_get_dram_hole_info()
507 pvt->mc_node_id, (unsigned long)*hole_base, in amd64_get_dram_hole_info()
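amd64_get_dram_hole_info() reads the DRAM Hole Address Register; the hole runs from dhar_base() up to 4GB. A back-of-envelope model, with the base field taken as DHAR bits [31:24] (an assumption consistent with the dhar_base() helper):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t dhar = 0xc0000001;               /* example: base field 0xc0, valid bit set */
        uint64_t hole_base = dhar & 0xff000000;   /* dhar_base(): bits [31:24], already shifted */
        uint64_t hole_size = (1ULL << 32) - hole_base;

        printf("hole: base 0x%llx, size 0x%llx\n",
               (unsigned long long)hole_base, (unsigned long long)hole_size);
        return 0;
    }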
545 struct amd64_pvt *pvt = mci->pvt_info; in sys_addr_to_dram_addr() local
549 dram_base = get_dram_base(pvt, pvt->mc_node_id); in sys_addr_to_dram_addr()
601 struct amd64_pvt *pvt; in dram_addr_to_input_addr() local
605 pvt = mci->pvt_info; in dram_addr_to_input_addr()
611 intlv_shift = num_node_interleave_bits(dram_intlv_en(pvt, 0)); in dram_addr_to_input_addr()
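The interleave selector passed to num_node_interleave_bits() on line 611 is a contiguous low-bit mask (0, 1, 3 or 7), so the shift amount is simply its population count. A sketch using the GCC/Clang builtin:

    #include <stdio.h>

    static int num_node_interleave_bits(unsigned int intlv_en)
    {
        return __builtin_popcount(intlv_en & 0x7);  /* 0, 1, 3, 7 -> 0, 1, 2, 3 */
    }

    int main(void)
    {
        printf("%d %d %d %d\n",
               num_node_interleave_bits(0), num_node_interleave_bits(1),
               num_node_interleave_bits(3), num_node_interleave_bits(7));
        return 0;
    }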
673 static unsigned long determine_edac_cap(struct amd64_pvt *pvt) in determine_edac_cap() argument
678 bit = (pvt->fam > 0xf || pvt->ext_model >= K8_REV_F) in determine_edac_cap()
682 if (pvt->dclr0 & BIT(bit)) in determine_edac_cap()
690 static void debug_dump_dramcfg_low(struct amd64_pvt *pvt, u32 dclr, int chan) in debug_dump_dramcfg_low() argument
694 if (pvt->dram_type == MEM_LRDDR3) { in debug_dump_dramcfg_low()
695 u32 dcsm = pvt->csels[chan].csmasks[0]; in debug_dump_dramcfg_low()
711 if (pvt->fam == 0x10) in debug_dump_dramcfg_low()
723 static void dump_misc_regs(struct amd64_pvt *pvt) in dump_misc_regs() argument
725 edac_dbg(1, "F3xE8 (NB Cap): 0x%08x\n", pvt->nbcap); in dump_misc_regs()
728 (pvt->nbcap & NBCAP_DCT_DUAL) ? "yes" : "no"); in dump_misc_regs()
731 (pvt->nbcap & NBCAP_SECDED) ? "yes" : "no", in dump_misc_regs()
732 (pvt->nbcap & NBCAP_CHIPKILL) ? "yes" : "no"); in dump_misc_regs()
734 debug_dump_dramcfg_low(pvt, pvt->dclr0, 0); in dump_misc_regs()
736 edac_dbg(1, "F3xB0 (Online Spare): 0x%08x\n", pvt->online_spare); in dump_misc_regs()
739 pvt->dhar, dhar_base(pvt), in dump_misc_regs()
740 (pvt->fam == 0xf) ? k8_dhar_offset(pvt) in dump_misc_regs()
741 : f10_dhar_offset(pvt)); in dump_misc_regs()
743 edac_dbg(1, " DramHoleValid: %s\n", dhar_valid(pvt) ? "yes" : "no"); in dump_misc_regs()
745 debug_display_dimm_sizes(pvt, 0); in dump_misc_regs()
748 if (pvt->fam == 0xf) in dump_misc_regs()
751 debug_display_dimm_sizes(pvt, 1); in dump_misc_regs()
753 amd64_info("using %s syndromes.\n", ((pvt->ecc_sym_sz == 8) ? "x8" : "x4")); in dump_misc_regs()
756 if (!dct_ganging_enabled(pvt)) in dump_misc_regs()
757 debug_dump_dramcfg_low(pvt, pvt->dclr1, 1); in dump_misc_regs()
763 static void prep_chip_selects(struct amd64_pvt *pvt) in prep_chip_selects() argument
765 if (pvt->fam == 0xf && pvt->ext_model < K8_REV_F) { in prep_chip_selects()
766 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 8; in prep_chip_selects()
767 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 8; in prep_chip_selects()
768 } else if (pvt->fam == 0x15 && pvt->model == 0x30) { in prep_chip_selects()
769 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 4; in prep_chip_selects()
770 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 2; in prep_chip_selects()
772 pvt->csels[0].b_cnt = pvt->csels[1].b_cnt = 8; in prep_chip_selects()
773 pvt->csels[0].m_cnt = pvt->csels[1].m_cnt = 4; in prep_chip_selects()
780 static void read_dct_base_mask(struct amd64_pvt *pvt) in read_dct_base_mask() argument
784 prep_chip_selects(pvt); in read_dct_base_mask()
786 for_each_chip_select(cs, 0, pvt) { in read_dct_base_mask()
789 u32 *base0 = &pvt->csels[0].csbases[cs]; in read_dct_base_mask()
790 u32 *base1 = &pvt->csels[1].csbases[cs]; in read_dct_base_mask()
792 if (!amd64_read_dct_pci_cfg(pvt, 0, reg0, base0)) in read_dct_base_mask()
796 if (pvt->fam == 0xf) in read_dct_base_mask()
799 if (!amd64_read_dct_pci_cfg(pvt, 1, reg0, base1)) in read_dct_base_mask()
801 cs, *base1, (pvt->fam == 0x10) ? reg1 in read_dct_base_mask()
805 for_each_chip_select_mask(cs, 0, pvt) { in read_dct_base_mask()
808 u32 *mask0 = &pvt->csels[0].csmasks[cs]; in read_dct_base_mask()
809 u32 *mask1 = &pvt->csels[1].csmasks[cs]; in read_dct_base_mask()
811 if (!amd64_read_dct_pci_cfg(pvt, 0, reg0, mask0)) in read_dct_base_mask()
815 if (pvt->fam == 0xf) in read_dct_base_mask()
818 if (!amd64_read_dct_pci_cfg(pvt, 1, reg0, mask1)) in read_dct_base_mask()
820 cs, *mask1, (pvt->fam == 0x10) ? reg1 in read_dct_base_mask()
825 static void determine_memory_type(struct amd64_pvt *pvt) in determine_memory_type() argument
829 switch (pvt->fam) { in determine_memory_type()
831 if (pvt->ext_model >= K8_REV_F) in determine_memory_type()
834 pvt->dram_type = (pvt->dclr0 & BIT(18)) ? MEM_DDR : MEM_RDDR; in determine_memory_type()
838 if (pvt->dchr0 & DDR3_MODE) in determine_memory_type()
841 pvt->dram_type = (pvt->dclr0 & BIT(16)) ? MEM_DDR2 : MEM_RDDR2; in determine_memory_type()
845 if (pvt->model < 0x60) in determine_memory_type()
857 amd64_read_dct_pci_cfg(pvt, 0, DRAM_CONTROL, &dram_ctrl); in determine_memory_type()
858 dcsm = pvt->csels[0].csmasks[0]; in determine_memory_type()
861 pvt->dram_type = MEM_DDR4; in determine_memory_type()
862 else if (pvt->dclr0 & BIT(16)) in determine_memory_type()
863 pvt->dram_type = MEM_DDR3; in determine_memory_type()
865 pvt->dram_type = MEM_LRDDR3; in determine_memory_type()
867 pvt->dram_type = MEM_RDDR3; in determine_memory_type()
875 WARN(1, KERN_ERR "%s: Family??? 0x%x\n", __func__, pvt->fam); in determine_memory_type()
876 pvt->dram_type = MEM_EMPTY; in determine_memory_type()
881 pvt->dram_type = (pvt->dclr0 & BIT(16)) ? MEM_DDR3 : MEM_RDDR3; in determine_memory_type()
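determine_memory_type() is a per-family decision tree over DCLR/DCHR bits. A condensed user-space rendering of the family 10h branch (lines 838-841 plus the shared tail on line 881); the DDR3_MODE bit position is assumed:

    #include <stdint.h>
    #include <stdio.h>

    enum mem_type { MEM_DDR2, MEM_RDDR2, MEM_DDR3, MEM_RDDR3 };

    static enum mem_type f10_dram_type(uint32_t dchr0, uint32_t dclr0)
    {
        if (dchr0 & (1u << 8))   /* DDR3_MODE; bit position is an assumption */
            return (dclr0 & (1u << 16)) ? MEM_DDR3 : MEM_RDDR3;
        return (dclr0 & (1u << 16)) ? MEM_DDR2 : MEM_RDDR2;
    }

    int main(void)
    {
        printf("%d\n", f10_dram_type(1u << 8, 1u << 16));  /* MEM_DDR3  */
        printf("%d\n", f10_dram_type(0, 0));               /* MEM_RDDR2 */
        return 0;
    }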
885 static int k8_early_channel_count(struct amd64_pvt *pvt) in k8_early_channel_count() argument
889 if (pvt->ext_model >= K8_REV_F) in k8_early_channel_count()
891 flag = pvt->dclr0 & WIDTH_128; in k8_early_channel_count()
894 flag = pvt->dclr0 & REVE_WIDTH_128; in k8_early_channel_count()
897 pvt->dclr1 = 0; in k8_early_channel_count()
903 static u64 get_error_address(struct amd64_pvt *pvt, struct mce *m) in get_error_address() argument
915 pvt = mci->pvt_info; in get_error_address()
917 if (pvt->fam == 0xf) { in get_error_address()
927 if (pvt->fam == 0x15) { in get_error_address()
936 amd64_read_pci_cfg(pvt->F1, DRAM_LOCAL_NODE_LIM, &tmp); in get_error_address()
951 amd64_read_pci_cfg(pvt->F1, DRAM_LOCAL_NODE_BASE, &tmp); in get_error_address()
984 static void read_dram_base_limit_regs(struct amd64_pvt *pvt, unsigned range) in read_dram_base_limit_regs() argument
992 amd64_read_pci_cfg(pvt->F1, DRAM_BASE_LO + off, &pvt->ranges[range].base.lo); in read_dram_base_limit_regs()
993 amd64_read_pci_cfg(pvt->F1, DRAM_LIMIT_LO + off, &pvt->ranges[range].lim.lo); in read_dram_base_limit_regs()
995 if (pvt->fam == 0xf) in read_dram_base_limit_regs()
998 if (!dram_rw(pvt, range)) in read_dram_base_limit_regs()
1001 amd64_read_pci_cfg(pvt->F1, DRAM_BASE_HI + off, &pvt->ranges[range].base.hi); in read_dram_base_limit_regs()
1002 amd64_read_pci_cfg(pvt->F1, DRAM_LIMIT_HI + off, &pvt->ranges[range].lim.hi); in read_dram_base_limit_regs()
1005 if (pvt->fam != 0x15) in read_dram_base_limit_regs()
1008 nb = node_to_amd_nb(dram_dst_node(pvt, range)); in read_dram_base_limit_regs()
1012 if (pvt->model == 0x60) in read_dram_base_limit_regs()
1014 else if (pvt->model == 0x30) in read_dram_base_limit_regs()
1025 pvt->ranges[range].lim.lo &= GENMASK_ULL(15, 0); in read_dram_base_limit_regs()
1028 pvt->ranges[range].lim.lo |= ((llim & 0x1fff) << 3 | 0x7) << 16; in read_dram_base_limit_regs()
1030 pvt->ranges[range].lim.hi &= GENMASK_ULL(7, 0); in read_dram_base_limit_regs()
1033 pvt->ranges[range].lim.hi |= llim >> 13; in read_dram_base_limit_regs()
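Lines 1025-1033 splice a 64-bit DRAM limit together from the F15h local-node limit field: the low 13 bits land in lim.lo[31:16] with the low three bits of that field filled with ones, and the remainder goes into lim.hi. The same arithmetic, runnable stand-alone:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t llim = 0x1ffff;   /* example DramLimit field value */
        uint32_t lim_lo = 0, lim_hi = 0;

        lim_lo &= 0xffff;                                 /* GENMASK_ULL(15, 0) */
        lim_lo |= ((llim & 0x1fff) << 3 | 0x7) << 16;
        lim_hi &= 0xff;                                   /* GENMASK_ULL(7, 0)  */
        lim_hi |= llim >> 13;

        printf("lim: hi 0x%08x lo 0x%08x\n", lim_hi, lim_lo);
        return 0;
    }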
1041 struct amd64_pvt *pvt = mci->pvt_info; in k8_map_sysaddr_to_csrow() local
1065 if (pvt->nbcfg & NBCFG_CHIPKILL) { in k8_map_sysaddr_to_csrow()
1106 static int k8_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in k8_dbam_to_chip_select() argument
1109 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0; in k8_dbam_to_chip_select()
1111 if (pvt->ext_model >= K8_REV_F) { in k8_dbam_to_chip_select()
1115 else if (pvt->ext_model >= K8_REV_D) { in k8_dbam_to_chip_select()
1161 static int f1x_early_channel_count(struct amd64_pvt *pvt) in f1x_early_channel_count() argument
1166 if (pvt->fam == 0x10 && (pvt->dclr0 & WIDTH_128)) in f1x_early_channel_count()
1185 u32 dbam = (i ? pvt->dbam1 : pvt->dbam0); in f1x_early_channel_count()
1260 static int f10_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f10_dbam_to_chip_select() argument
1263 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0; in f10_dbam_to_chip_select()
1267 if (pvt->dchr0 & DDR3_MODE || pvt->dchr1 & DDR3_MODE) in f10_dbam_to_chip_select()
1276 static int f15_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f15_dbam_to_chip_select() argument
1285 static int f15_m60h_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f15_m60h_dbam_to_chip_select() argument
1289 u32 dcsm = pvt->csels[dct].csmasks[cs_mask_nr]; in f15_m60h_dbam_to_chip_select()
1293 if (pvt->dram_type == MEM_DDR4) { in f15_m60h_dbam_to_chip_select()
1298 } else if (pvt->dram_type == MEM_LRDDR3) { in f15_m60h_dbam_to_chip_select()
1318 static int f16_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct, in f16_dbam_to_chip_select() argument
1330 static void read_dram_ctl_register(struct amd64_pvt *pvt) in read_dram_ctl_register() argument
1333 if (pvt->fam == 0xf) in read_dram_ctl_register()
1336 if (!amd64_read_pci_cfg(pvt->F2, DCT_SEL_LO, &pvt->dct_sel_lo)) { in read_dram_ctl_register()
1338 pvt->dct_sel_lo, dct_sel_baseaddr(pvt)); in read_dram_ctl_register()
1341 (dct_ganging_enabled(pvt) ? "ganged" : "unganged")); in read_dram_ctl_register()
1343 if (!dct_ganging_enabled(pvt)) in read_dram_ctl_register()
1345 (dct_high_range_enabled(pvt) ? "yes" : "no")); in read_dram_ctl_register()
1348 (dct_data_intlv_enabled(pvt) ? "enabled" : "disabled"), in read_dram_ctl_register()
1349 (dct_memory_cleared(pvt) ? "yes" : "no")); in read_dram_ctl_register()
1353 (dct_interleave_enabled(pvt) ? "enabled" : "disabled"), in read_dram_ctl_register()
1354 dct_sel_interleave_addr(pvt)); in read_dram_ctl_register()
1357 amd64_read_pci_cfg(pvt->F2, DCT_SEL_HI, &pvt->dct_sel_hi); in read_dram_ctl_register()
1364 static u8 f15_m30h_determine_channel(struct amd64_pvt *pvt, u64 sys_addr, in f15_m30h_determine_channel() argument
1378 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f15_m30h_determine_channel()
1395 static u8 f1x_determine_channel(struct amd64_pvt *pvt, u64 sys_addr, in f1x_determine_channel() argument
1398 u8 dct_sel_high = (pvt->dct_sel_lo >> 1) & 1; in f1x_determine_channel()
1400 if (dct_ganging_enabled(pvt)) in f1x_determine_channel()
1409 if (dct_interleave_enabled(pvt)) { in f1x_determine_channel()
1410 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f1x_determine_channel()
1426 if (dct_high_range_enabled(pvt)) in f1x_determine_channel()
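f1x_determine_channel() supports several interleave modes; in the simplest one (DctSelIntLvAddr == 0, an assumption about the encoding) the channel is just address bit 6, i.e. channels alternate every 64-byte block. A trivial model of that case:

    #include <stdint.h>
    #include <stdio.h>

    static uint8_t channel_from_addr_bit6(uint64_t sys_addr)
    {
        return (sys_addr >> 6) & 1;   /* 64-byte granularity channel select */
    }

    int main(void)
    {
        printf("%u %u\n",
               channel_from_addr_bit6(0x00), channel_from_addr_bit6(0x40)); /* 0 1 */
        return 0;
    }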
1433 static u64 f1x_get_norm_dct_addr(struct amd64_pvt *pvt, u8 range, in f1x_get_norm_dct_addr() argument
1438 u64 dram_base = get_dram_base(pvt, range); in f1x_get_norm_dct_addr()
1439 u64 hole_off = f10_dhar_offset(pvt); in f1x_get_norm_dct_addr()
1440 u64 dct_sel_base_off = (u64)(pvt->dct_sel_hi & 0xFFFFFC00) << 16; in f1x_get_norm_dct_addr()
1455 dct_sel_base_addr < dhar_base(pvt)) && in f1x_get_norm_dct_addr()
1456 dhar_valid(pvt) && in f1x_get_norm_dct_addr()
1471 if (dhar_valid(pvt) && (sys_addr >= BIT_64(32))) in f1x_get_norm_dct_addr()
1484 static int f10_process_possible_spare(struct amd64_pvt *pvt, u8 dct, int csrow) in f10_process_possible_spare() argument
1488 if (online_spare_swap_done(pvt, dct) && in f10_process_possible_spare()
1489 csrow == online_spare_bad_dramcs(pvt, dct)) { in f10_process_possible_spare()
1491 for_each_chip_select(tmp_cs, dct, pvt) { in f10_process_possible_spare()
1492 if (chip_select_base(tmp_cs, dct, pvt) & 0x2) { in f10_process_possible_spare()
1512 struct amd64_pvt *pvt; in f1x_lookup_addr_in_dct() local
1521 pvt = mci->pvt_info; in f1x_lookup_addr_in_dct()
1525 for_each_chip_select(csrow, dct, pvt) { in f1x_lookup_addr_in_dct()
1526 if (!csrow_enabled(csrow, dct, pvt)) in f1x_lookup_addr_in_dct()
1529 get_cs_base_and_mask(pvt, csrow, dct, &cs_base, &cs_mask); in f1x_lookup_addr_in_dct()
1540 if (pvt->fam == 0x15 && pvt->model >= 0x30) { in f1x_lookup_addr_in_dct()
1544 cs_found = f10_process_possible_spare(pvt, dct, csrow); in f1x_lookup_addr_in_dct()
1558 static u64 f1x_swap_interleaved_region(struct amd64_pvt *pvt, u64 sys_addr) in f1x_swap_interleaved_region() argument
1562 if (pvt->fam == 0x10) { in f1x_swap_interleaved_region()
1564 if (pvt->model < 4 || (pvt->model < 0xa && pvt->stepping < 3)) in f1x_swap_interleaved_region()
1568 amd64_read_pci_cfg(pvt->F2, SWAP_INTLV_REG, &swap_reg); in f1x_swap_interleaved_region()
1588 static int f1x_match_to_this_node(struct amd64_pvt *pvt, unsigned range, in f1x_match_to_this_node() argument
1597 u8 node_id = dram_dst_node(pvt, range); in f1x_match_to_this_node()
1598 u8 intlv_en = dram_intlv_en(pvt, range); in f1x_match_to_this_node()
1599 u32 intlv_sel = dram_intlv_sel(pvt, range); in f1x_match_to_this_node()
1602 range, sys_addr, get_dram_limit(pvt, range)); in f1x_match_to_this_node()
1604 if (dhar_valid(pvt) && in f1x_match_to_this_node()
1605 dhar_base(pvt) <= sys_addr && in f1x_match_to_this_node()
1615 sys_addr = f1x_swap_interleaved_region(pvt, sys_addr); in f1x_match_to_this_node()
1617 dct_sel_base = dct_sel_baseaddr(pvt); in f1x_match_to_this_node()
1623 if (dct_high_range_enabled(pvt) && in f1x_match_to_this_node()
1624 !dct_ganging_enabled(pvt) && in f1x_match_to_this_node()
1628 channel = f1x_determine_channel(pvt, sys_addr, high_range, intlv_en); in f1x_match_to_this_node()
1630 chan_addr = f1x_get_norm_dct_addr(pvt, range, sys_addr, in f1x_match_to_this_node()
1639 if (dct_interleave_enabled(pvt) && in f1x_match_to_this_node()
1640 !dct_high_range_enabled(pvt) && in f1x_match_to_this_node()
1641 !dct_ganging_enabled(pvt)) { in f1x_match_to_this_node()
1643 if (dct_sel_interleave_addr(pvt) != 1) { in f1x_match_to_this_node()
1644 if (dct_sel_interleave_addr(pvt) == 0x3) in f1x_match_to_this_node()
1668 static int f15_m30h_match_to_this_node(struct amd64_pvt *pvt, unsigned range, in f15_m30h_match_to_this_node() argument
1678 u64 dhar_offset = f10_dhar_offset(pvt); in f15_m30h_match_to_this_node()
1679 u8 intlv_addr = dct_sel_interleave_addr(pvt); in f15_m30h_match_to_this_node()
1680 u8 node_id = dram_dst_node(pvt, range); in f15_m30h_match_to_this_node()
1681 u8 intlv_en = dram_intlv_en(pvt, range); in f15_m30h_match_to_this_node()
1683 amd64_read_pci_cfg(pvt->F1, DRAM_CONT_BASE, &dct_cont_base_reg); in f15_m30h_match_to_this_node()
1684 amd64_read_pci_cfg(pvt->F1, DRAM_CONT_LIMIT, &dct_cont_limit_reg); in f15_m30h_match_to_this_node()
1690 range, sys_addr, get_dram_limit(pvt, range)); in f15_m30h_match_to_this_node()
1692 if (!(get_dram_base(pvt, range) <= sys_addr) && in f15_m30h_match_to_this_node()
1693 !(get_dram_limit(pvt, range) >= sys_addr)) in f15_m30h_match_to_this_node()
1696 if (dhar_valid(pvt) && in f15_m30h_match_to_this_node()
1697 dhar_base(pvt) <= sys_addr && in f15_m30h_match_to_this_node()
1705 dct_base = (u64) dct_sel_baseaddr(pvt); in f15_m30h_match_to_this_node()
1719 channel = f15_m30h_determine_channel(pvt, sys_addr, intlv_en, in f15_m30h_match_to_this_node()
1759 amd64_read_pci_cfg(pvt->F1, in f15_m30h_match_to_this_node()
1765 f15h_select_dct(pvt, channel); in f15_m30h_match_to_this_node()
1787 static int f1x_translate_sysaddr_to_cs(struct amd64_pvt *pvt, in f1x_translate_sysaddr_to_cs() argument
1795 if (!dram_rw(pvt, range)) in f1x_translate_sysaddr_to_cs()
1798 if (pvt->fam == 0x15 && pvt->model >= 0x30) in f1x_translate_sysaddr_to_cs()
1799 cs_found = f15_m30h_match_to_this_node(pvt, range, in f1x_translate_sysaddr_to_cs()
1803 else if ((get_dram_base(pvt, range) <= sys_addr) && in f1x_translate_sysaddr_to_cs()
1804 (get_dram_limit(pvt, range) >= sys_addr)) { in f1x_translate_sysaddr_to_cs()
1805 cs_found = f1x_match_to_this_node(pvt, range, in f1x_translate_sysaddr_to_cs()
1824 struct amd64_pvt *pvt = mci->pvt_info; in f1x_map_sysaddr_to_csrow() local
1828 err->csrow = f1x_translate_sysaddr_to_cs(pvt, sys_addr, &err->channel); in f1x_map_sysaddr_to_csrow()
1839 if (dct_ganging_enabled(pvt)) in f1x_map_sysaddr_to_csrow()
1847 static void debug_display_dimm_sizes(struct amd64_pvt *pvt, u8 ctrl) in debug_display_dimm_sizes() argument
1850 u32 *dcsb = ctrl ? pvt->csels[1].csbases : pvt->csels[0].csbases; in debug_display_dimm_sizes()
1851 u32 dbam = ctrl ? pvt->dbam1 : pvt->dbam0; in debug_display_dimm_sizes()
1853 if (pvt->fam == 0xf) { in debug_display_dimm_sizes()
1855 if (pvt->ext_model < K8_REV_F) in debug_display_dimm_sizes()
1861 if (pvt->fam == 0x10) { in debug_display_dimm_sizes()
1862 dbam = (ctrl && !dct_ganging_enabled(pvt)) ? pvt->dbam1 in debug_display_dimm_sizes()
1863 : pvt->dbam0; in debug_display_dimm_sizes()
1864 dcsb = (ctrl && !dct_ganging_enabled(pvt)) ? in debug_display_dimm_sizes()
1865 pvt->csels[1].csbases : in debug_display_dimm_sizes()
1866 pvt->csels[0].csbases; in debug_display_dimm_sizes()
1868 dbam = pvt->dbam0; in debug_display_dimm_sizes()
1869 dcsb = pvt->csels[1].csbases; in debug_display_dimm_sizes()
1886 size0 = pvt->ops->dbam_to_cs(pvt, ctrl, in debug_display_dimm_sizes()
1892 size1 = pvt->ops->dbam_to_cs(pvt, ctrl, in debug_display_dimm_sizes()
2120 struct amd64_pvt *pvt = mci->pvt_info; in get_channel_from_ecc_syndrome() local
2123 if (pvt->ecc_sym_sz == 8) in get_channel_from_ecc_syndrome()
2126 pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2127 else if (pvt->ecc_sym_sz == 4) in get_channel_from_ecc_syndrome()
2130 pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2132 amd64_warn("Illegal syndrome type: %u\n", pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2136 return map_err_sym_to_channel(err_sym, pvt->ecc_sym_sz); in get_channel_from_ecc_syndrome()
2181 struct amd64_pvt *pvt; in decode_bus_error() local
2192 pvt = mci->pvt_info; in decode_bus_error()
2204 sys_addr = get_error_address(pvt, m); in decode_bus_error()
2209 pvt->ops->map_sysaddr_to_csrow(mci, sys_addr, &err); in decode_bus_error()
2218 static int reserve_mc_sibling_devs(struct amd64_pvt *pvt, u16 f1_id, u16 f3_id) in reserve_mc_sibling_devs() argument
2221 pvt->F1 = pci_get_related_function(pvt->F2->vendor, f1_id, pvt->F2); in reserve_mc_sibling_devs()
2222 if (!pvt->F1) { in reserve_mc_sibling_devs()
2230 pvt->F3 = pci_get_related_function(pvt->F2->vendor, f3_id, pvt->F2); in reserve_mc_sibling_devs()
2231 if (!pvt->F3) { in reserve_mc_sibling_devs()
2232 pci_dev_put(pvt->F1); in reserve_mc_sibling_devs()
2233 pvt->F1 = NULL; in reserve_mc_sibling_devs()
2241 edac_dbg(1, "F1: %s\n", pci_name(pvt->F1)); in reserve_mc_sibling_devs()
2242 edac_dbg(1, "F2: %s\n", pci_name(pvt->F2)); in reserve_mc_sibling_devs()
2243 edac_dbg(1, "F3: %s\n", pci_name(pvt->F3)); in reserve_mc_sibling_devs()
2248 static void free_mc_sibling_devs(struct amd64_pvt *pvt) in free_mc_sibling_devs() argument
2250 pci_dev_put(pvt->F1); in free_mc_sibling_devs()
2251 pci_dev_put(pvt->F3); in free_mc_sibling_devs()
2258 static void read_mc_regs(struct amd64_pvt *pvt) in read_mc_regs() argument
2268 rdmsrl(MSR_K8_TOP_MEM1, pvt->top_mem); in read_mc_regs()
2269 edac_dbg(0, " TOP_MEM: 0x%016llx\n", pvt->top_mem); in read_mc_regs()
2274 rdmsrl(MSR_K8_TOP_MEM2, pvt->top_mem2); in read_mc_regs()
2275 edac_dbg(0, " TOP_MEM2: 0x%016llx\n", pvt->top_mem2); in read_mc_regs()
2279 amd64_read_pci_cfg(pvt->F3, NBCAP, &pvt->nbcap); in read_mc_regs()
2281 read_dram_ctl_register(pvt); in read_mc_regs()
2287 read_dram_base_limit_regs(pvt, range); in read_mc_regs()
2289 rw = dram_rw(pvt, range); in read_mc_regs()
2295 get_dram_base(pvt, range), in read_mc_regs()
2296 get_dram_limit(pvt, range)); in read_mc_regs()
2299 dram_intlv_en(pvt, range) ? "Enabled" : "Disabled", in read_mc_regs()
2302 dram_intlv_sel(pvt, range), in read_mc_regs()
2303 dram_dst_node(pvt, range)); in read_mc_regs()
2306 read_dct_base_mask(pvt); in read_mc_regs()
2308 amd64_read_pci_cfg(pvt->F1, DHAR, &pvt->dhar); in read_mc_regs()
2309 amd64_read_dct_pci_cfg(pvt, 0, DBAM0, &pvt->dbam0); in read_mc_regs()
2311 amd64_read_pci_cfg(pvt->F3, F10_ONLINE_SPARE, &pvt->online_spare); in read_mc_regs()
2313 amd64_read_dct_pci_cfg(pvt, 0, DCLR0, &pvt->dclr0); in read_mc_regs()
2314 amd64_read_dct_pci_cfg(pvt, 0, DCHR0, &pvt->dchr0); in read_mc_regs()
2316 if (!dct_ganging_enabled(pvt)) { in read_mc_regs()
2317 amd64_read_dct_pci_cfg(pvt, 1, DCLR0, &pvt->dclr1); in read_mc_regs()
2318 amd64_read_dct_pci_cfg(pvt, 1, DCHR0, &pvt->dchr1); in read_mc_regs()
2321 pvt->ecc_sym_sz = 4; in read_mc_regs()
2322 determine_memory_type(pvt); in read_mc_regs()
2323 edac_dbg(1, " DIMM type: %s\n", edac_mem_types[pvt->dram_type]); in read_mc_regs()
2325 if (pvt->fam >= 0x10) { in read_mc_regs()
2326 amd64_read_pci_cfg(pvt->F3, EXT_NB_MCA_CFG, &tmp); in read_mc_regs()
2328 if (pvt->fam != 0x16) in read_mc_regs()
2329 amd64_read_dct_pci_cfg(pvt, 1, DBAM0, &pvt->dbam1); in read_mc_regs()
2332 if ((pvt->fam > 0x10 || pvt->model > 7) && tmp & BIT(25)) in read_mc_regs()
2333 pvt->ecc_sym_sz = 8; in read_mc_regs()
2335 dump_misc_regs(pvt); in read_mc_regs()
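The symbol-size selection near the end of read_mc_regs() (lines 2321-2333): x8 syndromes require family 0x10 or newer, family above 0x10 or model above 7, and bit 25 of the extended NB MCA config; otherwise the earlier default of 4 stands. The same condition extracted into a pure function:

    #include <stdint.h>
    #include <stdio.h>

    static int ecc_sym_sz(uint8_t fam, uint8_t model, uint32_t ext_nb_mca_cfg)
    {
        if (fam >= 0x10 && (fam > 0x10 || model > 7) &&
            (ext_nb_mca_cfg & (1u << 25)))
            return 8;
        return 4;   /* default, set before the family check in the listing */
    }

    int main(void)
    {
        printf("%d\n", ecc_sym_sz(0x15, 0x00, 1u << 25)); /* 8 */
        printf("%d\n", ecc_sym_sz(0x10, 0x02, 1u << 25)); /* 4 */
        return 0;
    }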
2372 static u32 get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr) in get_csrow_nr_pages() argument
2375 u32 dbam = dct ? pvt->dbam1 : pvt->dbam0; in get_csrow_nr_pages()
2387 nr_pages = pvt->ops->dbam_to_cs(pvt, dct, cs_mode, (csrow_nr / 2)) in get_csrow_nr_pages()
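get_csrow_nr_pages() converts the MiB figure returned by dbam_to_cs() into pages with a single shift, since 1 MiB is 2^(20 - PAGE_SHIFT) pages. The arithmetic in isolation, assuming the usual PAGE_SHIFT of 12:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12

    int main(void)
    {
        uint32_t size_mb = 2048;   /* e.g. a 2 GiB chip select */
        uint32_t nr_pages = size_mb << (20 - PAGE_SHIFT);

        printf("%u pages\n", nr_pages);   /* 524288 */
        return 0;
    }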
2403 struct amd64_pvt *pvt = mci->pvt_info; in init_csrows() local
2411 amd64_read_pci_cfg(pvt->F3, NBCFG, &val); in init_csrows()
2413 pvt->nbcfg = val; in init_csrows()
2416 pvt->mc_node_id, val, in init_csrows()
2422 for_each_chip_select(i, 0, pvt) { in init_csrows()
2423 bool row_dct0 = !!csrow_enabled(i, 0, pvt); in init_csrows()
2426 if (pvt->fam != 0xf) in init_csrows()
2427 row_dct1 = !!csrow_enabled(i, 1, pvt); in init_csrows()
2436 pvt->mc_node_id, i); in init_csrows()
2439 nr_pages = get_csrow_nr_pages(pvt, 0, i); in init_csrows()
2444 if (pvt->fam != 0xf && row_dct1) { in init_csrows()
2445 int row_dct1_pages = get_csrow_nr_pages(pvt, 1, i); in init_csrows()
2456 if (pvt->nbcfg & NBCFG_ECC_ENABLE) in init_csrows()
2457 edac_mode = (pvt->nbcfg & NBCFG_CHIPKILL) ? in init_csrows()
2462 for (j = 0; j < pvt->channel_count; j++) { in init_csrows()
2464 dimm->mtype = pvt->dram_type; in init_csrows()
2672 struct amd64_pvt *pvt = mci->pvt_info; in setup_mci_misc_attrs() local
2677 if (pvt->nbcap & NBCAP_SECDED) in setup_mci_misc_attrs()
2680 if (pvt->nbcap & NBCAP_CHIPKILL) in setup_mci_misc_attrs()
2683 mci->edac_cap = determine_edac_cap(pvt); in setup_mci_misc_attrs()
2687 mci->dev_name = pci_name(pvt->F2); in setup_mci_misc_attrs()
2698 static struct amd64_family_type *per_family_init(struct amd64_pvt *pvt) in per_family_init() argument
2702 pvt->ext_model = boot_cpu_data.x86_model >> 4; in per_family_init()
2703 pvt->stepping = boot_cpu_data.x86_mask; in per_family_init()
2704 pvt->model = boot_cpu_data.x86_model; in per_family_init()
2705 pvt->fam = boot_cpu_data.x86; in per_family_init()
2707 switch (pvt->fam) { in per_family_init()
2710 pvt->ops = &family_types[K8_CPUS].ops; in per_family_init()
2715 pvt->ops = &family_types[F10_CPUS].ops; in per_family_init()
2719 if (pvt->model == 0x30) { in per_family_init()
2721 pvt->ops = &family_types[F15_M30H_CPUS].ops; in per_family_init()
2723 } else if (pvt->model == 0x60) { in per_family_init()
2725 pvt->ops = &family_types[F15_M60H_CPUS].ops; in per_family_init()
2730 pvt->ops = &family_types[F15_CPUS].ops; in per_family_init()
2734 if (pvt->model == 0x30) { in per_family_init()
2736 pvt->ops = &family_types[F16_M30H_CPUS].ops; in per_family_init()
2740 pvt->ops = &family_types[F16_CPUS].ops; in per_family_init()
2749 (pvt->fam == 0xf ? in per_family_init()
2750 (pvt->ext_model >= K8_REV_F ? "revF or later " in per_family_init()
2752 : ""), pvt->mc_node_id); in per_family_init()
2768 struct amd64_pvt *pvt = NULL; in init_one_instance() local
2776 pvt = kzalloc(sizeof(struct amd64_pvt), GFP_KERNEL); in init_one_instance()
2777 if (!pvt) in init_one_instance()
2780 pvt->mc_node_id = nid; in init_one_instance()
2781 pvt->F2 = F2; in init_one_instance()
2784 fam_type = per_family_init(pvt); in init_one_instance()
2789 err = reserve_mc_sibling_devs(pvt, fam_type->f1_id, fam_type->f3_id); in init_one_instance()
2793 read_mc_regs(pvt); in init_one_instance()
2801 pvt->channel_count = pvt->ops->early_channel_count(pvt); in init_one_instance()
2802 if (pvt->channel_count < 0) in init_one_instance()
2807 layers[0].size = pvt->csels[0].b_cnt; in init_one_instance()
2823 mci->pvt_info = pvt; in init_one_instance()
2824 mci->pdev = &pvt->F2->dev; in init_one_instance()
2851 free_mc_sibling_devs(pvt); in init_one_instance()
2854 kfree(pvt); in init_one_instance()
2912 struct amd64_pvt *pvt; in remove_one_instance() local
2925 pvt = mci->pvt_info; in remove_one_instance()
2929 free_mc_sibling_devs(pvt); in remove_one_instance()
2941 kfree(pvt); in remove_one_instance()
2972 struct amd64_pvt *pvt; in setup_pci_device() local
2981 pvt = mci->pvt_info; in setup_pci_device()
2982 pci_ctl = edac_pci_create_generic_ctl(&pvt->F2->dev, EDAC_MOD_STR); in setup_pci_device()