/linux-4.1.27/drivers/gpu/drm/radeon/ |
D | radeon_clocks.c |
    38   uint32_t fb_div, ref_div, post_div, sclk;  in radeon_legacy_get_engine_clock() local
    53   post_div = RREG32_PLL(RADEON_SCLK_CNTL) & RADEON_SCLK_SRC_SEL_MASK;  in radeon_legacy_get_engine_clock()
    54   if (post_div == 2)  in radeon_legacy_get_engine_clock()
    56   else if (post_div == 3)  in radeon_legacy_get_engine_clock()
    58   else if (post_div == 4)  in radeon_legacy_get_engine_clock()
    68   uint32_t fb_div, ref_div, post_div, mclk;  in radeon_legacy_get_memory_clock() local
    83   post_div = RREG32_PLL(RADEON_MCLK_CNTL) & 0x7;  in radeon_legacy_get_memory_clock()
    84   if (post_div == 2)  in radeon_legacy_get_memory_clock()
    86   else if (post_div == 3)  in radeon_legacy_get_memory_clock()
    88   else if (post_div == 4)  in radeon_legacy_get_memory_clock()
    [all …]
|
D | radeon_display.c |
    897   static void avivo_get_fb_ref_div(unsigned nom, unsigned den, unsigned post_div,  in avivo_get_fb_ref_div() argument
    902   ref_div_max = max(min(100 / post_div, ref_div_max), 1u);  in avivo_get_fb_ref_div()
    905   *ref_div = min(max(DIV_ROUND_CLOSEST(den, post_div), 1u), ref_div_max);  in avivo_get_fb_ref_div()
    906   *fb_div = DIV_ROUND_CLOSEST(nom * *ref_div * post_div, den);  in avivo_get_fb_ref_div()
    940   unsigned post_div_min, post_div_max, post_div;  in radeon_compute_pll_avivo() local
    971   post_div_min = pll->post_div;  in radeon_compute_pll_avivo()
    972   post_div_max = pll->post_div;  in radeon_compute_pll_avivo()
    1016  for (post_div = post_div_min; post_div <= post_div_max; ++post_div) {  in radeon_compute_pll_avivo()
    1018  avivo_get_fb_ref_div(nom, den, post_div, fb_div_max,  in radeon_compute_pll_avivo()
    1021  (ref_div * post_div));  in radeon_compute_pll_avivo()
    [all …]
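The three avivo_get_fb_ref_div() lines above carry the whole divider-selection idea, so here is a condensed user-space sketch of just those lines. The helper names and the standalone form are mine; the full function also takes fb_div_max and clamps the result to it, lines this listing omits. post_div is assumed to be at least 1.

/* Round-to-nearest unsigned division, matching the kernel's
 * DIV_ROUND_CLOSEST() for unsigned operands. */
unsigned div_round_closest(unsigned n, unsigned d)
{
	return (n + d / 2) / d;
}

unsigned umin(unsigned a, unsigned b) { return a < b ? a : b; }
unsigned umax(unsigned a, unsigned b) { return a > b ? a : b; }

/* For a requested ratio nom/den and a fixed post divider, keep
 * ref_div * post_div bounded (near 100 or below), then pick fb_div so that
 * fb_div / (ref_div * post_div) approximates nom / den. */
void get_fb_ref_div(unsigned nom, unsigned den, unsigned post_div,
		    unsigned ref_div_max, unsigned *fb_div, unsigned *ref_div)
{
	ref_div_max = umax(umin(100 / post_div, ref_div_max), 1u);
	*ref_div = umin(umax(div_round_closest(den, post_div), 1u), ref_div_max);
	*fb_div = div_round_closest(nom * *ref_div * post_div, den);
}

The surrounding loop in radeon_compute_pll_avivo() then sweeps post_div over the allowed range and evaluates each candidate divider set against the requested clock.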
|
D | radeon_legacy_tv.c |
    869   int post_div;  in get_post_div() local
    871   case 1: post_div = 0; break;  in get_post_div()
    872   case 2: post_div = 1; break;  in get_post_div()
    873   case 3: post_div = 4; break;  in get_post_div()
    874   case 4: post_div = 2; break;  in get_post_div()
    875   case 6: post_div = 6; break;  in get_post_div()
    876   case 8: post_div = 3; break;  in get_post_div()
    877   case 12: post_div = 7; break;  in get_post_div()
    879   default: post_div = 5; break;  in get_post_div()
    881   return post_div;  in get_post_div()
|
D | rv730_dpm.c |
    65    post_divider = ((dividers.post_div >> 4) & 0xf) +  in rv730_populate_sclk_value()
    66    (dividers.post_div & 0xf) + 2;  in rv730_populate_sclk_value()
    81    spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);  in rv730_populate_sclk_value()
    82    spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);  in rv730_populate_sclk_value()
    143   post_divider = ((dividers.post_div >> 4) & 0xf) +  in rv730_populate_mclk_value()
    144   (dividers.post_div & 0xf) + 2;  in rv730_populate_mclk_value()
    156   mpll_func_cntl |= MPLL_HILEN((dividers.post_div >> 4) & 0xf);  in rv730_populate_mclk_value()
    157   mpll_func_cntl |= MPLL_LOLEN(dividers.post_div & 0xf);  in rv730_populate_mclk_value()
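rv730_dpm.c (and rv770_dpm.c, rv6xx_dpm.c and rs780_dpm.c further down) treat the ATOM post_div byte as two packed nibbles: the high nibble becomes the PLL high-period length (HILEN), the low nibble the low-period length (LOLEN), and the effective divider is their sum plus two. A minimal decode sketch, assuming only what the quoted arithmetic shows:

#include <stdint.h>

/* Decode the packed divider byte used above: upper nibble = HILEN,
 * lower nibble = LOLEN, effective post divider = HILEN + LOLEN + 2. */
unsigned effective_post_divider(uint8_t post_div)
{
	unsigned hilen = (post_div >> 4) & 0xf;
	unsigned lolen = post_div & 0xf;

	return hilen + lolen + 2;
}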
|
D | rv740_dpm.c |
    144   tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;  in rv740_populate_sclk_value()
    150   spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);  in rv740_populate_sclk_value()
    161   u32 vco_freq = engine_clock * dividers.post_div;  in rv740_populate_sclk_value()
    218   mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);  in rv740_populate_mclk_value()
    235   mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);  in rv740_populate_mclk_value()
    248   u32 vco_freq = memory_clock * dividers.post_div;  in rv740_populate_mclk_value()
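The 16384 factor in rv740_populate_sclk_value() (and in si_calculate_sclk_params() below) computes the feedback divider in 1/16384 units so its fractional part survives the integer division. A sketch of that arithmetic, assuming, since the listing truncates the surrounding lines, that the product is subsequently divided by the reference clock:

#include <stdint.h>

/* Feedback divider in 1/16384 units: the VCO runs at engine_clock * post_div,
 * and fb = vco * ref_div / ref_clock, scaled by 16384 before the division so
 * the fractional bits are kept. */
uint32_t fractional_fb_div(uint32_t engine_clock, uint32_t ref_clock,
			   uint32_t ref_div, uint32_t post_div)
{
	uint64_t tmp = (uint64_t)engine_clock * ref_div * post_div * 16384;

	return (uint32_t)(tmp / ref_clock);
}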
|
D | radeon_uvd.c |
    873   unsigned post_div = vco_freq / target_freq;  in radeon_uvd_calc_upll_post_div() local
    876   if (post_div < pd_min)  in radeon_uvd_calc_upll_post_div()
    877   post_div = pd_min;  in radeon_uvd_calc_upll_post_div()
    880   if ((vco_freq / post_div) > target_freq)  in radeon_uvd_calc_upll_post_div()
    881   post_div += 1;  in radeon_uvd_calc_upll_post_div()
    884   if (post_div > pd_even && post_div % 2)  in radeon_uvd_calc_upll_post_div()
    885   post_div += 1;  in radeon_uvd_calc_upll_post_div()
    887   return post_div;  in radeon_uvd_calc_upll_post_div()
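radeon_uvd_calc_upll_post_div() is short enough that the quoted lines restate almost the whole routine. A user-space rendering of just those lines (the frequencies only need to share a unit; the standalone form is mine):

/* Pick a UPLL post divider: start from vco/target, clamp to the hardware
 * minimum, bump by one so the resulting output does not exceed the target,
 * and force an even value once the divider is above pd_even. */
unsigned calc_upll_post_div(unsigned vco_freq, unsigned target_freq,
			    unsigned pd_min, unsigned pd_even)
{
	unsigned post_div = vco_freq / target_freq;

	if (post_div < pd_min)
		post_div = pd_min;

	if ((vco_freq / post_div) > target_freq)
		post_div += 1;

	if (post_div > pd_even && post_div % 2)
		post_div += 1;

	return post_div;
}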
|
D | radeon_legacy_crtc.c |
    756   } *post_div, post_divs[] = {  in radeon_set_pll() local
    822   for (post_div = &post_divs[0]; post_div->divider; ++post_div) {  in radeon_set_pll()
    823   if (post_div->divider == post_divider)  in radeon_set_pll()
    827   if (!post_div->divider)  in radeon_set_pll()
    828   post_div = &post_divs[0];  in radeon_set_pll()
    843   pll_fb_post_div = (feedback_div | (post_div->bitvalue << 16));  in radeon_set_pll()
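The legacy PLL does not take the post divider value directly; radeon_set_pll() looks the chosen divider up in a {divider, bitvalue} table and shifts the bit pattern into the upper half of the combined feedback/post-divider word. A sketch of that lookup (the table contents are not shown in the listing, so none are assumed here):

struct legacy_post_div {
	unsigned divider;	/* 0 terminates the table */
	unsigned bitvalue;	/* pattern programmed into the PLL register */
};

/* Scan the table for the requested divider; fall back to the first entry
 * when nothing matches, as the quoted code does. */
unsigned legacy_post_div_bits(const struct legacy_post_div *tbl, unsigned want)
{
	const struct legacy_post_div *p;

	for (p = tbl; p->divider; ++p)
		if (p->divider == want)
			break;

	if (!p->divider)
		p = tbl;

	return p->bitvalue;
}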
|
D | rs780_dpm.c |
    88    r600_engine_clock_entry_set_post_divider(rdev, 0, dividers.post_div);  in rs780_initialize_dpm_power_state()
    454   (min_dividers.post_div != max_dividers.post_div) ||  in rs780_set_engine_clock_scaling()
    456   (max_dividers.post_div != current_max_dividers.post_div))  in rs780_set_engine_clock_scaling()
    988   u32 post_div = ((func_cntl & SPLL_SW_HILEN_MASK) >> SPLL_SW_HILEN_SHIFT) + 1 +  in rs780_dpm_debugfs_print_current_performance_level() local
    991   (post_div * ref_div);  in rs780_dpm_debugfs_print_current_performance_level()
    1010  u32 post_div = ((func_cntl & SPLL_SW_HILEN_MASK) >> SPLL_SW_HILEN_SHIFT) + 1 +  in rs780_dpm_get_current_sclk() local
    1013  (post_div * ref_div);  in rs780_dpm_get_current_sclk()
|
D | atombios_crtc.c |
    823   u32 post_div,  in atombios_crtc_program_pll() argument
    850   args.v1.ucPostDiv = post_div;  in atombios_crtc_program_pll()
    860   args.v2.ucPostDiv = post_div;  in atombios_crtc_program_pll()
    870   args.v3.ucPostDiv = post_div;  in atombios_crtc_program_pll()
    887   args.v5.ucPostDiv = post_div;  in atombios_crtc_program_pll()
    916   args.v6.ucPostDiv = post_div;  in atombios_crtc_program_pll()
    1063  u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;  in atombios_crtc_set_pll() local
    1090  pll->post_div = radeon_crtc->pll_post_div;  in atombios_crtc_set_pll()
    1095  &fb_div, &frac_fb_div, &ref_div, &post_div);  in atombios_crtc_set_pll()
    1098  &fb_div, &frac_fb_div, &ref_div, &post_div);  in atombios_crtc_set_pll()
    [all …]
|
D | radeon_mode.h |
    171   uint32_t post_div;  member
    582   u32 post_div;  member
    622   u32 post_div;  member
|
D | rv770_dpm.c |
    333   post_divider = dividers->post_div;  in rv770_calculate_fractional_mpll_feedback_divider()
    422   ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);  in rv770_populate_mclk_value()
    452   ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);  in rv770_populate_mclk_value()
    514   post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2;  in rv770_populate_sclk_value()
    528   spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);  in rv770_populate_sclk_value()
    529   spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);  in rv770_populate_sclk_value()
|
D | kv_dpm.c |
    545   pi->graphics_level[index].SclkDid = (u8)dividers.post_div;  in kv_set_divider_value()
    848   pi->uvd_level[i].VclkDivider = (u8)dividers.post_div;  in kv_populate_uvd_table()
    854   pi->uvd_level[i].DclkDivider = (u8)dividers.post_div;  in kv_populate_uvd_table()
    916   pi->vce_level[i].Divider = (u8)dividers.post_div;  in kv_populate_vce_table()
    979   pi->samu_level[i].Divider = (u8)dividers.post_div;  in kv_populate_samu_table()
    1038  pi->acp_level[i].Divider = (u8)dividers.post_div;  in kv_populate_acp_table()
|
D | cypress_dpm.c |
    509   dividers.post_div = 1;  in cypress_populate_mclk_value()
    520   mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);  in cypress_populate_mclk_value()
    537   mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);  in cypress_populate_mclk_value()
    555   u32 vco_freq = memory_clock * dividers.post_div;  in cypress_populate_mclk_value()
|
D | trinity_dpm.c |
    380   value |= PDS_DIV(dividers.post_div);  in trinity_gfx_powergating_initialize()
    596   value |= CLK_DIVIDER(dividers.post_div);  in trinity_set_divider_value()
    606   value |= PD_SCLK_DIVIDER(dividers.post_div);  in trinity_set_divider_value()
|
D | ni_dpm.c |
    2025  tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;  in ni_calculate_sclk_params()
    2031  spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);  in ni_calculate_sclk_params()
    2042  u32 vco_freq = engine_clock * dividers.post_div;  in ni_calculate_sclk_params()
    2192  dividers.post_div = 1;  in ni_populate_mclk_value()
    2203  mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);  in ni_populate_mclk_value()
    2220  mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);  in ni_populate_mclk_value()
    2238  u32 vco_freq = memory_clock * dividers.post_div;  in ni_populate_mclk_value()
|
D | radeon_atombios.c |
    2848  dividers->post_div = args.v1.ucPostDiv;  in radeon_atom_get_clock_dividers()
    2862  dividers->post_div = args.v2.ucPostDiv;  in radeon_atom_get_clock_dividers()
    2877  dividers->post_div = args.v3.ucPostDiv;  in radeon_atom_get_clock_dividers()
    2897  dividers->post_div = args.v5.ucPostDiv;  in radeon_atom_get_clock_dividers()
    2916  dividers->post_divider = dividers->post_div = args.v4.ucPostDiv;  in radeon_atom_get_clock_dividers()
    2930  dividers->post_div = args.v6_out.ucPllPostDiv;  in radeon_atom_get_clock_dividers()
    2970  mpll_param->post_div = args.ucPostDiv;  in radeon_atom_get_memory_pll_dividers()
|
D | rv6xx_dpm.c |
    151   step->post_divider = 2 + (dividers.post_div & 0xF) + (dividers.post_div >> 4);  in rv6xx_convert_clock_to_stepping()
    609   rv6xx_memory_clock_entry_set_post_divider(rdev, entry, dividers.post_div);  in rv6xx_program_mclk_stepping_entry()
|
D | sumo_dpm.c |
    560   sumo_set_divider_value(rdev, index, dividers.post_div);  in sumo_program_power_level()
    796   WREG32_P(CG_ACPI_CNTL, SCLK_ACPI_DIV(dividers.post_div), ~SCLK_ACPI_DIV_MASK);  in sumo_program_acpi_power_level()
|
D | si_dpm.c |
    4746  tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;  in si_calculate_sclk_params()
    4752  spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);  in si_calculate_sclk_params()
    4763  u32 vco_freq = engine_clock * dividers.post_div;  in si_calculate_sclk_params()
    4844  mpll_ad_func_cntl |= YCLK_POST_DIV(mpll_param.post_div);  in si_populate_mclk_value()
    4849  YCLK_POST_DIV(mpll_param.post_div);  in si_populate_mclk_value()
|
D | ci_dpm.c |
    2789  mpll_ad_func_cntl |= YCLK_POST_DIV(mpll_param.post_div);  in ci_calculate_mclk_params()
    2794  YCLK_POST_DIV(mpll_param.post_div);  in ci_calculate_mclk_params()
    2804  freq_nom = memory_clock * 4 * (1 << mpll_param.post_div);  in ci_calculate_mclk_params()
    2806  freq_nom = memory_clock * 2 * (1 << mpll_param.post_div);  in ci_calculate_mclk_params()
    3159  u32 vco_freq = engine_clock * dividers.post_div;  in ci_calculate_sclk_params()
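In ci_calculate_mclk_params() the MPLL post divider is used as a power-of-two exponent rather than a plain divisor, so the nominal frequency shifts by it; whether the multiplier is 4 or 2 depends on a condition the listing truncates, and treating it as a GDDR5-versus-other-memory switch is an assumption here, not something the quoted lines show. A sketch:

#include <stdint.h>
#include <stdbool.h>

/* Nominal frequency for the dividers above: memory_clock times 4 or 2,
 * times 2^post_div.  The gddr5 flag standing in for the elided condition
 * is an assumption, not taken from the listing. */
uint32_t mclk_freq_nom(uint32_t memory_clock, uint32_t post_div, bool gddr5)
{
	uint32_t mult = gddr5 ? 4 : 2;

	return memory_clock * mult * (1u << post_div);
}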
|
D | evergreen.c | 1079 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK)); in sumo_set_uvd_clock()
|
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/clk/ |
D | mcp77.c |
    53   u32 post_div = 0;  in read_pll() local
    59   post_div = 1 << ((nv_rd32(clk, 0x4070) & 0x000f0000) >> 16);  in read_pll()
    62   post_div = (nv_rd32(clk, 0x4040) & 0x000f0000) >> 16;  in read_pll()
    72   clock = clock / post_div;  in read_pll()
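The two mcp77.c reads interpret the same 4-bit field (bits 19:16 of the control register) in two ways: the 0x4070 path takes it as log2 of the post divider, the 0x4040 path takes it as the divider itself, and read_pll() then divides the computed clock by the result. A sketch of that decode, with the register access factored out so the snippet stands alone:

#include <stdint.h>

/* Apply a post divider whose 4-bit field is either log2-encoded or the
 * divider itself; guard against a zero field so the sketch never divides
 * by zero. */
uint32_t apply_post_div(uint32_t clock, uint32_t field, int log2_encoded)
{
	uint32_t post_div = log2_encoded ? (1u << field) : field;

	return post_div ? clock / post_div : clock;
}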
|
/linux-4.1.27/drivers/video/fbdev/aty/ |
D | radeon_base.c |
    1418  } *post_div,  in radeon_calc_pll_regs() local
    1487  for (post_div = &post_divs[0]; post_div->divider; ++post_div) {  in radeon_calc_pll_regs()
    1488  pll_output_freq = post_div->divider * freq;  in radeon_calc_pll_regs()
    1492  if (uses_dvo && (post_div->divider & 1))  in radeon_calc_pll_regs()
    1501  if ( !post_div->divider ) {  in radeon_calc_pll_regs()
    1502  post_div = &post_divs[post_div->bitvalue];  in radeon_calc_pll_regs()
    1503  pll_output_freq = post_div->divider * freq;  in radeon_calc_pll_regs()
    1511  if ( !post_div->divider ) {  in radeon_calc_pll_regs()
    1512  post_div = &post_divs[post_div->bitvalue];  in radeon_calc_pll_regs()
    1513  pll_output_freq = post_div->divider * freq;  in radeon_calc_pll_regs()
    [all …]
|
D | radeonfb.h | 232 int post_div; member
|
/linux-4.1.27/drivers/media/tuners/ |
D | tda18271-priv.h | 190 u32 *freq, u8 *post_div, u8 *div);
|
D | tda18271-maps.c |
    1069  u32 *freq, u8 *post_div, u8 *div)  in tda18271_lookup_pll_map() argument
    1109  *post_div = map[i].pd;  in tda18271_lookup_pll_map()
    1113  i, map_name, *post_div, *div);  in tda18271_lookup_pll_map()
|