diff --git a/modules/hekate_libsys_minerva/mtc_mc_emc_regs.h b/modules/hekate_libsys_minerva/mtc_mc_emc_regs.h index c0d098e..7fa0c2a 100644 --- a/modules/hekate_libsys_minerva/mtc_mc_emc_regs.h +++ b/modules/hekate_libsys_minerva/mtc_mc_emc_regs.h @@ -23,8 +23,9 @@ /* Clock controller registers */ #define CLK_RST_CONTROLLER_PLLM_BASE 0x90 #define CLK_RST_CONTROLLER_PLLM_MISC2 0x9C -#define PLLM_ENABLE (1 << 30) -#define PLLM_LOCK (1 << 27) +#define PLLM_ENABLE (1 << 30) +#define PLLM_LOCK (1 << 27) +#define PLLM_EN_LCKDET (1 << 4) #define CLK_RST_CONTROLLER_CLK_SOURCE_EMC 0x19C #define EMC_2X_CLK_SRC_SHIFT 29 @@ -34,6 +35,9 @@ #define CLK_RST_CONTROLLER_CLK_ENB_X_CLR 0x288 #define CLK_RST_CONTROLLER_PLLMB_BASE 0x5E8 #define CLK_RST_CONTROLLER_CLK_SOURCE_EMC_DLL 0x664 +#define EMC_DLL_PLLM_VCOB (1 << 10) +#define EMC_DLL_SWITCH_OUT (1 << 11) + #define CLK_RST_CONTROLLER_CLK_SOURCE_EMC_SAFE 0x724 /* Memory controller registers */ diff --git a/modules/hekate_libsys_minerva/sys_sdrammtc.c b/modules/hekate_libsys_minerva/sys_sdrammtc.c index 0310d65..43e4e01 100644 --- a/modules/hekate_libsys_minerva/sys_sdrammtc.c +++ b/modules/hekate_libsys_minerva/sys_sdrammtc.c @@ -1,6 +1,6 @@ /* * Minerva Training Cell - * DRAM Training for Tegra X1 SoC. Supports DDR2/3 and LPDDR3/4. + * DRAM Training for Tegra X1 SoC. Supports LPDDR4. * * Copyright (c) 2018 CTCaer * @@ -38,7 +38,7 @@ bool train_ram_patterns; * DIVM: PLL input divider. * DIVP: PLL post divider. * PLL_OUT = (REF / DIVM) * DIVN / DIVP - * + * * DIVP | DIVP * Encoded | Real * ---------------------- @@ -1025,11 +1025,14 @@ static void _usleep(u32 microseconds) ; } -static s32 _fceil(float var) +static u32 div_o3(u32 a, u32 b) { - s32 result = (s32)(var + 0.5f); + u32 result = a / b; - return result; + if ((b * result) < a) + return result + 1; + else + return result; } static u32 _actual_osc_clocks(u32 in) @@ -1068,22 +1071,21 @@ static bool _wait_emc_status(u32 reg_offset, u32 bit_mask, bool updated_state, s { if (emc_channel != 1) goto done; + if (((EMC_CH1(reg_offset) & bit_mask) != 0) == updated_state) { err = false; break; } } - else + else if (((EMC(reg_offset) & bit_mask) != 0) == updated_state) { - if (((EMC(reg_offset) & bit_mask) != 0) == updated_state) - { - err = false; - break; - } + err = false; + break; } _usleep(1); } + done: return err; } @@ -1144,7 +1146,7 @@ static s32 _get_dram_temperature() if (channel1_enabled) { - _request_mmr_data(0x40040000, channel1_enabled); + _request_mmr_data(0x40040000, EMC_CH1); mr4_1 = EMC(EMC_MRR); if (mr4_1 < 0xF001) @@ -1188,18 +1190,21 @@ static u32 _pllm_clk_base_cfg(s32 rate_KHz, u32 clk_src_emc, s32 emc_2X_clk_src_ { CLOCK(CLK_RST_CONTROLLER_PLLMB_BASE) = dividers; CLOCK(CLK_RST_CONTROLLER_PLLMB_BASE) |= PLLM_ENABLE; + if ((clk_src_emc >> EMC_2X_CLK_SRC_SHIFT) == PLLM_UD) clk_src_emc = (clk_src_emc & 0x1FFFFFFF) | (PLLMB_UD << EMC_2X_CLK_SRC_SHIFT); else if (!(clk_src_emc >> EMC_2X_CLK_SRC_SHIFT)) clk_src_emc |= (PLLMB_OUT0 << EMC_2X_CLK_SRC_SHIFT); + while (!(CLOCK(CLK_RST_CONTROLLER_PLLMB_BASE) & PLLM_LOCK)) ; } else { CLOCK(CLK_RST_CONTROLLER_PLLM_BASE) = dividers; - CLOCK(CLK_RST_CONTROLLER_PLLM_MISC2) |= 0x10u; // PLLM_EN_LCKDET. 
+ CLOCK(CLK_RST_CONTROLLER_PLLM_MISC2) |= PLLM_EN_LCKDET; CLOCK(CLK_RST_CONTROLLER_PLLM_BASE) |= PLLM_ENABLE; + if ((clk_src_emc >> EMC_2X_CLK_SRC_SHIFT) == PLLM_UD) clk_src_emc = (clk_src_emc & 0x1FFFFFFF) | (PLLM_UD << EMC_2X_CLK_SRC_SHIFT); while (!(CLOCK(CLK_RST_CONTROLLER_PLLM_BASE) & PLLM_LOCK)) @@ -1214,13 +1219,13 @@ static void _change_dll_src(emc_table_t *mtc_table_entry, u32 clk_src_emc) u32 emc_2x_clk_src = clk_src_emc >> EMC_2X_CLK_SRC_SHIFT; u32 dll_setting = ((((mtc_table_entry->dll_clk_src & 0x1FFFFFFF) - | (emc_2x_clk_src << EMC_2X_CLK_SRC_SHIFT)) & 0xFFFFFF00) + | (emc_2x_clk_src << EMC_2X_CLK_SRC_SHIFT)) & 0xFFFFFF00) | (clk_src_emc & 0xFF)) & 0xFFFFF3FF; if (emc_2x_clk_src == PLLMB_UD) - dll_setting |= 0x400; // PLLM_VCOB. + dll_setting |= EMC_DLL_PLLM_VCOB; else if (emc_2x_clk_src != PLLM_UD) - dll_setting |= 0x800; // EMC_DLL_SWITCH_OUT. + dll_setting |= EMC_DLL_SWITCH_OUT; CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC_DLL) = dll_setting; @@ -1296,10 +1301,8 @@ static void _digital_dll_disable() while (EMC(EMC_CFG_DIG_DLL) & 1) ; if (dual_channel) - { while (EMC_CH1(EMC_CFG_DIG_DLL) & 1) ; - } } static void _digital_dll_enable(s32 channel1_enabled) @@ -1311,10 +1314,8 @@ static void _digital_dll_enable(s32 channel1_enabled) while (!(EMC(EMC_CFG_DIG_DLL) & 1)) ; if (channel1_enabled) - { while (!(EMC_CH1(EMC_CFG_DIG_DLL) & 1)) ; - } } static void _digital_dll_enable_rs(s32 channel1_enabled) @@ -1326,13 +1327,11 @@ static void _digital_dll_enable_rs(s32 channel1_enabled) while (!(EMC(EMC_CFG_DIG_DLL) & 1)) ; if (channel1_enabled) - { while (!(EMC_CH1(EMC_CFG_DIG_DLL) & 1)) ; - } } -static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_entry, emc_table_t *dst_emc_table_entry, float src_clock_period) +static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_entry, emc_table_t *dst_emc_table_entry, s32 src_clock_period) { u32 pmacro_cmd_pad; u32 pmacro_rfu1; @@ -1340,7 +1339,7 @@ static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_ u32 pmacro_common_tx; u32 pmacro_dq_pad; - float src_clk_per_pc = (100.0f / src_clock_period) + 1.0f; + u32 src_clk_per_pc = (100000 / src_clock_period) + 1; if (flip_backward) { @@ -1358,22 +1357,23 @@ static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_ pmacro_cfg5 = src_emc_table_entry->burst_regs.emc_fbio_cfg5_idx; pmacro_common_tx = src_emc_table_entry->burst_regs.emc_pmacro_common_pad_tx_ctrl_idx; } + u32 pmacro_cmd_pad_drvforceon = pmacro_cmd_pad | 0x4000000; - u32 ramp_down_wait = (u32)(float)(src_clock_period * 12.0f); + u32 ramp_down_wait = src_clock_period * 12 / 1000; _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_drvforceon, 0); _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 | 0x100, 12); - if (src_clock_period >= 1.0f) // Dvfs high speed threshold. + if (src_clock_period >= 1000) // Dvfs high speed threshold. { - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xF800F800, (u32)(float)(src_clk_per_pc + 19.0f)); - ramp_down_wait = (u32)(float)((float)ramp_down_wait + (100.0f + (src_clock_period * 20.0f))); + _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xF800F800, (u32)(src_clk_per_pc + 19)); + ramp_down_wait = ramp_down_wait + 100 + (src_clock_period * 20 / 1000); } else { ramp_down_wait += 100; - if (src_clock_period >= 0.416666667) // Iobrick dcc threshold. + if (src_clock_period >= 416) // Iobrick dcc threshold. 
_ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFEEDFEED, (u32)src_clk_per_pc); else { @@ -1383,9 +1383,11 @@ static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_ _ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad, 0); _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFEEDFEED, 0); } + ramp_down_wait += 200; _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFE40FE40, (u32)src_clk_per_pc); - if (src_clock_period >= 0.416666667) // Iobrick dcc threshold. + + if (src_clock_period >= 416) // Iobrick dcc threshold. _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xF800F800, (u32)src_clk_per_pc); else { @@ -1394,10 +1396,9 @@ static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_ _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xF800F800, 0); } } - if (src_clock_period >= 1.66666667) // Dvfs mid speed threshold. - { + + if (src_clock_period >= 1666) // Dvfs mid speed threshold. _ccfifo_write(EMC_PMACRO_COMMON_PAD_TX_CTRL, pmacro_common_tx & 0xFFFFFFF0, (u32)src_clk_per_pc); - } else { ramp_down_wait += 400; @@ -1409,7 +1410,7 @@ static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_ return ramp_down_wait; } -static u32 _dvfs_power_ramp_up(bool flip_backward, emc_table_t *src_emc_table_entry, emc_table_t *dst_emc_table_entry, u8 needs_training, float dst_clock_period) +static u32 _dvfs_power_ramp_up(bool flip_backward, emc_table_t *src_emc_table_entry, emc_table_t *dst_emc_table_entry, u8 needs_training, s32 dst_clock_period) { u32 pmacro_cmd_pad; u32 pmacro_dq_pad; @@ -1419,7 +1420,8 @@ static u32 _dvfs_power_ramp_up(bool flip_backward, emc_table_t *src_emc_table_en u32 pmacro_cmd_pad_data; u32 ramp_up_wait = 0; - float dst_clk_per_pc = (100.0f / dst_clock_period) + 1.0f; + u32 dst_clk_per_pc = (100000 / dst_clock_period) + 1; + if (flip_backward) { pmacro_cmd_pad = src_emc_table_entry->burst_regs.emc_pmacro_cmd_pad_tx_ctrl_idx; @@ -1460,80 +1462,62 @@ static u32 _dvfs_power_ramp_up(bool flip_backward, emc_table_t *src_emc_table_en pmacro_cfg5 = dst_emc_table_entry->burst_regs.emc_fbio_cfg5_idx; pmacro_common_tx = dst_emc_table_entry->burst_regs.emc_pmacro_common_pad_tx_ctrl_idx; } + pmacro_cmd_pad_data = (pmacro_cmd_pad & 0xFEFEFDFD) | 0x4000000; - if (dst_clock_period >= 1.66666667) // Dvfs mid speed threshold. + if (dst_clock_period >= 1666) // Dvfs mid speed threshold. { _ccfifo_write(EMC_PMACRO_COMMON_PAD_TX_CTRL, pmacro_common_tx | 8, 0); - if (dst_clock_period >= 1.0) // Dvfs high speed threshold. + + _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 | 0x600, 0); + _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, 12); + + ramp_up_wait = (dst_clock_period * 12) / 1000 + 0; + } + else + { + _ccfifo_write(EMC_PMACRO_COMMON_PAD_TX_CTRL, pmacro_common_tx & 0xA, 0); + _ccfifo_write(EMC_PMACRO_COMMON_PAD_TX_CTRL, pmacro_common_tx & 0xF, (u32)dst_clk_per_pc); + + if (dst_clock_period < 1000) // Dvfs high speed threshold. { - if (dst_clock_period >= 1.66666667) // Dvfs mid speed threshold. - { - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 | 0x600, 0); - _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, 12); - ramp_up_wait = (u32)((float)(dst_clock_period * 12.0f) + 0.0); - } + if (dst_clock_period >= 416) // Iobrick dcc threshold. 
+ _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFE40FE40, (u32)dst_clk_per_pc); else { - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 | 0x6000600, (u32)dst_clk_per_pc); - _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, (u32)(float)(dst_clk_per_pc + 9.0f)); - ramp_up_wait = (u32)(float)(100.0f + (float)(dst_clock_period * 10.0f)); + pmacro_cmd_pad_data = (pmacro_cmd_pad & 0xFEFEFDFD) | 0x4010200; + pmacro_dq_pad = (pmacro_dq_pad & 0xFEFEFDFD) | 0x10200; + _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data, (u32)dst_clk_per_pc); + _ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad, 0); + _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFE40FE40, 0); } - _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data & 0xFBFFFFFF, 5); - return ramp_up_wait; + _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFEEDFEED, (u32)dst_clk_per_pc); + + if (dst_clock_period >= 416) // Iobrick dcc threshold. + _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1, (u32)dst_clk_per_pc); + else + { + pmacro_cmd_pad_data |= 0x1010202u; + pmacro_dq_pad |= 0x1010202; + _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data, (u32)dst_clk_per_pc); + _ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad, 0); + _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1, 0); + } + + _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, (u32)(dst_clk_per_pc + 9)); + + ramp_up_wait = 500 + (dst_clock_period * 10) / 1000; } - - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFE40FE40, (u32)dst_clk_per_pc); - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFEEDFEED, (u32)dst_clk_per_pc); - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1, (u32)dst_clk_per_pc); - _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, (u32)(float)(dst_clk_per_pc + 9.0f)); - ramp_up_wait = (u32)(float)((float)300 + (float)(100.0f + (float)(dst_clock_period * 10.0f))); - _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data & 0xFBFFFFFF, 5); - - return ramp_up_wait; - } - _ccfifo_write(EMC_PMACRO_COMMON_PAD_TX_CTRL, pmacro_common_tx & 0xA, 0); - _ccfifo_write(EMC_PMACRO_COMMON_PAD_TX_CTRL, pmacro_common_tx & 0xF, (u32)dst_clk_per_pc); - - if (dst_clock_period < 1.0) // Dvfs high speed threshold. - { - if (dst_clock_period >= 0.416666667) // Iobrick dcc threshold. - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFE40FE40, (u32)dst_clk_per_pc); - else + else // 1000 > dst_clock_period < 1666. { - pmacro_cmd_pad_data = (pmacro_cmd_pad & 0xFEFEFDFD) | 0x4010200; - pmacro_dq_pad = (pmacro_dq_pad & 0xFEFEFDFD) | 0x10200; - _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data, (u32)dst_clk_per_pc); - _ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad, 0); - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFE40FE40, 0); + _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 | 0x6000600, (u32)dst_clk_per_pc); + _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, (u32)(dst_clk_per_pc + 9)); + + ramp_up_wait = 200 + (dst_clock_period * 10) / 1000; } - - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFEEDFEED, (u32)dst_clk_per_pc); - - if (dst_clock_period >= 0.416666667) // Iobrick dcc threshold. 
- _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1, (u32)dst_clk_per_pc); - else - { - pmacro_cmd_pad_data |= 0x1010202u; - pmacro_dq_pad |= 0x1010202; - _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data, (u32)dst_clk_per_pc); - _ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad, 0); - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1, 0); - } - - _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, (u32)(float)(dst_clk_per_pc + 9.0f)); - ramp_up_wait = (u32)(float)((float)400 + (float)(100.0f + (float)(dst_clock_period * 10.0f))); - _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data & 0xFBFFFFFF, 5); - - return ramp_up_wait; } - // 1.0 > dst_clock_period < 1.66666667. - _ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 | 0x6000600, (u32)dst_clk_per_pc); - _ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, (u32)(float)(dst_clk_per_pc + 9.0f)); - - ramp_up_wait = (u32)(float)((float)100 + (float)(100.0f + (float)(dst_clock_period * 10.0f))); _ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data & 0xFBFFFFFF, 5); return ramp_up_wait; @@ -1617,11 +1601,11 @@ static u32 _minerva_update_clock_tree_delay(emc_table_t *src_emc_entry, emc_tabl goto calc_td0_0; break; } - + tdel0_0 = dst_emc_entry->current_dram_clktree_c0d0u0 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d0u0_idx / 100); if (tdel0_0 < 0) tdel0_0 = !tdel0_0; - if (update_type == TRAINING_UPDATE || (dst_rate_mhz * tdel0_0 << 7) / 1000000 > dst_emc_entry->tree_margin) + if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdel0_0 << 7) / 1000000) > dst_emc_entry->tree_margin) dst_emc_entry->current_dram_clktree_c0d0u0 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d0u0_idx / 100; calc_td0_0: @@ -1658,11 +1642,11 @@ calc_td0_0: tdel0_1 = !tdel0_1; if (tdel0_1 > tdel0_0) tdel0_0 = tdel0_1; - if (update_type == TRAINING_UPDATE || (dst_rate_mhz * tdel0_1 << 7) / 1000000 > dst_emc_entry->tree_margin) + if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdel0_1 << 7) / 1000000) > dst_emc_entry->tree_margin) dst_emc_entry->current_dram_clktree_c0d0u1 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d0u1_idx / 100; calc_td1_0: - if (channel1_enabled == 1) + if (channel1_enabled) { cval = tval / (2 * temp_ch1_0); switch (update_type) @@ -1697,7 +1681,7 @@ calc_td1_0: tdel1_0 = !tdel1_0; if (tdel1_0 > tdel0_0) tdel0_0 = tdel1_0; - if (update_type == TRAINING_UPDATE || (dst_rate_mhz * tdel1_0 << 7) / 1000000 > dst_emc_entry->tree_margin) + if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdel1_0 << 7) / 1000000) > dst_emc_entry->tree_margin) dst_emc_entry->current_dram_clktree_c1d0u0 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d0u0_idx / 100; calc_td1_1: @@ -1734,7 +1718,7 @@ calc_td1_1: tdel1_1 = !tdel1_1; if (tdel1_1 > tdel0_0) tdel0_0 = tdel1_1; - if (update_type == TRAINING_UPDATE || (dst_rate_mhz * tdel1_1 << 7) / 1000000 > dst_emc_entry->tree_margin) + if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdel1_1 << 7) / 1000000) > dst_emc_entry->tree_margin) dst_emc_entry->current_dram_clktree_c1d0u1 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d0u1_idx / 100; } @@ -1747,7 +1731,7 @@ calc_dev2: _request_mmr_data(0x40130000, channel1_enabled); // Dev1 MRR 19. 
temp_ch0_0 = (EMC(EMC_MRR) & 0xFF) << 8; temp_ch0_1 = EMC(EMC_MRR) & 0xFF00; - if (channel1_enabled == 1) + if (channel1_enabled) { temp_ch1_0 = (EMC_CH1(EMC_MRR) & 0xFF) << 8; temp_ch1_1 = EMC_CH1(EMC_MRR) & 0xFF00; @@ -1756,12 +1740,11 @@ calc_dev2: _request_mmr_data(0x40120000, channel1_enabled); // Dev1 MRR 18 temp_ch0_0 |= EMC(EMC_MRR) & 0xFF; temp_ch0_1 |= ((EMC(EMC_MRR) & 0xFF00) >> 8); - if (channel1_enabled == 1) + if (channel1_enabled) { temp_ch1_0 |= EMC_CH1(EMC_MRR) & 0xFF; temp_ch1_1 |= (EMC_CH1(EMC_MRR) & 0xFF00) >> 8; } - } cval = tval / (2 * temp_ch0_0); @@ -1797,7 +1780,7 @@ calc_dev2: tmp_tdel0_0 = !tmp_tdel0_0; if (tmp_tdel0_0 > tdel0_0) tdel0_0 = tmp_tdel0_0; - if (update_type == TRAINING_UPDATE || (dst_rate_mhz * tmp_tdel0_0 << 7) / 1000000 > dst_emc_entry->tree_margin) + if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tmp_tdel0_0 << 7) / 1000000) > dst_emc_entry->tree_margin) dst_emc_entry->current_dram_clktree_c0d1u0 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d1u0_idx / 100; calc_tmp_td0_1: @@ -1834,11 +1817,11 @@ calc_tmp_td0_1: tdel0_1 = !tdel0_1; if (tdel0_1 > tdel0_0) tdel0_0 = tdel0_1; - if (update_type == TRAINING_UPDATE || (dst_rate_mhz * tdel0_1 << 7) / 1000000 > dst_emc_entry->tree_margin) + if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdel0_1 << 7) / 1000000) > dst_emc_entry->tree_margin) dst_emc_entry->current_dram_clktree_c0d1u1 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d1u1_idx / 100; calc_tmp_td1_0: - if (channel1_enabled == 1) + if (channel1_enabled) { cval = tval / (2 * temp_ch1_0); switch (update_type) @@ -1873,7 +1856,7 @@ calc_tmp_td1_0: tdel1_0 = !tdel1_0; if (tdel1_0 > tdel0_0) tdel0_0 = tdel1_0; - if (update_type == TRAINING_UPDATE || (dst_rate_mhz * tdel1_0 << 7) / 1000000 > dst_emc_entry->tree_margin) + if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdel1_0 << 7) / 1000000) > dst_emc_entry->tree_margin) dst_emc_entry->current_dram_clktree_c1d1u0 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u0_idx / 100; calc_tmp_td1_1: @@ -1910,7 +1893,7 @@ calc_tmp_td1_1: tdel1_1 = !tdel1_1; if (tdel1_1 > tdel0_0) tdel0_0 = tdel1_1; - if (update_type == TRAINING_UPDATE || (dst_rate_mhz * tdel1_1 << 7) / 1000000 > dst_emc_entry->tree_margin) + if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdel1_1 << 7) / 1000000) > dst_emc_entry->tree_margin) dst_emc_entry->current_dram_clktree_c1d1u1 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u1_idx / 100; } @@ -1963,13 +1946,14 @@ static u32 _minerva_periodic_compensation_handler(emc_table_t *src_emc_entry, em dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u0_idx = 0; dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u1_idx = 0; - for (s32 i = 0; i < dst_emc_entry->ptfv_list.ptfv_dvfs_samples_idx; i++) + for (u32 i = 0; i < dst_emc_entry->ptfv_list.ptfv_dvfs_samples_idx; i++) { _start_periodic_compensation(); _usleep(delay); _minerva_update_clock_tree_delay(src_emc_entry, dst_emc_entry, dram_dev_num, channel1_enabled, DVFS_PT1); } } + adel = _minerva_update_clock_tree_delay(src_emc_entry, dst_emc_entry, dram_dev_num, channel1_enabled, DVFS_UPDATE); return adel; @@ -1985,12 +1969,13 @@ static u32 _minerva_periodic_compensation_handler(emc_table_t *src_emc_entry, em dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u0_idx = 0; dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u1_idx = 0; - for (s32 i = 0; i < dst_emc_entry->ptfv_list.ptfv_write_samples_idx; i++) + for (u32 i = 0; i < dst_emc_entry->ptfv_list.ptfv_write_samples_idx; i++) { _start_periodic_compensation(); 
_usleep(delay); _minerva_update_clock_tree_delay(src_emc_entry, dst_emc_entry, dram_dev_num, channel1_enabled, TRAINING_PT1); } + adel = _minerva_update_clock_tree_delay(src_emc_entry, dst_emc_entry, dram_dev_num, channel1_enabled, TRAINING_UPDATE); return adel; @@ -2093,7 +2078,8 @@ static u32 _minerva_apply_periodic_compensation_trimmer(emc_table_t *mtc_table_e tree_delta_taps[3] = (tree_delta[3] * (s32)dst_rate_mhz) / 1000000; for (s32 i = 0; i < 4; i++) { - if ((tree_delta_taps[i] > mtc_table_entry->tree_margin) || (tree_delta_taps[i] < (-1 * mtc_table_entry->tree_margin))) { + if ((tree_delta_taps[i] > mtc_table_entry->tree_margin) || (tree_delta_taps[i] < (-1 * mtc_table_entry->tree_margin))) + { new_trim[8 + i * 2] += tree_delta_taps[i]; new_trim[8 + i * 2 + 1] += tree_delta_taps[i]; } @@ -2166,15 +2152,16 @@ static u32 _minerva_apply_periodic_compensation_trimmer(emc_table_t *mtc_table_e static bool _check_freq_changed(u32 dst_entry_rate_KHz, u32 dst_entry_clk_src_emc, u32 src_entry_rate_KHz, u32 src_entry_clk_src_emc) { - float dst_div_clock; - float src_div_clock; - float src_end_div_clk_ratio; + s64 dst_div_clock; + s64 src_div_clock; + s32 src_end_div_clk_ratio; u32 src_entry_emc_2X_clk_src = src_entry_clk_src_emc >> EMC_2X_CLK_SRC_SHIFT; u32 dst_entry_emc_2X_clk_src = dst_entry_clk_src_emc >> EMC_2X_CLK_SRC_SHIFT; u32 src_entry_emc_2X_clk_src_div = src_entry_clk_src_emc & 0xFF; u32 dst_entry_emc_2X_clk_src_div = dst_entry_clk_src_emc & 0xFF; u32 pll_post_divider = 0; + switch (CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC) >> EMC_2X_CLK_SRC_SHIFT) { case PLLM_OUT0: @@ -2188,32 +2175,29 @@ static bool _check_freq_changed(u32 dst_entry_rate_KHz, u32 dst_entry_clk_src_em default: break; } + + // Hang if post div is wrong. if (pll_post_divider > 5) - { while (true) ; - } if (src_entry_emc_2X_clk_src <= PLLMB_UD) src_entry_emc_2X_clk_src_div = 0; if (dst_entry_emc_2X_clk_src <= PLLMB_UD) dst_entry_emc_2X_clk_src_div = 0; - if (dst_entry_emc_2X_clk_src != src_entry_emc_2X_clk_src && (dst_entry_emc_2X_clk_src & 0xFFFFFFFB || src_entry_emc_2X_clk_src & 0xFFFFFFFB)) - return true; + if (dst_entry_emc_2X_clk_src != src_entry_emc_2X_clk_src + && (dst_entry_emc_2X_clk_src & 0xFFFFFFFB || src_entry_emc_2X_clk_src & 0xFFFFFFFB)) + return true; - dst_div_clock = (double)dst_entry_rate_KHz - * ((double)((dst_entry_emc_2X_clk_src_div >> 1) + 1) - + (double)(dst_entry_emc_2X_clk_src_div & 1) * 0.5) - * (double)(pll_post_divider + 1); - src_div_clock = (double)src_entry_rate_KHz - * ((double)((src_entry_emc_2X_clk_src_div >> 1) + 1) - + (double)(src_entry_emc_2X_clk_src_div & 1) * 0.5) - * (double)(pll_post_divider + 1); + dst_div_clock = dst_entry_rate_KHz * (pll_post_divider + 1) + * ((dst_entry_emc_2X_clk_src_div >> 1) * 10 + (dst_entry_emc_2X_clk_src_div & 1) * 5 + 10) / 10; // Accounting for 7.1 div. + src_div_clock = src_entry_rate_KHz * (pll_post_divider + 1) + * ((src_entry_emc_2X_clk_src_div >> 1) * 10 + (src_entry_emc_2X_clk_src_div & 1) * 5 + 10) / 10; // Accounting for 7.1 div. 
- src_end_div_clk_ratio = src_div_clock / dst_div_clock; + src_end_div_clk_ratio = (src_div_clock * 1000) / dst_div_clock; - if (src_end_div_clk_ratio > 1.01f || src_end_div_clk_ratio < 0.99f) + if (src_end_div_clk_ratio > 1010 || src_end_div_clk_ratio < 990) return true; else return false; @@ -2260,13 +2244,13 @@ static void _save_train_results(emc_table_t *mtc_table_entry, u32 needs_training mtc_table_entry->burst_reg_per_ch.emc0_mrw10_idx = (EMC_CH0(EMC_TRAINING_OPT_CA_VREF) & 0xFFFF) | 0x880C0000; mtc_table_entry->burst_reg_per_ch.emc1_mrw10_idx = (channel1_enabled ? EMC_CH1(EMC_TRAINING_OPT_CA_VREF) & 0xFFFF : 0) | 0x880C0000; - u32 mrw11_dev_selectn = 0; + u32 mrw11_dev_selectn; if (dram_dev_num == TWO_RANK) mrw11_dev_selectn = 0x480C0000; else mrw11_dev_selectn = 0xC80C0000; - mtc_table_entry->burst_reg_per_ch.emc0_mrw11_idx = + mtc_table_entry->burst_reg_per_ch.emc0_mrw11_idx = ((EMC_CH0(EMC_TRAINING_OPT_CA_VREF) >> 16) & 0xFF) | (EMC_CH0(EMC_TRAINING_OPT_CA_VREF) >> 24 << 8) | (mrw11_dev_selectn & 0xFFFFFF00); @@ -2594,8 +2578,7 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u u32 bg_regulator_mode_change; u32 mr13_flip_fspop = 0; u32 mr13_flip_fspwr = 0; //float - u32 mr13_catr_enable; //float - bool opt_zcal_en_cc; + u32 mr13_catr_enable = 0; //float /* needs_training LOBYTE table var */ /* @@ -2610,9 +2593,7 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u | 6 | Needs RD training | | 7 | Needs RD_VREF training | */ - - bool opt_dll_mode = false; - bool is_lpddr3_dram = false; + bool compensate_trimmer_applicable = false; bool needs_ca_or_cavref_training = (needs_training & 3) != 0; bool needs_tristate_training = (needs_training & 0xF7) != 0; @@ -2631,37 +2612,23 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u bool channel1_enabled = (src_emc_entry->burst_regs.emc_fbio_cfg7_idx >> 2) & 1; s32 dram_type = EMC(EMC_FBIO_CFG5) & 3; s32 dram_dev_num = (MC(MC_EMEM_ADR_CFG) & 1) + 1; - - float src_clock_period = 1000000.0 / (double)src_emc_entry->rate_khz; - float dst_clock_period = 1000000.0 / (double)dst_emc_entry->rate_khz; + + s32 src_clock_period = 1000000000 / src_emc_entry->rate_khz; + s32 dst_clock_period = 1000000000 / dst_emc_entry->rate_khz; fsp_for_src_freq = !fsp_for_src_freq; - - if (dst_emc_entry->burst_regs.emc_zcal_interval_idx && !src_emc_entry->burst_regs.emc_zcal_interval_idx) - opt_zcal_en_cc = true; - else - opt_zcal_en_cc = dram_type == DRAM_TYPE_LPDDR4; - switch(dram_type) + if (dram_type != DRAM_TYPE_LPDDR4) { - case DRAM_TYPE_DDR2: - case DRAM_TYPE_LPDDR4: - break; - case DRAM_TYPE_DDR3: - opt_dll_mode = (dst_emc_entry->emc_emrs & 1) ^ 1; - break; - - case DRAM_TYPE_LPDDR2: - if ((dst_emc_entry->burst_regs.emc_fbio_cfg5_idx >> 25) & 1) //LPDDR3_DRAM bit - is_lpddr3_dram = true; - break; + EPRINTF("MTC Error: DRAM is not LPDDR4"); + return 5; } u32 tFC_lpddr4 = dst_emc_entry->dram_timings.t_fc_lpddr4; - float tZQCAL_lpddr4 = 1000.0f; - if (src_clock_period <= 2.0) - tZQCAL_lpddr4 = (float)(1000 - tFC_lpddr4); - s32 tZQCAL_lpddr4_fc_adj = (s32)(float)(tZQCAL_lpddr4 / dst_clock_period); + s32 tZQCAL_lpddr4 = 1000; + if (src_clock_period <= 2000) + tZQCAL_lpddr4 = 1000 - tFC_lpddr4; + s32 tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4 * 1000 / dst_clock_period; // Step 1 - Pre DVFS SW sequence. 
EPRINTF("Step 1"); @@ -2682,25 +2649,25 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u EMC(EMC_CFG) = emc_cfg; EMC(EMC_SEL_DPD_CTRL) = emc_sel_dpd_ctrl; EMC(EMC_DBG) = emc_dbg_o; - + if (!needs_tristate_training && dst_emc_entry->periodic_training) { if (dram_dev_num == TWO_RANK) { _wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, false, EMC_CH0); if (channel1_enabled) - _wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, false, channel1_enabled); + _wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, false, EMC_CH1); } else { _wait_emc_status(EMC_EMC_STATUS, 0x10, false, EMC_CH0); if (channel1_enabled) - _wait_emc_status(EMC_EMC_STATUS, 0x10, false, channel1_enabled); + _wait_emc_status(EMC_EMC_STATUS, 0x10, false, EMC_CH1); } _wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, false, EMC_CH0); if (channel1_enabled) - _wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, false, channel1_enabled); + _wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, false, EMC_CH1); // Reset clock tree delays. dst_emc_entry->current_dram_clktree_c0d0u0 = dst_emc_entry->trained_dram_clktree_c0d0u0; @@ -2746,10 +2713,9 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u EMC(EMC_PMACRO_DATA_PAD_TX_CTRL) = (((dst_emc_entry->burst_regs.emc_pmacro_data_pad_tx_ctrl_idx & 1) | (src_emc_entry->burst_regs.emc_pmacro_data_pad_tx_ctrl_idx & 0xFFFFFFFE)) & 0xFFFFFEFF) | (((dst_emc_entry->burst_regs.emc_pmacro_data_pad_tx_ctrl_idx >> 8) & 0x1) << 8); - _usleep(1); } - else if (bg_regulator_mode_change) - _usleep(1); + + _usleep(1); EMC(EMC_DBG) = emc_dbg_o; @@ -2779,7 +2745,7 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u // Step 4 - Update EMC_CFG. EPRINTF("Step 4"); - if (src_clock_period <= 50.0f || dram_type != 1) + if (src_clock_period <= 50000) EMC(EMC_CFG_2) = dst_emc_entry->emc_cfg_2; else _ccfifo_write(EMC_SELF_REF, 1, 0); @@ -2789,106 +2755,92 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u // u32 zq_wait_long = 0; // u32 zq_wait_short = 0; - // if (dram_type == DRAM_TYPE_LPDDR4) - // zq_wait_long = _fceil(1000.0f / dst_clock_period); - // else if (is_lpddr3_dram || dram_type == DRAM_TYPE_LPDDR2) - // zq_wait_long = _fceil(360.0f / dst_clock_period); - // else if (!dram_type) - // zq_wait_long = _fceil(320.0f / dst_clock_period); - - // if (is_lpddr3_dram || dram_type == DRAM_TYPE_LPDDR2) - // zq_wait_short = _fceil(90.0f / dst_clock_period); - // else if (!dram_type) - // zq_wait_short = _fceil(80.0f / dst_clock_period); + // zq_wait_long = _fceil(1000.0f / dst_clock_period); // Step 7 - Bug 200024907 - Patch RP R2P. EPRINTF("Step 7"); if (needs_ca_or_cavref_training && dram_dev_num == TWO_RANK) EMC(EMC_PIN) = 0x107; - if (dram_type == DRAM_TYPE_LPDDR4) + u32 R2P_war = 0; + u32 TRPab_war = 0; + u32 RP_war = 0; + u32 W2P_war = 0; + + s32 nRTP = 8; // <= 1066MHz. + if (src_clock_period < 3759 // 1000 / 266MHz. + && src_clock_period < 1876 // 1000 / 533MHz. + && src_clock_period < 1250 // 1000 / 800MHz. + && src_clock_period < 938) // 1000 / 1066MHz. + nRTP = 10; // 1067MHz < x <= 1333MHz. + if (src_clock_period < 750) // 1000 / 1333MHz. + nRTP = 12; // 1333MHz < x <= 1600MHz. + if (src_clock_period < 625) // 1000 / 1600MHz. + nRTP = 14; // 1600MHz < x <= 1866MHz. + if (src_clock_period < 535) // 1000 / 1866MHz. 
+ nRTP = 16; // > 1866MHz + + s32 tRPST = (src_emc_entry->emc_mrw >> 7) & 1; + + u32 deltaTWATM = div_o3(7500, src_clock_period); + if (deltaTWATM < 8) + deltaTWATM = 8; + + u32 tRTM = src_emc_entry->dram_timings.rl + div_o3(3600, src_clock_period) + deltaTWATM + tRPST + nRTP + 1; + + if (tRTM <= src_emc_entry->burst_regs.emc_rp_idx + src_emc_entry->burst_regs.emc_r2p_idx) { - u32 R2P_war = 0; - u32 TRPab_war = 0; - u32 RP_war = 0; - u32 W2P_war = 0; - - s32 nRTP = 8; // <= 1066MHz. - if (src_clock_period < 3.7593985 // 1000 / 266MHz. - && src_clock_period < 1.87617261 // 1000 / 533MHz. - && src_clock_period < 1.25 // 1000 / 800MHz. - && src_clock_period < 0.938086304) // 1000 / 1066MHz. - nRTP = 10; // 1067MHz < x <= 1333MHz. - if (src_clock_period < 0.750187547) // 1000 / 1333MHz. - nRTP = 12; // 1333MHz < x <= 1333MHz. - if (src_clock_period < 0.625) // 1000 / 1600MHz. - nRTP = 14; // 1600MHz < x <= 1866MHz. - if (src_clock_period < 0.535905681) // 1000 / 1866MHz. - nRTP = 16; // > 1866MHz - - float tRPST = (float)((src_emc_entry->emc_mrw >> 7) & 1) + 0.5f; - - s32 deltaTWATM = _fceil(7.5f / src_clock_period); - if (deltaTWATM < 8) - deltaTWATM = 8; - - u32 tRTM = (u32)_fceil((float)((((float)src_emc_entry->dram_timings.rl + _fceil(3.6f / src_clock_period) + (float)deltaTWATM) + tRPST) + (float)nRTP)); - - if (tRTM <= src_emc_entry->burst_regs.emc_rp_idx + src_emc_entry->burst_regs.emc_r2p_idx) + TRPab_war = src_emc_entry->burst_regs.emc_trpab_idx; + R2P_war = src_emc_entry->burst_regs.emc_r2p_idx; + RP_war = src_emc_entry->burst_regs.emc_rp_idx; + } + else + { + R2P_war = tRTM - src_emc_entry->burst_regs.emc_rp_idx; + TRPab_war = src_emc_entry->burst_regs.emc_trpab_idx; + RP_war = src_emc_entry->burst_regs.emc_rp_idx; + if (R2P_war > 63) { - TRPab_war = src_emc_entry->burst_regs.emc_trpab_idx; - R2P_war = src_emc_entry->burst_regs.emc_r2p_idx; - RP_war = src_emc_entry->burst_regs.emc_rp_idx; - } - else - { - R2P_war = tRTM - src_emc_entry->burst_regs.emc_rp_idx; - TRPab_war = src_emc_entry->burst_regs.emc_trpab_idx; - RP_war = src_emc_entry->burst_regs.emc_rp_idx; - if (R2P_war > 63) - { - RP_war = tRTM - 63; - R2P_war = 63; - if (src_emc_entry->burst_regs.emc_trpab_idx < tRTM - 63) - TRPab_war = tRTM - 63; - else - TRPab_war = src_emc_entry->burst_regs.emc_trpab_idx; - } + RP_war = tRTM - 63; + R2P_war = 63; + if (src_emc_entry->burst_regs.emc_trpab_idx < tRTM - 63) + TRPab_war = tRTM - 63; + else + TRPab_war = src_emc_entry->burst_regs.emc_trpab_idx; } + } - if (RP_war >= deltaTWATM) - W2P_war = src_emc_entry->burst_regs.emc_w2p_idx; - else + if (RP_war >= deltaTWATM) + W2P_war = src_emc_entry->burst_regs.emc_w2p_idx; + else + { + u32 W2P_war_temp = deltaTWATM + src_emc_entry->burst_regs.emc_w2p_idx; + W2P_war = W2P_war_temp - RP_war; + if (W2P_war > 63) { - u32 W2P_war_temp = deltaTWATM + src_emc_entry->burst_regs.emc_w2p_idx; - W2P_war = W2P_war_temp - RP_war; - if (W2P_war > 63) - { - RP_war = W2P_war_temp - 63; - W2P_war = 63; - if (TRPab_war < RP_war) - TRPab_war = RP_war; - } + RP_war = W2P_war_temp - 63; + W2P_war = 63; + if (TRPab_war < RP_war) + TRPab_war = RP_war; } + } - if ( src_emc_entry->burst_regs.emc_w2p_idx != W2P_war - || src_emc_entry->burst_regs.emc_rp_idx != RP_war - || src_emc_entry->burst_regs.emc_r2p_idx != R2P_war - || src_emc_entry->burst_regs.emc_trpab_idx != TRPab_war) - { - EMC(EMC_DBG) = emc_dbg_o | 2; - EMC(EMC_RP) = RP_war; - EMC(EMC_R2P) = R2P_war; - EMC(EMC_W2P) = W2P_war; - EMC(EMC_TRPAB) = TRPab_war; - EMC(EMC_DBG) = emc_dbg_o; - _usleep(1); - } + 
if ( src_emc_entry->burst_regs.emc_w2p_idx != W2P_war + || src_emc_entry->burst_regs.emc_rp_idx != RP_war + || src_emc_entry->burst_regs.emc_r2p_idx != R2P_war + || src_emc_entry->burst_regs.emc_trpab_idx != TRPab_war) + { + EMC(EMC_DBG) = emc_dbg_o | 2; + EMC(EMC_RP) = RP_war; + EMC(EMC_R2P) = R2P_war; + EMC(EMC_W2P) = W2P_war; + EMC(EMC_TRPAB) = TRPab_war; + EMC(EMC_DBG) = emc_dbg_o; + _usleep(1); } // Step 7.2 - Program FSP reference registers and send MRWs to new FSPWR. EPRINTF("Step 7.2"); - mr13_catr_enable = 0; if (fsp_for_src_freq) { mr13_flip_fspop = dst_emc_entry->emc_mrw3 | 0xC0; @@ -2900,35 +2852,27 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u mr13_flip_fspwr = mr13_flip_fspop | 0x80; } - if (needs_ca_or_cavref_training && dram_dev_num == TWO_RANK) - { - if (needs_swap_rank_training) - { - mr13_flip_fspop = (mr13_flip_fspop & 0x3FFFFFFF) | 0x80000000; - mr13_catr_enable = (mr13_flip_fspwr & 0x3FFFFFFF)| 0x40000001; - } - else - { - mr13_flip_fspop = (mr13_flip_fspop & 0x3FFFFFFF) | 0x40000000; - mr13_catr_enable = (mr13_flip_fspwr & 0x3FFFFFFF) | 0x80000001; - } - } - else if (dram_dev_num == TWO_RANK) + if (dram_dev_num == TWO_RANK) { if (needs_swap_rank_training) mr13_catr_enable = (mr13_flip_fspwr & 0x3FFFFFFF) | 0x40000001; else mr13_catr_enable = (mr13_flip_fspwr & 0x3FFFFFFF) | 0x80000001; + + if (needs_ca_or_cavref_training) + { + if (needs_swap_rank_training) + mr13_flip_fspop = (mr13_flip_fspop & 0x3FFFFFFF) | 0x80000000; + else + mr13_flip_fspop = (mr13_flip_fspop & 0x3FFFFFFF) | 0x40000000; + } } else mr13_catr_enable = mr13_flip_fspwr | 1; - if (dram_type == DRAM_TYPE_LPDDR4) - { - EMC(EMC_MRW3) = mr13_flip_fspwr; - EMC(EMC_MRW) = dst_emc_entry->emc_mrw; - EMC(EMC_MRW2) = dst_emc_entry->emc_mrw2; - } + EMC(EMC_MRW3) = mr13_flip_fspwr; + EMC(EMC_MRW) = dst_emc_entry->emc_mrw; + EMC(EMC_MRW2) = dst_emc_entry->emc_mrw2; // Step 8 - Program the shadow registers. EPRINTF("Step 8"); @@ -2969,19 +2913,15 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u if (reg_check && reg_addr == EMC_CFG) { - if (dram_type == DRAM_TYPE_LPDDR4) - reg_val &= 0xFFFFFFF; - else - reg_val &= 0xCFFFFFFF; + reg_val &= 0xFFFFFFF; + EMC(reg_addr) = reg_val; continue; } - if (!reg_check && dram_type != DRAM_TYPE_LPDDR4) - continue; if (reg_addr != EMC_CFG)// EMC_CFG { - if (reg_addr != EMC_ZCAL_INTERVAL || !opt_zcal_en_cc) + if (reg_addr != EMC_ZCAL_INTERVAL) { switch ( reg_addr ) { @@ -3023,15 +2963,7 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u for (u32 i = 0; dst_emc_entry->num_burst_per_ch > i; i++) { reg_addr = burst_reg_per_ch_emc01_addr_table[i]; - if (reg_addr - && (((((reg_addr & 0xFFF) - 0x4B8) & 0xFFFFFFF7) //EMC0_MRW11 - && (reg_addr & 0xFFF) != 0x4B4 //EMC0_MRW10 - ALways true, because of constant table. 
- && (reg_addr & 0xFFFFFFF7) != EMC_MRW6 - && reg_addr != EMC_MRW15 - && reg_addr != EMC_MRW14 - && ((reg_addr - EMC_MRW7) & 0xFFFFFFF7)) - || dram_type == DRAM_TYPE_LPDDR4) - && (channel1_enabled || ((reg_addr - 0x4000) > 0xFFF))) + if (reg_addr && (channel1_enabled || ((reg_addr - 0x4000) > 0xFFF))) { EMC(reg_addr) = dst_burst_regs->burst_reg_per_ch[i]; } @@ -3074,8 +3006,8 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u EMC(reg_addr) = trim_regs_table->trim_regs[i]; } } - - // Writing trim_regs_per_ch + + // Writing trim_regs_per_ch reg_val = 0; for (u32 i = 0; dst_emc_entry->num_trim_per_ch > i; i++) { @@ -3137,103 +3069,91 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u // Step 9 - LPDDR4. EPRINTF("Step 9"); - if (dram_type == DRAM_TYPE_LPDDR4) + + EMC(EMC_ZCAL_INTERVAL) = src_emc_entry->burst_regs.emc_zcal_interval_idx & 0xFF000000; + EMC(EMC_ZCAL_WAIT_CNT) = dst_emc_entry->burst_regs.emc_zcal_wait_cnt_idx & 0xFFFFF800; + EMC(EMC_DBG) = emc_dbg_o | 0x40000002; + EMC(EMC_ZCAL_INTERVAL) = src_emc_entry->burst_regs.emc_zcal_interval_idx & 0xFF000000; + EMC(EMC_DBG) = emc_dbg_o; + + if (needs_tristate_training) { - EMC(EMC_ZCAL_INTERVAL) = src_emc_entry->burst_regs.emc_zcal_interval_idx & 0xFF000000; - EMC(EMC_ZCAL_WAIT_CNT) = dst_emc_entry->burst_regs.emc_zcal_wait_cnt_idx & 0xFFFFF800; - EMC(EMC_DBG) = emc_dbg_o | 0x40000002; - EMC(EMC_ZCAL_INTERVAL) = src_emc_entry->burst_regs.emc_zcal_interval_idx & 0xFF000000; + EMC(EMC_DBG) = emc_dbg_o | 2; + EMC(EMC_PMACRO_AUTOCAL_CFG_COMMON) = dst_emc_entry->burst_regs.emc_pmacro_autocal_cfg_common_idx | 0x10000; + + if (needs_ca_or_cavref_training) + EMC(EMC_FBIO_CFG5) = src_emc_entry->burst_regs.emc_fbio_cfg5_idx | 0x8000000; + EMC(EMC_DBG) = emc_dbg_o; - if (needs_tristate_training) - { - EMC(EMC_DBG) = emc_dbg_o | 2; - EMC(EMC_PMACRO_AUTOCAL_CFG_COMMON) = dst_emc_entry->burst_regs.emc_pmacro_autocal_cfg_common_idx | 0x10000; - if (needs_ca_or_cavref_training) - EMC(EMC_FBIO_CFG5) = src_emc_entry->burst_regs.emc_fbio_cfg5_idx | 0x8000000; - EMC(EMC_DBG) = emc_dbg_o; - if (channel1_enabled) - _ccfifo_write(EMC_CFG_SYNC, 0, 0); - _ccfifo_write(EMC_DBG, (emc_dbg_o & 0xF3FFFFFF) | 0x4000000, 0); - } + + if (channel1_enabled) + _ccfifo_write(EMC_CFG_SYNC, 0, 0); + + _ccfifo_write(EMC_DBG, (emc_dbg_o & 0xF3FFFFFF) | 0x4000000, 0); } + // Step 10 - Self refresh EPRINTF("Step 10"); - u32 emc_self_ref_val = 1; - if (!opt_dll_mode && dram_type == DRAM_TYPE_DDR3) - _ccfifo_write(EMC_EMRS, dst_emc_entry->emc_emrs, 0); - else if (dram_type == DRAM_TYPE_LPDDR4) - emc_self_ref_val = 0x101; - - _ccfifo_write(EMC_SELF_REF, emc_self_ref_val, 0); + _ccfifo_write(EMC_SELF_REF, 0x101, 0); - if (needs_ca_or_cavref_training < ((src_clock_period <= 2.0f) && dram_type == DRAM_TYPE_LPDDR4)) + if (needs_ca_or_cavref_training < (src_clock_period <= 2000)) { - _ccfifo_write(EMC_MRW3, mr13_flip_fspwr ^ 0x40, 0); - _ccfifo_write(EMC_MRW6, (src_emc_entry->burst_regs.emc_mrw6_idx & 0xC0C0) | (dst_emc_entry->burst_regs.emc_mrw6_idx & 0xFFFF3F3F), 0); - _ccfifo_write(EMC_MRW14, (src_emc_entry->burst_regs.emc_mrw14_idx & 0x3838) | (dst_emc_entry->burst_regs.emc_mrw14_idx & 0xFFFF0707), 0); + _ccfifo_write(EMC_MRW3, mr13_flip_fspwr ^ 0x40, 0); + _ccfifo_write(EMC_MRW6, (src_emc_entry->burst_regs.emc_mrw6_idx & 0xC0C0) | (dst_emc_entry->burst_regs.emc_mrw6_idx & 0xFFFF3F3F), 0); + _ccfifo_write(EMC_MRW14, (src_emc_entry->burst_regs.emc_mrw14_idx & 0x3838) | (dst_emc_entry->burst_regs.emc_mrw14_idx & 
0xFFFF0707), 0); if (dram_dev_num == TWO_RANK) { - _ccfifo_write(EMC_MRW7, (src_emc_entry->burst_regs.emc_mrw7_idx & 0xC0C0) | (dst_emc_entry->burst_regs.emc_mrw7_idx & 0xFFFF3F3F), 0); - _ccfifo_write(EMC_MRW15, (src_emc_entry->burst_regs.emc_mrw15_idx & 0x3838) | (dst_emc_entry->burst_regs.emc_mrw15_idx & 0xFFFF0707), 0); - } - if (opt_zcal_en_cc) - { - if (dram_dev_num == ONE_RANK || zcal_resistor_shared) - emc_zq_cal = 0x80000001; - else - emc_zq_cal = 1; - _ccfifo_write(EMC_ZQ_CAL, emc_zq_cal, 0); + _ccfifo_write(EMC_MRW7, (src_emc_entry->burst_regs.emc_mrw7_idx & 0xC0C0) | (dst_emc_entry->burst_regs.emc_mrw7_idx & 0xFFFF3F3F), 0); + _ccfifo_write(EMC_MRW15, (src_emc_entry->burst_regs.emc_mrw15_idx & 0x3838) | (dst_emc_entry->burst_regs.emc_mrw15_idx & 0xFFFF0707), 0); } + + if (dram_dev_num == ONE_RANK || zcal_resistor_shared) + emc_zq_cal = 0x80000001; + else + emc_zq_cal = 1; + + _ccfifo_write(EMC_ZQ_CAL, emc_zq_cal, 0); } - + emc_dbg_val = emc_dbg_o; - float tRP_src_timing = (float)((float)src_emc_entry->dram_timings.t_rp / src_clock_period); - float tRFC_src_timing = (float)((float)src_emc_entry->dram_timings.t_rfc / src_clock_period); + u32 tRP_src_timing = src_emc_entry->dram_timings.t_rp * 1000 / src_clock_period; bool in_self_refresh = false; u32 ref_delay = 0; - if (dram_type == DRAM_TYPE_LPDDR4) + if (needs_tristate_training) { - if (needs_tristate_training) - { - emc_dbg_val = (emc_dbg_o & 0xF3FFFFFF) | 0x44000000; - _ccfifo_write(EMC_DBG, emc_dbg_val, 0); - } - if (needs_ca_or_cavref_training) - { - _ccfifo_write(EMC_PMACRO_DATA_RX_TERM_MODE, src_emc_entry->burst_regs.emc_pmacro_data_rx_term_mode_idx & 0xFFFFFCCC, 0); - if (dram_dev_num == TWO_RANK && needs_swap_rank_training) - { - _ccfifo_write(EMC_MRW3, mr13_flip_fspop | 8, (u32)tRP_src_timing); - _ccfifo_write(EMC_MRW3, mr13_catr_enable | 8, 0); - } - else - _ccfifo_write(EMC_MRW3, mr13_catr_enable | 8, (u32)tRP_src_timing); + emc_dbg_val = (emc_dbg_o & 0xF3FFFFFF) | 0x44000000; + _ccfifo_write(EMC_DBG, emc_dbg_val, 0); + } - _ccfifo_write(EMC_TR_CTRL_0, 0x15A, 0); - ref_delay = (u32)(1000.0 / src_clock_period); + if (needs_ca_or_cavref_training) + { + _ccfifo_write(EMC_PMACRO_DATA_RX_TERM_MODE, src_emc_entry->burst_regs.emc_pmacro_data_rx_term_mode_idx & 0xFFFFFCCC, 0); + + if (dram_dev_num == TWO_RANK && needs_swap_rank_training) + { + _ccfifo_write(EMC_MRW3, mr13_flip_fspop | 8, tRP_src_timing); + _ccfifo_write(EMC_MRW3, mr13_catr_enable | 8, 0); } else - { - _ccfifo_write(EMC_MRW3, mr13_flip_fspop | 8, (u32)tRP_src_timing); - ref_delay = (u32)(float)((float)tFC_lpddr4 / src_clock_period); - } - _ccfifo_write(EMC_INTSTATUS, 0, ref_delay); - _ccfifo_write(EMC_PIN, emc_pin_o & 0xFFFFFFF8, 30); + _ccfifo_write(EMC_MRW3, mr13_catr_enable | 8, tRP_src_timing); + + _ccfifo_write(EMC_TR_CTRL_0, 0x15A, 0); + ref_delay = 1000000 / src_clock_period; } else { - in_self_refresh = true; - _ccfifo_write(EMC_SELF_REF, 0x1, 0); + _ccfifo_write(EMC_MRW3, mr13_flip_fspop | 8, tRP_src_timing); + ref_delay = tFC_lpddr4 * 1000 / src_clock_period; } + _ccfifo_write(EMC_INTSTATUS, 0, ref_delay); + _ccfifo_write(EMC_PIN, emc_pin_o & 0xFFFFFFF8, 30); + // Step 11 - Ramp down. 
EPRINTF("Step 11"); - ref_delay = 0; - if (dram_type != DRAM_TYPE_LPDDR4) - ref_delay = (u32)(float)(tRP_src_timing + tRFC_src_timing + 20.0f); - _ccfifo_write(EMC_CFG_SYNC, 0, ref_delay); + _ccfifo_write(EMC_CFG_SYNC, 0, 0); _ccfifo_write(EMC_DBG, emc_dbg_val | 0x40000002, 0); // WRITE_MUX_ACTIVE | WRITE_ACTIVE_ONLY ramp_down_wait = _dvfs_power_ramp_down(false, src_emc_entry, dst_emc_entry, src_clock_period); @@ -3252,48 +3172,45 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u // Step 14 - Bringup CKE pins. EPRINTF("Step 14"); - if (dram_type == DRAM_TYPE_LPDDR4) + u32 emc_pin_val_final = 0; + if (needs_ca_or_cavref_training) { - u32 emc_pin_val_final = 0; - if (needs_ca_or_cavref_training) + emc_pin_val_final = emc_pin_o & 0xFFFFFFF8; + if (dram_dev_num == TWO_RANK) { - emc_pin_val_final = emc_pin_o & 0xFFFFFFF8; - if (dram_dev_num == TWO_RANK) - { - if (needs_swap_rank_training) - emc_pin_val_final |= 5; - else - emc_pin_val_final |= 6; - } + if (needs_swap_rank_training) + emc_pin_val_final |= 5; + else + emc_pin_val_final |= 6; } - else if (dram_dev_num == TWO_RANK) - emc_pin_val_final = emc_pin_o | 7; - else - emc_pin_val_final = (emc_pin_o & 0xFFFFFFF8) | 1; - - _ccfifo_write(EMC_PIN, emc_pin_val_final, 0); } + else if (dram_dev_num == TWO_RANK) + emc_pin_val_final = emc_pin_o | 7; + else + emc_pin_val_final = (emc_pin_o & 0xFFFFFFF8) | 1; + + _ccfifo_write(EMC_PIN, emc_pin_val_final, 0); // Step 15 - Zqlatch. EPRINTF("Step 15"); - if (dram_type == DRAM_TYPE_LPDDR4 && !needs_ca_or_cavref_training && opt_zcal_en_cc) + if (!needs_ca_or_cavref_training) { s32 zq_latch_dvfs_wait_time = 0; s32 T_PDEX_timing_final = 0; - s32 T_PDEX_timing = _fceil((float)dst_emc_entry->dram_timings.t_pdex / dst_clock_period); + s32 T_PDEX_timing = div_o3(dst_emc_entry->dram_timings.t_pdex * 1000, dst_clock_period); - if (src_clock_period > 2.0) - zq_latch_dvfs_wait_time = (s32)(tZQCAL_lpddr4_fc_adj - T_PDEX_timing); + if (src_clock_period > 2000) + zq_latch_dvfs_wait_time = tZQCAL_lpddr4_fc_adj - T_PDEX_timing; else zq_latch_dvfs_wait_time = - (s32)(tZQCAL_lpddr4_fc_adj - (s32)((float)(ramp_up_wait + ramp_down_wait) / dst_clock_period)); + tZQCAL_lpddr4_fc_adj - (ramp_up_wait + ramp_down_wait) * 1000 / dst_clock_period; if (dram_dev_num == ONE_RANK) { if (T_PDEX_timing < 0) T_PDEX_timing = 0; - if (src_clock_period > 2.0) + if (src_clock_period > 2000) _ccfifo_write(EMC_ZQ_CAL, 0x80000001, T_PDEX_timing); if (!needs_tristate_training) @@ -3308,7 +3225,7 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u } else if (zcal_resistor_shared) { - if (src_clock_period > 2.0) + if (src_clock_period > 2000) { T_PDEX_timing_final = T_PDEX_timing; if (T_PDEX_timing < 0) @@ -3332,14 +3249,14 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u } emc_zq_cal = 0x40000002; - zq_latch_dvfs_wait_time = (s32)(float)(1000.0f / dst_clock_period); + zq_latch_dvfs_wait_time = 1000000 / dst_clock_period; } else { if (T_PDEX_timing < 0) T_PDEX_timing = 0; - if (src_clock_period > 2.0) + if (src_clock_period > 2000) _ccfifo_write(EMC_ZQ_CAL, 1, T_PDEX_timing); if (!needs_tristate_training) @@ -3362,9 +3279,9 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u // Step 16 - LPDDR4 Conditional training kickoff. 
EPRINTF("Step 16"); - if (needs_tristate_training && dram_type == DRAM_TYPE_LPDDR4) + if (needs_tristate_training) { - _ccfifo_write(EMC_INTSTATUS, 0, (u32)(1020.0f / dst_clock_period)); + _ccfifo_write(EMC_INTSTATUS, 0, 1020000 / dst_clock_period); u32 training_command = 0; @@ -3403,7 +3320,7 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u if (!needs_ca_or_cavref_training || needs_swap_rank_training) { _ccfifo_write(EMC_MRW3, mr13_flip_fspop ^ 0xC0, 0); - _ccfifo_write(EMC_INTSTATUS, 0, (u32)(1000.0f / dst_clock_period)); + _ccfifo_write(EMC_INTSTATUS, 0, 1000000 / dst_clock_period); } _ccfifo_write(EMC_PIN, emc_pin_o & 0xFFFFFFF8, 0); @@ -3426,46 +3343,43 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u if (needs_ca_or_cavref_training) { - _ccfifo_write(EMC_TR_CTRL_0, 0x4A, (u32)(float)(200.0f / src_clock_period)); - _ccfifo_write(EMC_TR_CTRL_0, 0x40, (u32)(float)(1000.0f / src_clock_period)); + _ccfifo_write(EMC_TR_CTRL_0, 0x4A, 200000 / src_clock_period); + _ccfifo_write(EMC_TR_CTRL_0, 0x40, 1000000 / src_clock_period); _ccfifo_write(EMC_MRW3, mr13_catr_enable & 0xFFFFFFFE, 0); - _ccfifo_write(EMC_INTSTATUS, 0, (u32)(float)(1000.0f / src_clock_period)); + _ccfifo_write(EMC_INTSTATUS, 0, 1000000 / src_clock_period); _ccfifo_write(EMC_PMACRO_DATA_RX_TERM_MODE, src_emc_entry->burst_regs.emc_pmacro_data_rx_term_mode_idx, 0); } _ccfifo_write(EMC_DBG, emc_dbg_o, 0); - if (opt_zcal_en_cc) + _ccfifo_write(EMC_ZQ_CAL, 0x80000001, 0); + _ccfifo_write(EMC_ZQ_CAL, 0x80000002, 1000000 / src_clock_period); + + if (zcal_resistor_shared && dram_dev_num == TWO_RANK) { - _ccfifo_write(EMC_ZQ_CAL, 0x80000001, 0); - _ccfifo_write(EMC_ZQ_CAL, 0x80000002, (u32)(float)(1000.0f / src_clock_period)); - - if (zcal_resistor_shared && dram_dev_num == TWO_RANK) + if (!needs_ca_or_cavref_training || needs_swap_rank_training) { - if (!needs_ca_or_cavref_training || needs_swap_rank_training) - { - _ccfifo_write(EMC_ZQ_CAL, 0x40000001, 0); - _ccfifo_write(EMC_ZQ_CAL, 0x40000002, (u32)(float)(1000.0f / src_clock_period)); - if (!needs_ca_or_cavref_training) - _ccfifo_write(EMC_MRW3, ((mr13_flip_fspop ^ 0xC0) & 0xF3FFFFF7) | 0xC000000, 0); - } + _ccfifo_write(EMC_ZQ_CAL, 0x40000001, 0); + _ccfifo_write(EMC_ZQ_CAL, 0x40000002, 1000000 / src_clock_period); + if (!needs_ca_or_cavref_training) + _ccfifo_write(EMC_MRW3, ((mr13_flip_fspop ^ 0xC0) & 0xF3FFFFF7) | 0xC000000, 0); + } + _ccfifo_write(EMC_SELF_REF, 0x100, 0); + + goto step_19_2; + } + else if (dram_dev_num == TWO_RANK) + { + if (needs_ca_or_cavref_training && !needs_swap_rank_training) + { _ccfifo_write(EMC_SELF_REF, 0x100, 0); goto step_19_2; } - else if (dram_dev_num == TWO_RANK) - { - if (needs_ca_or_cavref_training && !needs_swap_rank_training) - { - _ccfifo_write(EMC_SELF_REF, 0x100, 0); - goto step_19_2; - } - - _ccfifo_write(EMC_ZQ_CAL, 0x40000001, 0); - _ccfifo_write(EMC_ZQ_CAL, 0x40000002, (u32)(float)(1000.0f / src_clock_period)); - } + _ccfifo_write(EMC_ZQ_CAL, 0x40000001, 0); + _ccfifo_write(EMC_ZQ_CAL, 0x40000002, 1000000 / src_clock_period); } if (!needs_ca_or_cavref_training) @@ -3474,59 +3388,6 @@ s32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_entry, u _ccfifo_write(EMC_SELF_REF, 0x100, 0); } - if (dram_type != DRAM_TYPE_LPDDR4) - { - // Step 17 - MANSR exit self refresh. 
- EPRINTF("Step 17"); - _ccfifo_write(EMC_SELF_REF, 0, 0); - - if (dram_type != DRAM_TYPE_LPDDR2) - { - if (dram_type == DRAM_TYPE_DDR3) - { - if (opt_dll_mode) - _ccfifo_write(EMC_EMRS, dst_emc_entry->emc_emrs & 0xFBFFFFFF, 0); - - _ccfifo_write(EMC_EMRS2, dst_emc_entry->emc_emrs2 & 0xFBFFFFFF, 0); - _ccfifo_write(EMC_MRS, dst_emc_entry->emc_mrs | 0x4000000, 0); - - if (opt_zcal_en_cc) - { - _ccfifo_write(EMC_ZQ_CAL, 0x80000001, 0); - if (dram_dev_num == TWO_RANK) - _ccfifo_write(EMC_ZQ_CAL, 0x40000001, 0); - } - } - - if (dram_type == DRAM_TYPE_LPDDR2 && opt_zcal_en_cc) - { - _ccfifo_write(EMC_MRW, 0x880A0056, 0); - if (dram_dev_num == TWO_RANK) - _ccfifo_write(EMC_MRW, 0x480A0056, 0); - } - - goto step_19_2; - } - - // Step 18 - Send MRWs to LPDDR3/DDR3. - EPRINTF("Step 18"); - _ccfifo_write(EMC_MRW2, dst_emc_entry->emc_mrw2, 0); - _ccfifo_write(EMC_MRW, dst_emc_entry->emc_mrw, 0); - if (is_lpddr3_dram) - _ccfifo_write(EMC_MRW4, dst_emc_entry->emc_mrw4, 0); - - // Step 19 - ZQCAL for LPDDR3/DDR3. - EPRINTF("Step 19"); - if (opt_zcal_en_cc) - { - u32 zcal_wait_time_clocks = _fceil(90.0f / dst_clock_period); - _ccfifo_write(EMC_MRS_WAIT_CNT2, ((zcal_wait_time_clocks & 0xB) << 16) | (zcal_wait_time_clocks & 0x3FF), 0); //WTFF - _ccfifo_write(EMC_MRW, 0x880A0056, 0); - if (dram_dev_num == TWO_RANK) - _ccfifo_write(EMC_MRW, 0x480A0056, 0); - } - } - step_19_2: // Step 19.2. EPRINTF("Step 19.2"); @@ -3537,14 +3398,14 @@ step_19_2: u32 bg_regulator_switch_complete_wait_clks = 0; if (needs_tristate_training) { - bg_regulator_switch_complete_wait_clks = (u32)(float)(1250.0f / src_clock_period); + bg_regulator_switch_complete_wait_clks = 1250000 / src_clock_period; _ccfifo_write(EMC_PMACRO_BG_BIAS_CTRL_0, src_emc_entry->burst_regs.emc_pmacro_bg_bias_ctrl_0_idx, bg_regulator_switch_complete_wait_clks); } else { if (ramp_up_wait <= 1250) - bg_regulator_switch_complete_wait_clks = (u32)(float)((float)((s32)1250 - ramp_up_wait) / dst_clock_period); + bg_regulator_switch_complete_wait_clks = (1250 - ramp_up_wait) * 1000 / dst_clock_period; _ccfifo_write(EMC_PMACRO_BG_BIAS_CTRL_0, dst_emc_entry->burst_regs.emc_pmacro_bg_bias_ctrl_0_idx, bg_regulator_switch_complete_wait_clks); } @@ -3554,27 +3415,22 @@ step_19_2: // Step 20 - Issue ref and optional QRST. EPRINTF("Step 20"); - if (needs_tristate_training || dram_type != DRAM_TYPE_LPDDR4) + if (needs_tristate_training) _ccfifo_write(EMC_REF, 0, 0); // Step 21 - Restore ZCAL and ZCAL interval. EPRINTF("Step 21"); _ccfifo_write(EMC_DBG, emc_dbg_o | 2, 0); - if (opt_zcal_en_cc) - { - if (needs_tristate_training) - _ccfifo_write(EMC_ZCAL_INTERVAL, src_emc_entry->burst_regs.emc_zcal_interval_idx, 0); - else if (dram_type != DRAM_TYPE_LPDDR4) - _ccfifo_write(EMC_ZCAL_INTERVAL, dst_emc_entry->burst_regs.emc_zcal_interval_idx, 0); - } + if (needs_tristate_training) + _ccfifo_write(EMC_ZCAL_INTERVAL, src_emc_entry->burst_regs.emc_zcal_interval_idx, 0); _ccfifo_write(EMC_CFG, dst_emc_entry->burst_regs.emc_cfg_idx & 0xEFFFFFFF, 0); // Step 22 - Restore EMC_CFG_PIPE_CLK. EPRINTF("Step 22"); - if (needs_tristate_training && dram_type == DRAM_TYPE_LPDDR4) - _ccfifo_write(EMC_SEL_DPD_CTRL, src_emc_entry->emc_sel_dpd_ctrl, 0); + _ccfifo_write(EMC_SEL_DPD_CTRL, src_emc_entry->emc_sel_dpd_ctrl, 0); + _ccfifo_write(EMC_DBG, emc_dbg_o, 0); _ccfifo_write(EMC_CFG_PIPE_CLK, emc_cfg_pipe_clk_o, 0); @@ -3645,7 +3501,7 @@ step_19_2: // Step 28 - Training recover. 
EPRINTF("Step 28"); - if (needs_tristate_training && dram_type == DRAM_TYPE_LPDDR4) + if (needs_tristate_training) { EMC(EMC_DBG) = emc_dbg_o | 2; EMC(EMC_CFG) = dst_emc_entry->burst_regs.emc_cfg_idx; @@ -3698,7 +3554,6 @@ static void _minerva_train_patterns(emc_table_t *src_emc_entry, emc_table_t *dst u32 needs_training_emc_table[8] = {0}; u32 needs_training = dst_emc_entry->needs_training; - u32 dram_type = dst_emc_entry->burst_regs.emc_fbio_cfg5_idx & 3; bool dual_channel = (EMC(EMC_FBIO_CFG7) >> 1) & ((EMC(EMC_FBIO_CFG7) >> 2) & 1); // Must start as true. @@ -3764,13 +3619,11 @@ static void _minerva_train_patterns(emc_table_t *src_emc_entry, emc_table_t *dst ; // Bug 200024907. - if (dram_type == DRAM_TYPE_LPDDR4) - { - EMC(EMC_RP) = src_emc_entry->burst_regs.emc_rp_idx; - EMC(EMC_R2P) = src_emc_entry->burst_regs.emc_r2p_idx; - EMC(EMC_W2P) = src_emc_entry->burst_regs.emc_w2p_idx; - EMC(EMC_TRPAB) = src_emc_entry->burst_regs.emc_trpab_idx; - } + EMC(EMC_RP) = src_emc_entry->burst_regs.emc_rp_idx; + EMC(EMC_R2P) = src_emc_entry->burst_regs.emc_r2p_idx; + EMC(EMC_W2P) = src_emc_entry->burst_regs.emc_w2p_idx; + EMC(EMC_TRPAB) = src_emc_entry->burst_regs.emc_trpab_idx; + _timing_update(dual_channel); } @@ -3787,7 +3640,7 @@ void _minerva_do_over_temp_compensation(mtc_config_t *mtc_cfg) s32 dram_type = EMC(EMC_FBIO_CFG5) & 3; // Only LPDDR chips are supported. - if (dram_type != DRAM_TYPE_LPDDR2 && dram_type != DRAM_TYPE_LPDDR4) + if (dram_type != DRAM_TYPE_LPDDR4) return; s32 dram_temp = _get_dram_temperature(); @@ -3813,12 +3666,12 @@ void _minerva_do_over_temp_compensation(mtc_config_t *mtc_cfg) } break; // Over temp (> 85 oC). - case 4: // + case 4: // 2x refresh. refr = (refr & 0xFFFF0000) | ((refr & 0xFFFF) >> REFRESH_X2); pre_refr = (pre_refr & 0xFFFF0000) | ((pre_refr & 0xFFFF) >> REFRESH_X2); dyn_self_ref = (dyn_self_ref & 0xFFFF0000) | ((dyn_self_ref & 0xFFFF) >> REFRESH_X2); break; - case 5: + case 5: // 4x refresh. case 6: // Temp 6 normally needs a derating emc table. 
refr = (refr & 0xFFFF0000) | ((refr & 0xFFFF) >> REFRESH_X4); pre_refr = (pre_refr & 0xFFFF0000) | ((pre_refr & 0xFFFF) >> REFRESH_X4); @@ -3856,22 +3709,22 @@ u32 _minerva_do_periodic_compensation(emc_table_t *mtc_table_entry) { _wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, 0, EMC_CH0); if (channel1_enabled) - _wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, 0, channel1_enabled); + _wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, 0, EMC_CH1); } else { _wait_emc_status(EMC_EMC_STATUS, 0x10, 0, 0); if (channel1_enabled) - _wait_emc_status(EMC_EMC_STATUS, 0x10, 0, channel1_enabled); + _wait_emc_status(EMC_EMC_STATUS, 0x10, 0, EMC_CH1); } _wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, 0, EMC_CH0); if (channel1_enabled) - _wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, 0, channel1_enabled); + _wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, 0, EMC_CH1); //_wait_emc_status(EMC_EMC_STATUS, REQ_FIFO_EMPTY, 0, EMC_CH0); //v1.6 //if (channel1_enabled) - // _wait_emc_status(EMC_EMC_STATUS, REQ_FIFO_EMPTY, 0, channel1_enabled); //v1.6 + // _wait_emc_status(EMC_EMC_STATUS, REQ_FIFO_EMPTY, 0, EMC_CH1); //v1.6 u32 emc_cfg_update = EMC(EMC_CFG_UPDATE); EMC(EMC_CFG_UPDATE) = (emc_cfg_update & 0xFFFFF9FF) | 0x400; @@ -3921,7 +3774,7 @@ s32 _minerva_set_rate(mtc_config_t *mtc_cfg) emc_table_t *src_emc_entry; emc_table_t *dst_emc_entry; - for (s32 i = 0; i < mtc_cfg->table_entries; i++) + for (u32 i = 0; i < mtc_cfg->table_entries; i++) { table_entry_rate = mtc_cfg->mtc_table[i].rate_khz; if (mtc_cfg->rate_from == table_entry_rate) @@ -4033,7 +3886,7 @@ static void _minerva_get_table(mtc_config_t *mtc_cfg) void _minerva_init(mtc_config_t *mtc_cfg, void* bp) { EPRINTF("-- Minerva Training Cell --"); - + train_ram_patterns = mtc_cfg->train_ram_patterns; fsp_for_src_freq = mtc_cfg->fsp_for_src_freq; emc_2X_clk_src_is_pllmb = mtc_cfg->emc_2X_clk_src_is_pllmb;
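
Note on the arithmetic this patch switches to: the float clock periods (nanoseconds) are replaced throughout by integer periods in picoseconds, computed as 1000000000 / rate_khz in _minerva_set_clock(), and the ceiling rounding previously done with _fceil() on floats is now done with the new div_o3() helper. Below is a minimal standalone sketch (not part of the diff) of that fixed-point pattern, using the patch's own div_o3() and an illustrative 1600 MHz rate plus a printf harness that are assumptions for demonstration only.

    #include <stdio.h>
    #include <stdint.h>

    typedef uint32_t u32;

    /* Ceiling division helper, as introduced by this patch
     * (it replaces _fceil() on float nanosecond values). */
    static u32 div_o3(u32 a, u32 b)
    {
    	u32 result = a / b;

    	if ((b * result) < a)
    		return result + 1;
    	else
    		return result;
    }

    int main(void)
    {
    	/* Illustrative rate only: 1600000 kHz (1600 MHz). */
    	u32 rate_khz = 1600000;

    	/* Clock period in picoseconds, as in _minerva_set_clock():
    	 * 1000000000 / 1600000 = 625 ps, i.e. the old 0.625 ns float value. */
    	u32 clock_period_ps = 1000000000 / rate_khz;

    	/* Old: deltaTWATM = _fceil(7.5f / src_clock_period), period in ns.
    	 * New: the 7.5 ns constant becomes 7500 ps and the ceiling is div_o3(). */
    	u32 deltaTWATM = div_o3(7500, clock_period_ps);
    	if (deltaTWATM < 8)
    		deltaTWATM = 8;

    	/* Old: ramp_down_wait = src_clock_period * 12.0f (ns).
    	 * New: multiply the ps period, then scale back to ns with / 1000. */
    	u32 ramp_down_wait_ns = clock_period_ps * 12 / 1000;

    	printf("period=%u ps, deltaTWATM=%u, ramp_down_wait=%u ns\n",
    	       clock_period_ps, deltaTWATM, ramp_down_wait_ns);
    	return 0;
    }

With the assumed 1600 MHz rate this prints a 625 ps period, deltaTWATM = 12 and ramp_down_wait = 7 ns, matching what the removed float code would have produced after truncation, which is the point of the integer rewrite.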