soc/mediatek/mt8192: Do dramc pre-settings before calibration

Before calibration, dramc resets the delay of each PHY IO, calculates
the TX path timing, and sets CKE to be rank independent.

Signed-off-by: Huayang Duan <huayang.duan@mediatek.com>
Change-Id: I071eca037f89a916d6cfaf5b008d64f2b4a269a9
Reviewed-on: https://review.coreboot.org/c/coreboot/+/44706
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Yu-Ping Wu <yupingso@google.com>
Author: Huayang Duan
Committer: Hung-Te Lin
Date: 2020-06-23 11:25:41 +08:00
Parent: 32ed65611d
Commit: 3960351141
2 changed files with 278 additions and 0 deletions


@@ -108,6 +108,273 @@ void global_option_init(struct ddr_cali *cali)
        set_dqo1_pinmux_mapping(cali);
}

static u32 get_write_latency_by_mr(u8 mr_wl)
{
        u32 write_latency;

        switch (mr_wl) {
        case 0:
                write_latency = 4;
                break;
        case 1:
                write_latency = 6;
                break;
        case 2:
                write_latency = 8;
                break;
        case 3:
                write_latency = 10;
                break;
        case 4:
                write_latency = 12;
                break;
        case 5:
                write_latency = 14;
                break;
        case 6:
                write_latency = 16;
                break;
        case 7:
                write_latency = 18;
                break;
        default:
                dramc_err("error: unexpected mr_wl: %x\n", mr_wl);
                return 0;
        }

        dramc_info("mr_wl: %x map to WriteLatency: %d\n", mr_wl, write_latency);
        return write_latency;
}
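(Editor's note: the MR-to-latency table above is linear in the code. The helper below is only an illustrative closed form of the same mapping, not part of the driver; the function name and stdint types are assumptions.)

#include <stdint.h>

/* Illustrative only: MR codes 0..7 map linearly to WL = 4, 6, ..., 18. */
static uint32_t write_latency_from_code(uint8_t mr_wl)
{
        if (mr_wl > 7)
                return 0;       /* unexpected code, mirrors the error path above */
        return 4 + 2u * mr_wl;
}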

static void tx_path_algorithm(const struct ddr_cali *cali)
{
        u8 write_latency, wl_mr;
        const u8 ckr = 1;
        u8 dqs_total_ui;
        u8 dqs_oe_total_ui;
        u8 dqs_mck, dqs_ui;
        u8 dqs_oe_mck, dqs_oe_ui;
        u8 shift;
        const u8 tx_dq_oe_shift = 3;

        wl_mr = (cali->mr_value->mr02[get_fsp(cali)] & 0x3f) >> 3;
        shift = get_mck2ui_div_shift(cali);
        write_latency = get_write_latency_by_mr(wl_mr);

        dqs_total_ui = write_latency * ckr * 2 + 1;
        dqs_oe_total_ui = dqs_total_ui - tx_dq_oe_shift;
        dqs_ui = dqs_total_ui - ((dqs_total_ui >> shift) << shift);
        dqs_mck = dqs_total_ui >> shift;
        dqs_oe_ui = dqs_oe_total_ui - ((dqs_oe_total_ui >> shift) << shift);
        dqs_oe_mck = dqs_oe_total_ui >> shift;

        dramc_dbg("[TX_path_calculate] write_latency=%u, DQS_TotalUI=%u\n",
                  write_latency, dqs_total_ui);
        dramc_dbg("[TX_path_calculate] DQS = (%u,%u) DQS_OE = (%u,%u)\n",
                  dqs_mck, dqs_ui, dqs_oe_mck, dqs_oe_ui);

        for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
                SET32_BITFIELDS(&ch[chn].ao.shu_selph_dqs0,
                                SHU_SELPH_DQS0_TXDLY_DQS0, dqs_mck,
                                SHU_SELPH_DQS0_TXDLY_DQS1, dqs_mck,
                                SHU_SELPH_DQS0_TXDLY_OEN_DQS0, dqs_oe_mck,
                                SHU_SELPH_DQS0_TXDLY_OEN_DQS1, dqs_oe_mck);
                SET32_BITFIELDS(&ch[chn].ao.shu_selph_dqs1,
                                SHU_SELPH_DQS1_DLY_DQS0, dqs_ui,
                                SHU_SELPH_DQS1_DLY_DQS1, dqs_ui,
                                SHU_SELPH_DQS1_DLY_OEN_DQS0, dqs_oe_ui,
                                SHU_SELPH_DQS1_DLY_OEN_DQS1, dqs_oe_ui);
        }
}
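(Editor's note: to make the MCK/UI split above concrete, here is a minimal standalone sketch of the same arithmetic. It assumes a 1:8 MCK-to-UI ratio (shift = 3) and MR write latency code 2 (WL = 8); these numbers are illustrative, not taken from the driver.)

#include <stdio.h>

int main(void)
{
        const unsigned int shift = 3;           /* assumed: 1 MCK = 8 UI */
        const unsigned int write_latency = 8;   /* assumed: mr_wl = 2 -> WL = 8 */
        const unsigned int ckr = 1;
        const unsigned int tx_dq_oe_shift = 3;

        unsigned int dqs_total_ui = write_latency * ckr * 2 + 1;       /* 17 */
        unsigned int dqs_oe_total_ui = dqs_total_ui - tx_dq_oe_shift;  /* 14 */

        /* Split the UI counts into whole MCK cycles plus the remaining UI. */
        unsigned int dqs_mck = dqs_total_ui >> shift;                     /* 2 */
        unsigned int dqs_ui = dqs_total_ui - (dqs_mck << shift);          /* 1 */
        unsigned int dqs_oe_mck = dqs_oe_total_ui >> shift;               /* 1 */
        unsigned int dqs_oe_ui = dqs_oe_total_ui - (dqs_oe_mck << shift); /* 6 */

        printf("DQS = (%u,%u), DQS_OE = (%u,%u)\n",
               dqs_mck, dqs_ui, dqs_oe_mck, dqs_oe_ui);
        return 0;
}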

void cke_fix_onoff(const struct ddr_cali *cali, u8 chn, u8 rank, int option)
{
        u8 cke_on = 0, cke_off = 0;
        bool set_rank1 = (rank == RANK_MAX) && (cali->support_ranks == DUAL_RANK_DDR);

        if (option != CKE_DYNAMIC) {
                cke_on = option;
                cke_off = 1 - option;
        }

        if (rank == RANK_0 || rank == RANK_MAX) {
                SET32_BITFIELDS(&ch[chn].ao.ckectrl,
                                CKECTRL_CKEFIXOFF, cke_off,
                                CKECTRL_CKEFIXON, cke_on);
        }
        if (rank == RANK_1 || set_rank1) {
                SET32_BITFIELDS(&ch[chn].ao.ckectrl,
                                CKECTRL_CKE1FIXOFF, cke_off,
                                CKECTRL_CKE1FIXON, cke_on);
        }
}

static void set_cke2rank_independent(void)
{
        for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
                SET32_BITFIELDS(&ch[chn].ao.rkcfg, RKCFG_CKE2RANK, 0);
                SET32_BITFIELDS(&ch[chn].ao.ckectrl,
                                CKECTRL_CKE2RANK_OPT3, 0,
                                CKECTRL_CKE2RANK_OPT, 0,
                                CKECTRL_CKE2RANK_OPT2, 1,
                                CKECTRL_CKE2RANK_OPT5, 0,
                                CKECTRL_CKE2RANK_OPT6, 0,
                                CKECTRL_CKE2RANK_OPT7, 1,
                                CKECTRL_CKE2RANK_OPT8, 0,
                                CKECTRL_CKETIMER_SEL, 0,
                                CKECTRL_FASTWAKE_SEL, 1,
                                CKECTRL_CKEWAKE_SEL, 0);
                SET32_BITFIELDS(&ch[chn].ao.shu_dcm_ctrl0,
                                SHU_DCM_CTRL0_FASTWAKE, 1,
                                SHU_DCM_CTRL0_FASTWAKE2, 1);
                SET32_BITFIELDS(&ch[chn].ao.actiming_ctrl, ACTIMING_CTRL_CLKWITRFC, 0);
        }
}

static void dramc_hw_gating_onoff(bool is_on)
{
        for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
                SET32_BITFIELDS(&ch[chn].phy_ao.misc_dvfsctl2,
                                MISC_DVFSCTL2_R_DVFS_OPTION, is_on,
                                MISC_DVFSCTL2_R_DVFS_PARK_N, is_on);
                SET32_BITFIELDS(&ch[chn].phy_ao.misc_stbcal2,
                                MISC_STBCAL2_STB_GERRSTOP, is_on);
                SET32_BITFIELDS(&ch[chn].phy_ao.misc_shu_stbcal,
                                MISC_SHU_STBCAL_STBCALEN, is_on);
                SET32_BITFIELDS(&ch[chn].phy_ao.misc_shu_stbcal,
                                MISC_SHU_STBCAL_STB_SELPHCALEN, is_on);
        }
}

static void dramc_reset_delay_chain_before_calibration(void)
{
        u32 bc_bak = dramc_get_broadcast();

        dramc_set_broadcast(DRAMC_BROADCAST_OFF);
        for (u8 rk = RANK_0; rk < RANK_MAX; rk++) {
                for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
                        struct ddrphy_ao_regs *phy_ao = &ch[chn].phy_ao;

                        SET32_BITFIELDS(&phy_ao->ca_rk[rk].shu_r0_ca_txdly0,
                                        SHU_R0_CA_TXDLY0_TX_ARCA0_DLY, 0,
                                        SHU_R0_CA_TXDLY0_TX_ARCA1_DLY, 0,
                                        SHU_R0_CA_TXDLY0_TX_ARCA2_DLY, 0,
                                        SHU_R0_CA_TXDLY0_TX_ARCA3_DLY, 0);
                        SET32_BITFIELDS(&phy_ao->ca_rk[rk].shu_r0_ca_txdly1,
                                        SHU_R0_CA_TXDLY1_TX_ARCA4_DLY, 0,
                                        SHU_R0_CA_TXDLY1_TX_ARCA5_DLY, 0,
                                        SHU_R0_CA_TXDLY1_TX_ARCA6_DLY, 0,
                                        SHU_R0_CA_TXDLY1_TX_ARCA7_DLY, 0);
                        SET32_BITFIELDS(&phy_ao->byte[0].rk[rk].shu_r0_b0_txdly0,
                                        SHU_R0_B0_TXDLY0_TX_ARDQ0_DLY_B0, 0,
                                        SHU_R0_B0_TXDLY0_TX_ARDQ1_DLY_B0, 0,
                                        SHU_R0_B0_TXDLY0_TX_ARDQ2_DLY_B0, 0,
                                        SHU_R0_B0_TXDLY0_TX_ARDQ3_DLY_B0, 0);
                        SET32_BITFIELDS(&phy_ao->byte[0].rk[rk].shu_r0_b0_txdly1,
                                        SHU_R0_B0_TXDLY1_TX_ARDQ4_DLY_B0, 0,
                                        SHU_R0_B0_TXDLY1_TX_ARDQ5_DLY_B0, 0,
                                        SHU_R0_B0_TXDLY1_TX_ARDQ6_DLY_B0, 0,
                                        SHU_R0_B0_TXDLY1_TX_ARDQ7_DLY_B0, 0);
                        SET32_BITFIELDS(&phy_ao->byte[1].rk[rk].shu_r0_b0_txdly0,
                                        SHU_R0_B1_TXDLY0_TX_ARDQ0_DLY_B1, 0,
                                        SHU_R0_B1_TXDLY0_TX_ARDQ1_DLY_B1, 0,
                                        SHU_R0_B1_TXDLY0_TX_ARDQ2_DLY_B1, 0,
                                        SHU_R0_B1_TXDLY0_TX_ARDQ3_DLY_B1, 0);
                        SET32_BITFIELDS(&phy_ao->byte[1].rk[rk].shu_r0_b0_txdly1,
                                        SHU_R0_B1_TXDLY1_TX_ARDQ4_DLY_B1, 0,
                                        SHU_R0_B1_TXDLY1_TX_ARDQ5_DLY_B1, 0,
                                        SHU_R0_B1_TXDLY1_TX_ARDQ6_DLY_B1, 0,
                                        SHU_R0_B1_TXDLY1_TX_ARDQ7_DLY_B1, 0);
                        SET32_BITFIELDS(&phy_ao->byte[0].rk[rk].shu_r0_b0_txdly3,
                                        SHU_R0_B0_TXDLY3_TX_ARDQM0_DLY_B0, 0x0);
                        SET32_BITFIELDS(&phy_ao->byte[1].rk[rk].shu_r0_b0_txdly3,
                                        SHU_R0_B1_TXDLY3_TX_ARDQM0_DLY_B1, 0x0);
                }
        }
        dramc_set_broadcast(bc_bak);
}

static void dramc_init(const struct ddr_cali *cali)
{
        dramc_reset_delay_chain_before_calibration();
}

static void dramc_before_calibration(const struct ddr_cali *cali)
{
        u8 ma_type = get_column_num();
        dram_freq_grp freq_group = cali->freq_group;
        u8 dis_imp_hw = (freq_group > DDRFREQ_1200) ? 0 : 1;

        dramc_hw_gating_onoff(false);
        cke_fix_onoff(cali, CHANNEL_A, RANK_MAX, CKE_FIXON);
        cke_fix_onoff(cali, CHANNEL_B, RANK_MAX, CKE_FIXON);
        set_cke2rank_independent();

        for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
                SET32_BITFIELDS(&ch[chn].ao.shu_tx_set0, SHU_TX_SET0_DBIWR, 0x0);
                SET32_BITFIELDS(&ch[chn].phy_ao.misc_shu_impedamce_upd_dis1,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_ODTN_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_DRVN_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_DRVP_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_ODTN_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_DRVN_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_DRVP_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_DQ_ODTN_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_DQ_DRVN_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_DQ_DRVP_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_DQS_ODTN_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_DQS_DRVN_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_DQS_DRVP_UPD_DIS, dis_imp_hw,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_DRVP_UPD_DIS, 1,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_DRVN_UPD_DIS, 1,
                                MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_ODTN_UPD_DIS, 1);
                SET32_BITFIELDS(&ch[chn].phy_ao.shu_misc_impcal1,
                                SHU_MISC_IMPCAL1_IMPCALCNT, dis_imp_hw ? 0x0 : 0x40);
                SET32_BITFIELDS(&ch[chn].phy_ao.shu_misc_drving1,
                                SHU_MISC_DRVING1_DIS_IMPCAL_HW, dis_imp_hw);
                SET32_BITFIELDS(&ch[chn].phy_ao.shu_misc_drving1,
                                SHU_MISC_DRVING1_DIS_IMP_ODTN_TRACK, dis_imp_hw);
                SET32_BITFIELDS(&ch[chn].phy_ao.shu_misc_drving2,
                                SHU_MISC_DRVING2_DIS_IMPCAL_ODT_EN, dis_imp_hw);
                SET32_BITFIELDS(&ch[chn].phy_ao.shu_ca_cmd12,
                                SHU_CA_CMD12_RG_RIMP_UNTERM_EN, dis_imp_hw);
                SET32_BITFIELDS(&ch[chn].phy_ao.misc_clk_ctrl,
                                MISC_CLK_CTRL_DVFS_CLK_MEM_SEL, 0,
                                MISC_CLK_CTRL_DVFS_MEM_CK_MUX_UPDATE_EN, 0);
                SET32_BITFIELDS(&ch[chn].ao.shu_zq_set0,
                                SHU_ZQ_SET0_ZQCSCNT, 0x1ff,
                                SHU_ZQ_SET0_TZQLAT, 0x1b);
                SET32_BITFIELDS(&ch[chn].ao.zq_set0,
                                ZQ_SET0_ZQCSDUAL, 1,
                                ZQ_SET0_ZQCSMASK_OPT, 0,
                                ZQ_SET0_ZQMASK_CGAR, 0,
                                ZQ_SET0_ZQCS_MASK_SEL_CGAR, 0);
        }

        SET32_BITFIELDS(&ch[0].ao.zq_set0, ZQ_SET0_ZQCSMASK, 1);
        SET32_BITFIELDS(&ch[1].ao.zq_set0, ZQ_SET0_ZQCSMASK, 0);

        for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
                SET32_BITFIELDS(&ch[chn].ao.zq_set0, ZQ_SET0_ZQCS_MASK_SEL, 0);
                SET32_BITFIELDS(&ch[chn].phy_ao.byte[0].shu_b0_dq2,
                                SHU_B0_DQ2_RG_ARPI_OFFSET_LAT_EN_B0, 0);
                SET32_BITFIELDS(&ch[chn].phy_ao.byte[1].shu_b0_dq2,
                                SHU_B1_DQ2_RG_ARPI_OFFSET_LAT_EN_B1, 0);
                SET32_BITFIELDS(&ch[chn].ao.dcm_sub_ctrl,
                                DCM_SUB_CTRL_SUBCLK_CTRL_TX_AUTOK, 0x0);
                SET32_BITFIELDS(&ch[chn].ao.dcm_sub_ctrl,
                                DCM_SUB_CTRL_SUBCLK_CTRL_TX_TRACKING, 0);
                SET32_BITFIELDS(&ch[chn].ao.dqsoscr, DQSOSCR_DQSOSCRDIS, 0x1);
                SET32_BITFIELDS(&ch[chn].ao.refctrl0, REFCTRL0_REFDIS, 0x1);
                SET32_BITFIELDS(&ch[chn].ao.shu_matype, SHU_MATYPE_MATYPE, ma_type);
        }

        tx_path_algorithm(cali);
}

void dfs_init_for_calibration(const struct ddr_cali *cali)
{
        dramc_init(cali);
        dramc_before_calibration(cali);
}


@@ -314,6 +314,17 @@ void emi_mdl_init(const struct emi_mdl *emi_con)
        write32(&ch[chn].emi_chn.cona, emi_con->chn_cona_val);
}

u32 get_column_num(void)
{
        u32 ma_type = read32(&emi_reg->cona);
        u32 ma_type_r0 = ((ma_type >> 20) & 0x3) + 1;
        u32 ma_type_r1 = ((ma_type >> 22) & 0x3) + 1;

        ma_type = MIN(ma_type_r0, ma_type_r1);
        return ma_type;
}
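(Editor's note: a minimal standalone sketch of the same CONA decoding follows. The register value is made up purely for illustration: bits [21:20] = 2 for rank 0 and bits [23:22] = 1 for rank 1.)

#include <stdio.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))

int main(void)
{
        /* Hypothetical CONA value: rank 0 field = 2, rank 1 field = 1. */
        unsigned int cona = (2u << 20) | (1u << 22);

        unsigned int ma_type_r0 = ((cona >> 20) & 0x3) + 1;    /* 3 */
        unsigned int ma_type_r1 = ((cona >> 22) & 0x3) + 1;    /* 2 */

        /* The smaller of the two per-rank settings is used. */
        printf("ma_type = %u\n", MIN(ma_type_r0, ma_type_r1)); /* prints 2 */
        return 0;
}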

static void emi_sw_setting(void)
{
        setbits32(&emi_mpu->mpu_ctrl_d[1], BIT(4));