X-Git-Url: https://git.librecmc.org/?a=blobdiff_plain;f=drivers%2Fddr%2Ffsl%2Fctrl_regs.c;h=24fd36602d21cf834cb2d98716909fdbfc08849f;hb=d188b1130226d304fb3cd2c901b968eca927f39f;hp=690e73dacf6bea41af718be7bbcd680801e863bd;hpb=57c6941b433722ab83a50dab35b8ab5a0954942a;p=oweals%2Fu-boot.git

diff --git a/drivers/ddr/fsl/ctrl_regs.c b/drivers/ddr/fsl/ctrl_regs.c
index 690e73dacf..24fd36602d 100644
--- a/drivers/ddr/fsl/ctrl_regs.c
+++ b/drivers/ddr/fsl/ctrl_regs.c
@@ -313,8 +313,28 @@ static void set_timing_cfg_0(const unsigned int ctrl_num,
 #ifdef CONFIG_SYS_FSL_DDR4
 	/* tXP=max(4nCK, 6ns) */
 	int txp = max((int)mclk_ps * 4, 6000);	/* unit=ps */
-	trwt_mclk = 2;
-	twrt_mclk = 1;
+	unsigned int data_rate = get_ddr_freq(ctrl_num);
+
+	/* for faster clock, need more time for data setup */
+	trwt_mclk = (data_rate/1000000 > 1900) ? 3 : 2;
+
+	/*
+	 * for single quad-rank DIMM and two-slot DIMMs
+	 * to avoid ODT overlap
+	 */
+	switch (avoid_odt_overlap(dimm_params)) {
+	case 2:
+		twrt_mclk = 2;
+		twwt_mclk = 2;
+		trrt_mclk = 2;
+		break;
+	default:
+		twrt_mclk = 1;
+		twwt_mclk = 1;
+		trrt_mclk = 0;
+		break;
+	}
+
 	act_pd_exit_mclk = picos_to_mclk(ctrl_num, txp);
 	pre_pd_exit_mclk = act_pd_exit_mclk;
 	/*
@@ -338,7 +358,7 @@ static void set_timing_cfg_0(const unsigned int ctrl_num,
 	 */
 	txp = max((int)mclk_ps * 3, (mclk_ps > 1540 ? 7500 : 6000));
 
-	ip_rev = fsl_ddr_get_version();
+	ip_rev = fsl_ddr_get_version(ctrl_num);
 	if (ip_rev >= 0x40700) {
 		/*
 		 * MRS_CYC = max(tMRD, tMOD)
@@ -544,7 +564,7 @@ static void set_timing_cfg_1(const unsigned int ctrl_num,
 		 * we need set extend bit for it at
 		 * TIMING_CFG_3[EXT_CASLAT]
 		 */
-		if (fsl_ddr_get_version() <= 0x40400)
+		if (fsl_ddr_get_version(ctrl_num) <= 0x40400)
 			caslat_ctrl = 2 * cas_latency - 1;
 		else
 			caslat_ctrl = (cas_latency - 1) << 1;
@@ -689,7 +709,7 @@ static void set_timing_cfg_2(const unsigned int ctrl_num,
 		| ((add_lat_mclk & 0xf) << 28)
 		| ((cpo & 0x1f) << 23)
 		| ((wr_lat & 0xf) << 19)
-		| ((wr_lat & 0x10) << 14)
+		| (((wr_lat & 0x10) >> 4) << 18)
 		| ((rd_to_pre & RD_TO_PRE_MASK) << RD_TO_PRE_SHIFT)
 		| ((wr_data_delay & WR_DATA_DELAY_MASK) << WR_DATA_DELAY_SHIFT)
 		| ((cke_pls & 0x7) << 6)
@@ -855,7 +875,7 @@ static void set_ddr_sdram_cfg_2(const unsigned int ctrl_num,
 			break;
 		}
 	}
-
+	sr_ie = popts->self_refresh_interrupt_en;
 	num_pr = 1;	/* Make this configurable */
 
 	/*
@@ -875,11 +895,15 @@
 	slow = get_ddr_freq(ctrl_num) < 1249000000;
 #endif
 
-	if (popts->registered_dimm_en) {
+	if (popts->registered_dimm_en)
 		rcw_en = 1;
-		ap_en = popts->ap_en;
-	} else {
+
+	/* DDR4 can have address parity for UDIMM and discrete */
+	if ((CONFIG_FSL_SDRAM_TYPE != SDRAM_TYPE_DDR4) &&
+	    (!popts->registered_dimm_en)) {
 		ap_en = 0;
+	} else {
+		ap_en = popts->ap_en;
 	}
 
 	x4_en = popts->x4_en ? 1 : 0;
@@ -1113,16 +1137,71 @@ static void set_ddr_sdram_mode_9(fsl_ddr_cfg_regs_t *ddr,
 	int i;
 	unsigned short esdmode4 = 0;	/* Extended SDRAM mode 4 */
 	unsigned short esdmode5;	/* Extended SDRAM mode 5 */
+	int rtt_park = 0;
+	bool four_cs = false;
+	const unsigned int mclk_ps = get_memory_clk_period_ps(0);
+
+#if CONFIG_CHIP_SELECTS_PER_CTRL == 4
+	if ((ddr->cs[0].config & SDRAM_CS_CONFIG_EN) &&
+	    (ddr->cs[1].config & SDRAM_CS_CONFIG_EN) &&
+	    (ddr->cs[2].config & SDRAM_CS_CONFIG_EN) &&
+	    (ddr->cs[3].config & SDRAM_CS_CONFIG_EN))
+		four_cs = true;
+#endif
+
+	if (ddr->cs[0].config & SDRAM_CS_CONFIG_EN) {
+		esdmode5 = 0x00000500;	/* Data mask enable, RTT_PARK CS0 */
+		rtt_park = four_cs ? 0 : 1;
+	} else {
+		esdmode5 = 0x00000400;	/* Data mask enabled */
+	}
 
-	esdmode5 = 0x00000400;	/* Data mask enabled */
+	/* set command/address parity latency */
+	if (ddr->ddr_sdram_cfg_2 & SDRAM_CFG2_AP_EN) {
+		if (mclk_ps >= 935) {
+			/* for DDR4-1600/1866/2133 */
+			esdmode5 |= DDR_MR5_CA_PARITY_LAT_4_CLK;
+		} else if (mclk_ps >= 833) {
+			/* for DDR4-2400 */
+			esdmode5 |= DDR_MR5_CA_PARITY_LAT_5_CLK;
+		} else {
+			printf("parity: mclk_ps = %d not supported\n", mclk_ps);
+		}
+	}
 
 	ddr->ddr_sdram_mode_9 = (0
 				 | ((esdmode4 & 0xffff) << 16)
 				 | ((esdmode5 & 0xffff) << 0)
 				);
+
+	/* Normally only the first enabled CS use 0x500, others use 0x400
+	 * But when four chip-selects are all enabled, all mode registers
+	 * need 0x500 to park.
+	 */
+
 	debug("FSLDDR: ddr_sdram_mode_9) = 0x%08x\n", ddr->ddr_sdram_mode_9);
 	if (unq_mrs_en) {	/* unique mode registers are supported */
 		for (i = 1; i < CONFIG_CHIP_SELECTS_PER_CTRL; i++) {
+			if (!rtt_park &&
+			    (ddr->cs[i].config & SDRAM_CS_CONFIG_EN)) {
+				esdmode5 |= 0x00000500;	/* RTT_PARK */
+				rtt_park = four_cs ? 0 : 1;
+			} else {
+				esdmode5 = 0x00000400;
+			}
+
+			if (ddr->ddr_sdram_cfg_2 & SDRAM_CFG2_AP_EN) {
+				if (mclk_ps >= 935) {
+					/* for DDR4-1600/1866/2133 */
+					esdmode5 |= DDR_MR5_CA_PARITY_LAT_4_CLK;
+				} else if (mclk_ps >= 833) {
+					/* for DDR4-2400 */
+					esdmode5 |= DDR_MR5_CA_PARITY_LAT_5_CLK;
+				} else {
+					printf("parity: mclk_ps = %d not supported\n",
					       mclk_ps);
+				}
+			}
+
 			switch (i) {
 			case 1:
 				ddr->ddr_sdram_mode_11 = (0
@@ -1167,6 +1246,9 @@ static void set_ddr_sdram_mode_10(const unsigned int ctrl_num,
 
 	esdmode6 = ((tccdl_min - 4) & 0x7) << 10;
 
+	if (popts->ddr_cdr2 & DDR_CDR2_VREF_RANGE_2)
+		esdmode6 |= 1 << 6;	/* Range 2 */
+
 	ddr->ddr_sdram_mode_10 = (0
 				  | ((esdmode6 & 0xffff) << 16)
 				  | ((esdmode7 & 0xffff) << 0)
@@ -1747,9 +1829,24 @@ static void set_ddr_sdram_clk_cntl(fsl_ddr_cfg_regs_t *ddr,
 				   const memctl_options_t *popts)
 {
 	unsigned int clk_adjust;	/* Clock adjust */
+	unsigned int ss_en = 0;		/* Source synchronous enable */
 
-	clk_adjust = popts->clk_adjust;
-	ddr->ddr_sdram_clk_cntl = (clk_adjust & 0xF) << 23;
+#if defined(CONFIG_MPC8541) || defined(CONFIG_MPC8555)
+	/* Per FSL Application Note: AN2805 */
+	ss_en = 1;
+#endif
+	if (fsl_ddr_get_version(0) >= 0x40701) {
+		/* clk_adjust in 5-bits on T-series and LS-series */
+		clk_adjust = (popts->clk_adjust & 0x1F) << 22;
+	} else {
+		/* clk_adjust in 4-bits on earlier MPC85xx and P-series */
+		clk_adjust = (popts->clk_adjust & 0xF) << 23;
+	}
+
+	ddr->ddr_sdram_clk_cntl = (0
+				   | ((ss_en & 0x1) << 31)
+				   | clk_adjust
+				  );
 
 	debug("FSLDDR: clk_cntl = 0x%08x\n", ddr->ddr_sdram_clk_cntl);
 }
@@ -1781,6 +1878,7 @@ static void set_timing_cfg_4(fsl_ddr_cfg_regs_t *ddr,
 	unsigned int wrt = 0; /* Write-to-read turnaround for same CS */
 	unsigned int rrt = 0; /* Read-to-read turnaround for same CS */
 	unsigned int wwt = 0; /* Write-to-write turnaround for same CS */
+	unsigned int trwt_mclk = 0;	/* ext_rwt */
 	unsigned int dll_lock = 0; /* DDR SDRAM DLL Lock Time */
 
 #if defined(CONFIG_SYS_FSL_DDR3) || defined(CONFIG_SYS_FSL_DDR4)
@@ -1794,17 +1892,21 @@ static void set_timing_cfg_4(fsl_ddr_cfg_regs_t *ddr,
 		wwt = 2;	/* BL/2 + 2 clocks */
 	}
 #endif
-
 #ifdef CONFIG_SYS_FSL_DDR4
 	dll_lock = 2;	/* tDLLK = 1024 clocks */
 #elif defined(CONFIG_SYS_FSL_DDR3)
 	dll_lock = 1;	/* tDLLK = 512 clocks from spec */
 #endif
+
+	if (popts->trwt_override)
+		trwt_mclk = popts->trwt;
+
 	ddr->timing_cfg_4 = (0
 			     | ((rwt & 0xf) << 28)
 			     | ((wrt & 0xf) << 24)
 			     | ((rrt & 0xf) << 20)
 			     | ((wwt & 0xf) << 16)
+			     | ((trwt_mclk & 0xc) << 12)
 			     | (dll_lock & 0x3)
 			    );
 	debug("FSLDDR: timing_cfg_4 = 0x%08x\n", ddr->timing_cfg_4);
@@ -1862,12 +1964,25 @@ static void set_timing_cfg_7(const unsigned int ctrl_num,
 			      const common_timing_params_t *common_dimm)
 {
 	unsigned int txpr, tcksre, tcksrx;
-	unsigned int cke_rst, cksre, cksrx, par_lat, cs_to_cmd;
+	unsigned int cke_rst, cksre, cksrx, par_lat = 0, cs_to_cmd;
+	const unsigned int mclk_ps = get_memory_clk_period_ps(ctrl_num);
 
 	txpr = max(5U, picos_to_mclk(ctrl_num, common_dimm->trfc1_ps + 10000));
 	tcksre = max(5U, picos_to_mclk(ctrl_num, 10000));
 	tcksrx = max(5U, picos_to_mclk(ctrl_num, 10000));
-	par_lat = 0;
+
+	if (ddr->ddr_sdram_cfg_2 & SDRAM_CFG2_AP_EN) {
+		if (mclk_ps >= 935) {
+			/* parity latency 4 clocks in case of 1600/1866/2133 */
+			par_lat = 4;
+		} else if (mclk_ps >= 833) {
+			/* parity latency 5 clocks for DDR4-2400 */
+			par_lat = 5;
+		} else {
+			printf("parity: mclk_ps = %d not supported\n", mclk_ps);
+		}
+	}
+
 	cs_to_cmd = 0;
 
 	if (txpr <= 200)
@@ -1962,31 +2077,41 @@ static void set_ddr_dq_mapping(fsl_ddr_cfg_regs_t *ddr,
 				const dimm_params_t *dimm_params)
 {
 	unsigned int acc_ecc_en = (ddr->ddr_sdram_cfg >> 2) & 0x1;
+	int i;
+
+	for (i = 0; i < CONFIG_DIMM_SLOTS_PER_CTLR; i++) {
+		if (dimm_params[i].n_ranks)
+			break;
+	}
+	if (i >= CONFIG_DIMM_SLOTS_PER_CTLR) {
+		puts("DDR error: no DIMM found!\n");
+		return;
+	}
 
-	ddr->dq_map_0 = ((dimm_params->dq_mapping[0] & 0x3F) << 26) |
-		((dimm_params->dq_mapping[1] & 0x3F) << 20) |
-		((dimm_params->dq_mapping[2] & 0x3F) << 14) |
-		((dimm_params->dq_mapping[3] & 0x3F) << 8) |
-		((dimm_params->dq_mapping[4] & 0x3F) << 2);
+	ddr->dq_map_0 = ((dimm_params[i].dq_mapping[0] & 0x3F) << 26) |
+			((dimm_params[i].dq_mapping[1] & 0x3F) << 20) |
+			((dimm_params[i].dq_mapping[2] & 0x3F) << 14) |
+			((dimm_params[i].dq_mapping[3] & 0x3F) << 8) |
+			((dimm_params[i].dq_mapping[4] & 0x3F) << 2);
 
-	ddr->dq_map_1 = ((dimm_params->dq_mapping[5] & 0x3F) << 26) |
-		((dimm_params->dq_mapping[6] & 0x3F) << 20) |
-		((dimm_params->dq_mapping[7] & 0x3F) << 14) |
-		((dimm_params->dq_mapping[10] & 0x3F) << 8) |
-		((dimm_params->dq_mapping[11] & 0x3F) << 2);
+	ddr->dq_map_1 = ((dimm_params[i].dq_mapping[5] & 0x3F) << 26) |
+			((dimm_params[i].dq_mapping[6] & 0x3F) << 20) |
+			((dimm_params[i].dq_mapping[7] & 0x3F) << 14) |
+			((dimm_params[i].dq_mapping[10] & 0x3F) << 8) |
+			((dimm_params[i].dq_mapping[11] & 0x3F) << 2);
 
-	ddr->dq_map_2 = ((dimm_params->dq_mapping[12] & 0x3F) << 26) |
-		((dimm_params->dq_mapping[13] & 0x3F) << 20) |
-		((dimm_params->dq_mapping[14] & 0x3F) << 14) |
-		((dimm_params->dq_mapping[15] & 0x3F) << 8) |
-		((dimm_params->dq_mapping[16] & 0x3F) << 2);
+	ddr->dq_map_2 = ((dimm_params[i].dq_mapping[12] & 0x3F) << 26) |
+			((dimm_params[i].dq_mapping[13] & 0x3F) << 20) |
+			((dimm_params[i].dq_mapping[14] & 0x3F) << 14) |
+			((dimm_params[i].dq_mapping[15] & 0x3F) << 8) |
+			((dimm_params[i].dq_mapping[16] & 0x3F) << 2);
 
 	/* dq_map for ECC[4:7] is set to 0 if accumulated ECC is enabled */
-	ddr->dq_map_3 = ((dimm_params->dq_mapping[17] & 0x3F) << 26) |
-		((dimm_params->dq_mapping[8] & 0x3F) << 20) |
+	ddr->dq_map_3 = ((dimm_params[i].dq_mapping[17] & 0x3F) << 26) |
+			((dimm_params[i].dq_mapping[8] & 0x3F) << 20) |
 			(acc_ecc_en ? 0 :
-		 (dimm_params->dq_mapping[9] & 0x3F) << 14) |
-		dimm_params->dq_mapping_ors;
+			 (dimm_params[i].dq_mapping[9] & 0x3F) << 14) |
+			dimm_params[i].dq_mapping_ors;
 
 	debug("FSLDDR: dq_map_0 = 0x%08x\n", ddr->dq_map_0);
 	debug("FSLDDR: dq_map_1 = 0x%08x\n", ddr->dq_map_1);
@@ -2087,7 +2212,7 @@ static void set_ddr_wrlvl_cntl(fsl_ddr_cfg_regs_t *ddr, unsigned int wrlvl_en,
 	 * Write leveling start time
 	 * The value use for the DQS_ADJUST for the first sample
 	 * when write leveling is enabled. It probably needs to be
-	 * overriden per platform.
+	 * overridden per platform.
 	 */
 	wrlvl_start = 0x8;
 	/*
@@ -2349,7 +2474,7 @@ compute_fsl_memctl_config_regs(const unsigned int ctrl_num,
 	set_ddr_cdr1(ddr, popts);
 	set_ddr_cdr2(ddr, popts);
 	set_ddr_sdram_cfg(ddr, popts, common_dimm);
-	ip_rev = fsl_ddr_get_version();
+	ip_rev = fsl_ddr_get_version(ctrl_num);
 	if (ip_rev > 0x40400)
 		unq_mrs_en = 1;
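
Note on the DDR4-specific arithmetic in the hunks above: the command/address parity-latency selection added in set_ddr_sdram_mode_9() and set_timing_cfg_7(), and the wider CLK_ADJUST field in set_ddr_sdram_clk_cntl(), can be sanity-checked outside of U-Boot. The sketch below is a minimal standalone restatement of just that arithmetic, using the thresholds and shifts exactly as they appear in the diff; the helper name data_rate_to_mclk_ps() is hypothetical and is added here only for illustration, it is not part of the driver.

/* Standalone sketch of two calculations from the diff above.
 * Thresholds (935/833 ps) and field placements are taken from the hunks;
 * data_rate_to_mclk_ps() is a hypothetical helper for illustration only.
 */
#include <stdio.h>

/* MCLK period in ps from the DDR data rate in MT/s (one clock = two transfers) */
static unsigned int data_rate_to_mclk_ps(unsigned int data_rate_mts)
{
	return 2000000 / data_rate_mts;	/* 2e12 ps / (data_rate_mts * 1e6) */
}

/* DDR4 C/A parity latency in clocks, as selected in set_ddr_sdram_mode_9()
 * and set_timing_cfg_7(): 4 clocks down to DDR4-2133, 5 clocks for DDR4-2400.
 */
static int ca_parity_latency_clks(unsigned int mclk_ps)
{
	if (mclk_ps >= 935)
		return 4;	/* DDR4-1600/1866/2133 */
	if (mclk_ps >= 833)
		return 5;	/* DDR4-2400 */
	return -1;		/* faster speed bins are not handled in the diff */
}

/* CLK_ADJUST placement in ddr_sdram_clk_cntl, as in set_ddr_sdram_clk_cntl():
 * 5 bits at [26:22] when the DDR IP revision is >= 0x40701, 4 bits at [26:23]
 * on older controllers, with SS_EN in bit 31.
 */
static unsigned int clk_cntl_value(unsigned int ip_rev, unsigned int clk_adjust,
				   unsigned int ss_en)
{
	unsigned int adj = (ip_rev >= 0x40701) ?
			   (clk_adjust & 0x1F) << 22 :
			   (clk_adjust & 0xF) << 23;

	return ((ss_en & 0x1) << 31) | adj;
}

int main(void)
{
	unsigned int rates[] = { 1600, 1866, 2133, 2400 };
	unsigned int i;

	for (i = 0; i < sizeof(rates) / sizeof(rates[0]); i++) {
		unsigned int ps = data_rate_to_mclk_ps(rates[i]);

		printf("DDR4-%u: mclk_ps=%u, parity latency=%d clocks\n",
		       rates[i], ps, ca_parity_latency_clks(ps));
	}

	printf("clk_cntl (ip_rev 0x40701, clk_adjust 8): 0x%08x\n",
	       clk_cntl_value(0x40701, 8, 0));
	printf("clk_cntl (ip_rev 0x40400, clk_adjust 8): 0x%08x\n",
	       clk_cntl_value(0x40400, 8, 0));
	return 0;
}

For DDR4-1600/1866/2133 the MCLK period works out to 1250/1071/937 ps, all at or above the 935 ps threshold, so 4 clocks are chosen; DDR4-2400 gives 833 ps and 5 clocks, which matches both the mode-register (MR5) and TIMING_CFG_7 hunks.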