1 // SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
3 * (C) Copyright 2015 Google, Inc
4 * Copyright 2014 Rockchip Inc.
6 * Adapted from coreboot.
12 #include <dt-structs.h>
21 #include <asm/arch-rockchip/clock.h>
22 #include <asm/arch-rockchip/cru.h>
23 #include <asm/arch-rockchip/ddr_rk3288.h>
24 #include <asm/arch-rockchip/grf_rk3288.h>
25 #include <asm/arch-rockchip/pmu_rk3288.h>
26 #include <asm/arch-rockchip/sdram.h>
27 #include <asm/arch-rockchip/sdram_rk3288.h>
28 #include <linux/err.h>
29 #include <power/regulator.h>
30 #include <power/rk8xx_pmic.h>
/*
 * Per-channel register handles — members of struct chan_info (the struct
 * header/closing lines are not visible in this chunk of the file).
 */
33 struct rk3288_ddr_pctl *pctl;	/* DDR protocol controller (uPCTL) regs */
34 struct rk3288_ddr_publ *publ;	/* DDR PHY utility block (PUBL) regs */
35 struct rk3288_msch *msch;	/* memory scheduler (NoC) regs */
/*
 * Members of struct dram_info — SoC-level blocks used during init
 * (see dram_all_config(), sdram_init(), rk3288_dmc_probe() below).
 */
39 struct chan_info chan[2];	/* RK3288 has two DRAM channels */
42 struct rockchip_cru *cru;	/* clock & reset unit (soft resets) */
43 struct rk3288_grf *grf;	/* general register file */
44 struct rk3288_sgrf *sgrf;	/* secure GRF: soc_con2 holds the stride */
45 struct rk3288_pmu *pmu;	/* PMU: sys_reg[2] stores DRAM geometry */
/*
 * All parameters needed to bring up the SDRAM: per-channel geometry,
 * controller/PHY timings and base settings, read from DT (or OF_PLATDATA).
 * NOTE(review): the #endif for OF_PLATDATA and the closing brace are not
 * visible in this chunk.
 */
49 struct rk3288_sdram_params {
50 #if CONFIG_IS_ENABLED(OF_PLATDATA)
51 struct dtd_rockchip_rk3288_dmc of_plat;	/* decoded DT platdata */
53 struct rk3288_sdram_channel ch[2];	/* detected per-channel geometry */
54 struct rk3288_sdram_pctl_timing pctl_timing;	/* uPCTL timing registers */
55 struct rk3288_sdram_phy_timing phy_timing;	/* PUBL timing + mode regs */
56 struct rk3288_base_params base;	/* dramtype, freq, noc timing, odt... */
/*
 * NIU DDR configuration table: each entry packs (row - 12) and (col - 9)
 * into ROW/COL bit-fields.  sdram_get_niu_config() builds the same encoding
 * from the detected geometry and searches this table; the matching index
 * becomes base.ddrconfig.
 */
61 const int ddrconf_table[] = {
64 ((1 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
65 ((2 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
66 ((3 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
67 ((4 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
68 ((1 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
69 ((2 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
70 ((3 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
71 ((1 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
72 ((2 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
73 ((3 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
/*
 * Test word written/read back during column/row probing in
 * sdram_col_row_detect().
 * NOTE(review): "PATTEN" is a typo for "PATTERN"; renaming would require
 * touching every use site, so it is only flagged here.
 */
81 #define TEST_PATTEN 0x5aa5f00f
/* DQS gate training failure flags read from datx8 dxgsr[0] (per rank) */
82 #define DQS_GATE_TRAINING_ERROR_RANK0 (1 << 4)
83 #define DQS_GATE_TRAINING_ERROR_RANK1 (2 << 4)
85 #if defined(CONFIG_TPL_BUILD) || \
86 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
/*
 * Copy a block of n BYTES (iterated as 32-bit words) from src into the
 * contiguous register window starting at dest.
 * NOTE(review): the loop body (per-word writel and pointer advance) is not
 * visible in this chunk of the file.
 */
87 static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
91 for (i = 0; i < n / sizeof(u32); i++) {
/*
 * Assert (1) or de-assert (0) the soft resets for one channel's DDR
 * controller (ctl) and PHY (phy) via CRU softrst_con[10].  Each channel
 * owns a group of 5 reset bits, offset by 5 * ch.
 */
98 static void ddr_reset(struct rockchip_cru *cru, u32 ch, u32 ctl, u32 phy)
100 u32 phy_ctl_srstn_shift = 4 + 5 * ch;
101 u32 ctl_psrstn_shift = 3 + 5 * ch;
102 u32 ctl_srstn_shift = 2 + 5 * ch;
103 u32 phy_psrstn_shift = 1 + 5 * ch;
104 u32 phy_srstn_shift = 5 * ch;
/* Write all five reset bits for this channel in one masked access */
106 rk_clrsetreg(&cru->cru_softrst_con[10],
107 1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
108 1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
109 1 << phy_srstn_shift,
110 phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
111 ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
112 phy << phy_srstn_shift);
/*
 * Set only the PHY-ctl soft-reset bit of channel ch to n (1 = assert,
 * 0 = release); used by sdram_rank_bw_detect() before retrying training.
 */
115 static void ddr_phy_ctl_reset(struct rockchip_cru *cru, u32 ch, u32 n)
117 u32 phy_ctl_srstn_shift = 4 + 5 * ch;
119 rk_clrsetreg(&cru->cru_softrst_con[10],
120 1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
/*
 * Full reset sequence for one channel's PHY and controller: assert both
 * resets, pulse the AC and DATX8 DLL soft-resets, then release the resets
 * in stages.  Delay lines between the steps are not visible in this chunk
 * but the ordering here is what the hardware requires — do not reorder.
 */
123 static void phy_pctrl_reset(struct rockchip_cru *cru,
124 struct rk3288_ddr_publ *publ,
/* Assert both controller and PHY resets for this channel */
129 ddr_reset(cru, channel, 1, 1);
/* Pulse DLL soft reset: clear ... */
131 clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
132 for (i = 0; i < 4; i++)
133 clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);
/* ... then set again on the AC and all four byte-lane DLLs */
136 setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
137 for (i = 0; i < 4; i++)
138 setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);
/* Release PHY reset first, then controller reset */
141 ddr_reset(cru, channel, 1, 0);
143 ddr_reset(cru, channel, 0, 0);
/*
 * Configure PHY DLL bypass depending on the DDR clock:
 *  - freq <= 250 MHz: disable the DLLs and bypass them (PIR_DLLBYP),
 *    additionally bypassing SBIAS below/at 150 MHz;
 *  - otherwise: run with DLLs enabled, no bypass.
 */
147 static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
152 if (freq <= 250000000) {
153 if (freq <= 150000000)
154 clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
156 setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
157 setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
158 for (i = 0; i < 4; i++)
159 setbits_le32(&publ->datx8[i].dxdllcr,
162 setbits_le32(&publ->pir, PIR_DLLBYP);
/* High-speed path: DLLs active, bypass cleared */
164 clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
165 clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
166 for (i = 0; i < 4; i++) {
167 clrbits_le32(&publ->datx8[i].dxdllcr,
171 clrbits_le32(&publ->pir, PIR_DLLBYP);
/*
 * Program the DFI (DDR PHY interface) static configuration of the uPCTL:
 * init handshake, low-power clock gating, parity, and the fixed timing
 * parameters for data/clock enable paths.  Values are constants from the
 * reference (coreboot) bring-up code.
 */
175 static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
177 writel(DFI_INIT_START, &pctl->dfistcfg0);
178 writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
180 writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
181 writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,
/* Fixed DFI timing parameters (clocks) */
184 writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
185 writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
186 writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
187 writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
188 writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
189 writel(1, &pctl->dfitphyupdtype0);
191 /* cs0 and cs1 write odt enable */
192 writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
194 /* odt write length */
195 writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
196 /* phyupd and ctrlupd disabled */
197 writel(0, &pctl->dfiupdcfg);
/*
 * Set or clear the per-channel DDR 16-bit enable bit in GRF soc_con0.
 * NOTE(review): the "if (enable)" guard around the val assignment and the
 * final argument of rk_clrsetreg() are not visible in this chunk — verify
 * against the full source.
 */
200 static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
205 val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
206 DDR0_16BIT_EN_SHIFT);
208 rk_clrsetreg(&grf->soc_con0,
209 1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),
/*
 * Select DDR3 (true) vs non-DDR3 (false) mode for the channel's memory
 * scheduler via the MSCHx_MAINDDR3 bit in GRF soc_con0.
 */
213 static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
218 mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
219 val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
220 MSCH0_MAINDDR3_SHIFT);
221 rk_clrsetreg(&grf->soc_con0, mask, val);
/*
 * Configure three per-channel LPDDR3-related GRF soc_con2 bits at once:
 * LPDDR3 enable, uPCTL burst disable (note the inverted sense: the
 * register bit is "BST_DIABLE" [sic, typo in the header], so we write
 * !enable_bst), and LPDDR3 ODT enable.
 */
224 static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
225 bool enable, bool enable_bst, bool enable_odt)
228 bool disable_bst = !enable_bst;
/* Channel-dependent mask covering all three bits */
231 (1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
232 1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
233 (1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
234 1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
235 rk_clrsetreg(&grf->soc_con2, mask,
236 enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
237 disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
238 UPCTL0_BST_DIABLE_SHIFT) |
239 enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
240 UPCTL0_LPDDR3_ODT_EN_SHIFT));
/*
 * Program the protocol controller (uPCTL) for one channel: copy the full
 * pctl timing block into the register window, then apply per-dramtype
 * mode configuration (DFI latencies, mcfg, and the GRF mode bits).
 * The visible cases are LPDDR3 (first) and DDR3 (second); the case labels
 * themselves are not visible in this chunk.
 */
243 static void pctl_cfg(int channel, struct rk3288_ddr_pctl *pctl,
244 struct rk3288_sdram_params *sdram_params,
245 struct rk3288_grf *grf)
247 unsigned int burstlen;
/* Burst length is encoded in bits [20:18] of the NoC timing word */
249 burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
250 copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
251 sizeof(sdram_params->pctl_timing));
252 switch (sdram_params->base.dramtype) {
/* LPDDR3: read-enable at tCL-1, write latency = tCWL */
254 writel(sdram_params->pctl_timing.tcl - 1,
255 &pctl->dfitrddataen);
256 writel(sdram_params->pctl_timing.tcwl,
257 &pctl->dfitphywrlat);
259 writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
260 LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
261 (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
262 1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
264 ddr_set_ddr3_mode(grf, channel, false);
265 ddr_set_enable(grf, channel, true);
266 ddr_set_en_bst_odt(grf, channel, true, false,
267 sdram_params->base.odt);
/* DDR3: read-enable depends on whether the DLL is disabled in MR1 */
270 if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
271 writel(sdram_params->pctl_timing.tcl - 3,
272 &pctl->dfitrddataen);
274 writel(sdram_params->pctl_timing.tcl - 2,
275 &pctl->dfitrddataen);
277 writel(sdram_params->pctl_timing.tcwl - 1,
278 &pctl->dfitphywrlat);
279 writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
280 DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
281 1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
283 ddr_set_ddr3_mode(grf, channel, true);
284 ddr_set_enable(grf, channel, true);
286 ddr_set_en_bst_odt(grf, channel, false, true, false);
/* Enable stall-on-power-down handshake (scfg bit 0) */
290 setbits_le32(&pctl->scfg, 1);
/*
 * Program the DDR PHY (PUBL) and the memory scheduler (MSCH) for one
 * channel: copy PHY timing registers, set NoC timing/latency/activate,
 * compute the PTR0-1 init timers from the DDR frequency, then apply
 * per-dramtype mode settings (LPDDR3 first, DDR3 second; the case labels
 * are not visible in this chunk) and the dynamic-ODT choice.
 */
293 static void phy_cfg(const struct chan_info *chan, int channel,
294 struct rk3288_sdram_params *sdram_params)
296 struct rk3288_ddr_publ *publ = chan->publ;
297 struct rk3288_msch *msch = chan->msch;
298 uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;
/* DINIT2 ~ 200us expressed in DDR clocks (MHz * ns / 1000) */
302 dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);
304 copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
305 sizeof(sdram_params->phy_timing));
/* Memory scheduler: timing word, worst-case read latency, activate */
306 writel(sdram_params->base.noc_timing, &msch->ddrtiming);
307 writel(0x3f, &msch->readlatency);
308 writel(sdram_params->base.noc_activate, &msch->activate);
309 writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
310 1 << BUSRDTORD_SHIFT, &msch->devtodev);
/* PHY timer registers: DLL lock/srst and DRAM init intervals in clocks */
311 writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
312 DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
313 8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
314 writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
315 DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
/* DINIT2 is clamped to its 17-bit field */
317 writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
318 DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,
321 switch (sdram_params->base.dramtype) {
/* LPDDR3 mode: DQS gating config, DDR mode select, DQS resistors */
323 clrsetbits_le32(&publ->pgcr, 0x1F,
324 0 << PGCR_DFTLMT_SHIFT |
325 0 << PGCR_DFTCMP_SHIFT |
326 1 << PGCR_DQSCFG_SHIFT |
327 0 << PGCR_ITMDMD_SHIFT);
328 /* DDRMODE select LPDDR3 */
329 clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
330 DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
331 clrsetbits_le32(&publ->dxccr,
332 DQSNRES_MASK << DQSNRES_SHIFT |
333 DQSRES_MASK << DQSRES_SHIFT,
334 4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
/* DQS gate extension = tDQSCKmax - tDQSCK (from DTPR1) */
335 tmp = readl(&publ->dtpr[1]);
336 tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
337 ((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
338 clrsetbits_le32(&publ->dsgcr,
339 DQSGE_MASK << DQSGE_SHIFT |
340 DQSGX_MASK << DQSGX_SHIFT,
341 tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
/* DDR3 mode */
344 clrbits_le32(&publ->pgcr, 0x1f);
345 clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
346 DDRMD_DDR3 << DDRMD_SHIFT);
349 if (sdram_params->base.odt) {
350 /*dynamic RTT enable */
351 for (i = 0; i < 4; i++)
352 setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
354 /*dynamic RTT disable */
355 for (i = 0; i < 4; i++)
356 clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
/*
 * Kick the PHY initialization state machine (DLL soft reset + lock, ZQ
 * calibration, ITM reset) and busy-wait until PGSR reports init, DLL lock
 * and ZQ calibration all done.  No timeout — hangs on broken hardware.
 */
360 static void phy_init(struct rk3288_ddr_publ *publ)
362 setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
363 | PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
365 while ((readl(&publ->pgsr) &
366 (PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
367 (PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
/*
 * Issue one command through the uPCTL MCMD register (rank select in bits
 * 21:20) and busy-wait until the controller clears START_CMD.
 */
371 static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
374 writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
376 while (readl(&pctl->mcmd) & START_CMD)
/*
 * Convenience wrapper for LPDDR2/3 MRW/MRR-style commands: packs the mode
 * register address (ma) and operand (op) into the MCMD argument field.
 */
380 static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
381 u32 rank, u32 cmd, u32 ma, u32 op)
383 send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
384 (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
/*
 * Run the PHY-driven DRAM initialization sequence (bypassing DLL lock and
 * ZQ cal, which phy_init() already did); DDR3 additionally gets a DRAM
 * reset.  Busy-waits for init + DLL done in PGSR, with no timeout.
 */
387 static void memory_init(struct rk3288_ddr_publ *publ,
390 setbits_le32(&publ->pir,
391 (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
392 | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
393 | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
395 while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
396 != (PGSR_IDONE | PGSR_DLDONE))
/*
 * Drive the uPCTL state machine into CONFIG state.  If the controller is
 * in a low-power state it must be woken first (WAKEUP, wait for the state
 * change and for the PHY DLL to re-lock) before CFG can be requested.
 * The enclosing retry loop / switch labels are not visible in this chunk.
 */
400 static void move_to_config_state(struct rk3288_ddr_publ *publ,
401 struct rk3288_ddr_pctl *pctl)
406 state = readl(&pctl->stat) & PCTL_STAT_MSK;
/* Low-power exit: wake up, wait for state change and DLL lock */
410 writel(WAKEUP_STATE, &pctl->sctl);
411 while ((readl(&pctl->stat) & PCTL_STAT_MSK)
415 while ((readl(&publ->pgsr) & PGSR_DLDONE)
419 * if at low power state,need wakeup first,
420 * and then enter the config
426 writel(CFG_STATE, &pctl->sctl);
427 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
/*
 * Switch a channel between 16-bit (n == 1) and 32-bit (n == 2) bus width.
 * Half-width: enable ppcfg, set the GRF half-width bit (soc_con0 bit
 * 8+channel), flag it in the MSCH timing word, and disable byte lanes
 * 2/3 (data + DLL).  Full-width: the reverse, plus a DLL soft-reset pulse
 * on lanes 2/3.  The if/else structure is partially elided in this chunk.
 */
438 static void set_bandwidth_ratio(const struct chan_info *chan, int channel,
439 u32 n, struct rk3288_grf *grf)
441 struct rk3288_ddr_pctl *pctl = chan->pctl;
442 struct rk3288_ddr_publ *publ = chan->publ;
443 struct rk3288_msch *msch = chan->msch;
/* 16-bit path */
446 setbits_le32(&pctl->ppcfg, 1);
447 rk_setreg(&grf->soc_con0, 1 << (8 + channel));
448 setbits_le32(&msch->ddrtiming, 1 << 31);
449 /* Data Byte disable*/
450 clrbits_le32(&publ->datx8[2].dxgcr, 1);
451 clrbits_le32(&publ->datx8[3].dxgcr, 1);
/* Also stop the DLLs of the unused byte lanes */
453 setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
454 setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
/* 32-bit path */
456 clrbits_le32(&pctl->ppcfg, 1);
457 rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
458 clrbits_le32(&msch->ddrtiming, 1 << 31);
459 /* Data Byte enable*/
460 setbits_le32(&publ->datx8[2].dxgcr, 1);
461 setbits_le32(&publ->datx8[3].dxgcr, 1);
/* Re-enable lanes' DLLs and pulse their soft reset */
464 clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
465 clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
467 clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
468 clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
470 setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
471 setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
/* dfistcfg0 bit 2: frequency-ratio related DFI setting */
473 setbits_le32(&pctl->dfistcfg0, 1 << 2);
/*
 * Run DQS gate training (QSTRN) and read-valid training (RVTRN) on all
 * active ranks of a channel.  Auto-refresh is disabled for the duration
 * and compensated with manual refreshes afterwards.  Returns an error
 * (return statements elided in this chunk) when PGSR reports a training
 * error; callers inspect dxgsr[0] for the per-rank failure bits.
 */
476 static int data_training(const struct chan_info *chan, int channel,
477 struct rk3288_sdram_params *sdram_params)
483 u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
484 struct rk3288_ddr_publ *publ = chan->publ;
485 struct rk3288_ddr_pctl *pctl = chan->pctl;
487 /* disable auto refresh */
488 writel(0, &pctl->trefi);
490 if (sdram_params->base.dramtype != LPDDR3)
491 setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT)
492 rank = sdram_params->ch[channel].rank | 1;
493 for (j = 0; j < ARRAY_SIZE(step); j++) {
495 * trigger QSTRN and RVTRN
496 * clear DTDONE status
498 setbits_le32(&publ->pir, PIR_CLRSR);
501 setbits_le32(&publ->pir,
502 PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |
505 /* wait echo byte DTDONE */
506 while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
509 while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
/* Lanes 2/3 only exist in 32-bit mode (ppcfg bit 0 clear) */
512 if (!(readl(&pctl->ppcfg) & 1)) {
513 while ((readl(&publ->datx8[2].dxgsr[0])
516 while ((readl(&publ->datx8[3].dxgsr[0])
/* Any training error aborts (error path elided here) */
520 if (readl(&publ->pgsr) &
521 (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {
526 /* send some auto refresh to complement the lost while DTT */
527 for (i = 0; i < (rank > 1 ? 8 : 4); i++)
528 send_command(pctl, rank, REF_CMD, 0);
530 if (sdram_params->base.dramtype != LPDDR3)
531 clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
533 /* resume auto refresh */
534 writel(sdram_params->pctl_timing.trefi, &pctl->trefi);
/*
 * Drive the uPCTL state machine into ACCESS state, handling the possible
 * current states: wake from low-power (after checking the LP trigger
 * field), or go through CONFIG -> GO.  Switch labels and the surrounding
 * retry loop are not visible in this chunk.
 */
539 static void move_to_access_state(const struct chan_info *chan)
541 struct rk3288_ddr_publ *publ = chan->publ;
542 struct rk3288_ddr_pctl *pctl = chan->pctl;
546 state = readl(&pctl->stat) & PCTL_STAT_MSK;
/* Low-power case: check what triggered LP before waking */
550 if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
554 writel(WAKEUP_STATE, &pctl->sctl);
555 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
/* Wait for PHY DLL lock after wakeup */
558 while ((readl(&publ->pgsr) & PGSR_DLDONE)
/* From init: CONFIG first, then GO to reach ACCESS */
563 writel(CFG_STATE, &pctl->sctl);
564 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
567 writel(GO_STATE, &pctl->sctl);
568 while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
/*
 * Apply the row/bank/column address mapping for one channel: select the
 * bank-address field width in the PHY DCR (8 banks when bk == 3) and
 * write the matched ddrconfig index into the memory scheduler.
 */
579 static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
580 struct rk3288_sdram_params *sdram_params)
582 struct rk3288_ddr_publ *publ = chan->publ;
584 if (sdram_params->ch[chnum].bk == 3)
585 clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
588 clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);
590 writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
/*
 * Publish the final DRAM configuration: encode the detected geometry of
 * every channel into the PMU sys_reg[2] word (read back later by
 * rockchip_sdram_size()), apply the per-channel address mapping, and
 * program the address stride into SGRF soc_con2.
 */
593 static void dram_all_config(const struct dram_info *dram,
594 struct rk3288_sdram_params *sdram_params)
599 sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
600 sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
601 for (chan = 0; chan < sdram_params->num_channels; chan++) {
602 const struct rk3288_sdram_channel *info =
603 &sdram_params->ch[chan];
/* Field encodings: rank-1, col-9, row-13; bw/dbw use (2 >> n) */
605 sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
606 sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(chan);
607 sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
608 sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
609 sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan);
610 sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
611 sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
612 sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan);
613 sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan);
615 dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
617 writel(sys_reg, &dram->pmu->sys_reg[2]);
618 rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
/*
 * Detect the rank count and bus width of one channel by running data
 * training and inspecting the per-rank DQS-gate error bits in dxgsr[0].
 * A rank-0 failure on channel 0 is fatal; on channel 1 it means the
 * channel is absent (num_channels drops to 1).  A rank-1 failure means
 * single-rank; a lane-2 failure means 16-bit width.  If anything changed,
 * training is re-run after a PHY-ctl reset pulse.  Return statements and
 * some braces are elided in this chunk.
 */
621 static int sdram_rank_bw_detect(struct dram_info *dram, int channel,
622 struct rk3288_sdram_params *sdram_params)
625 int need_trainig = 0;
626 const struct chan_info *chan = &dram->chan[channel];
627 struct rk3288_ddr_publ *publ = chan->publ;
629 if (data_training(chan, channel, sdram_params) < 0) {
630 reg = readl(&publ->datx8[0].dxgsr[0]);
631 /* Check the result for rank 0 */
632 if ((channel == 0) && (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
633 debug("data training fail!\n");
635 } else if ((channel == 1) &&
636 (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
637 sdram_params->num_channels = 1;
640 /* Check the result for rank 1 */
641 if (reg & DQS_GATE_TRAINING_ERROR_RANK1) {
642 sdram_params->ch[channel].rank = 1;
643 clrsetbits_le32(&publ->pgcr, 0xF << 18,
644 sdram_params->ch[channel].rank << 18);
/* Lane 2 failing means only a 16-bit bus is wired up */
647 reg = readl(&publ->datx8[2].dxgsr[0]);
648 if (reg & (1 << 4)) {
649 sdram_params->ch[channel].bw = 1;
650 set_bandwidth_ratio(chan, channel,
651 sdram_params->ch[channel].bw,
656 /* Assume the Die bit width are the same with the chip bit width */
657 sdram_params->ch[channel].dbw = sdram_params->ch[channel].bw;
/* Re-train with the corrected rank/width; reset pulse differs by type */
660 (data_training(chan, channel, sdram_params) < 0)) {
661 if (sdram_params->base.dramtype == LPDDR3) {
662 ddr_phy_ctl_reset(dram->cru, channel, 1);
664 ddr_phy_ctl_reset(dram->cru, channel, 0);
667 debug("2nd data training failed!");
/*
 * Detect column and row address widths by writing TEST_PATTEN at
 * addresses that alias when the guessed width is too large: for each
 * candidate, write 0 to base, the pattern at base + (1 << bits), and
 * accept the first candidate where both read back unchanged.  Columns are
 * probed 11..9, then the msch is switched to config 4 and rows are probed
 * 16..12.  Error paths and `break`s are elided in this chunk.
 */
674 static int sdram_col_row_detect(struct dram_info *dram, int channel,
675 struct rk3288_sdram_params *sdram_params)
679 const struct chan_info *chan = &dram->chan[channel];
680 struct rk3288_ddr_pctl *pctl = chan->pctl;
681 struct rk3288_ddr_publ *publ = chan->publ;
/* Column probe: offset scales with bus width (bw) */
685 for (col = 11; col >= 9; col--) {
686 writel(0, CONFIG_SYS_SDRAM_BASE);
687 addr = CONFIG_SYS_SDRAM_BASE +
688 (1 << (col + sdram_params->ch[channel].bw - 1));
689 writel(TEST_PATTEN, addr);
690 if ((readl(addr) == TEST_PATTEN) &&
691 (readl(CONFIG_SYS_SDRAM_BASE) == 0))
695 printf("Col detect error\n");
699 sdram_params->ch[channel].col = col;
/* Switch to a known mapping (ddrconf 4) for the row probe */
702 move_to_config_state(publ, pctl);
703 writel(4, &chan->msch->ddrconf);
704 move_to_access_state(chan);
/* Row probe: row bits sit above 15 address bits in this mapping */
706 for (row = 16; row >= 12; row--) {
707 writel(0, CONFIG_SYS_SDRAM_BASE);
708 addr = CONFIG_SYS_SDRAM_BASE + (1 << (row + 15 - 1));
709 writel(TEST_PATTEN, addr);
710 if ((readl(addr) == TEST_PATTEN) &&
711 (readl(CONFIG_SYS_SDRAM_BASE) == 0))
715 printf("Row detect error\n");
/* cs1 assumed same size as cs0; no 3/4-row configuration */
718 sdram_params->ch[channel].cs1_row = row;
719 sdram_params->ch[channel].row_3_4 = 0;
720 debug("chn %d col %d, row %d\n", channel, col, row);
721 sdram_params->ch[channel].cs0_row = row;
/*
 * Find the NIU (msch) ddrconfig index matching the detected channel-0
 * geometry: pack (col - 9, adjusted for 16-bit width) and (cs0_row - 12)
 * in the same encoding as ddrconf_table[] and search for it.  The found
 * index is stored in base.ddrconfig; failure prints and returns an error
 * (the return statements are elided in this chunk).
 * NOTE(review): size could use the ARRAY_SIZE() macro used elsewhere.
 */
728 static int sdram_get_niu_config(struct rk3288_sdram_params *sdram_params)
730 int i, tmp, size, ret = 0;
732 tmp = sdram_params->ch[0].col - 9;
733 tmp -= (sdram_params->ch[0].bw == 2) ? 0 : 1;
734 tmp |= ((sdram_params->ch[0].cs0_row - 12) << 4);
735 size = sizeof(ddrconf_table)/sizeof(ddrconf_table[0]);
736 for (i = 0; i < size; i++)
737 if (tmp == ddrconf_table[i])
740 printf("niu config not found\n");
743 sdram_params->base.ddrconfig = i;
/*
 * Pick the SGRF address-stride setting from the total capacity: capacity
 * is computed from channel-0 geometry (rows + cols + rank + width, scaled
 * by channel count).  The capacity->stride mapping (switch/table) is
 * elided in this chunk; an unmatched capacity is an error.
 */
749 static int sdram_get_stride(struct rk3288_sdram_params *sdram_params)
753 long cap = sdram_params->num_channels * (1u <<
754 (sdram_params->ch[0].cs0_row +
755 sdram_params->ch[0].col +
756 (sdram_params->ch[0].rank - 1) +
757 sdram_params->ch[0].bw +
775 printf("could not find correct stride, cap error!\n");
779 sdram_params->base.stride = stride;
/*
 * Top-level SDRAM bring-up: validate the requested frequency, set the DDR
 * clock, then for each of the two channels run the full init sequence
 * (reset, DFI/pctl/PHY config, power-up, DRAM init, mode registers),
 * detect rank/width/col/row, and finally publish the configuration via
 * dram_all_config().  Error-handling gotos/returns are elided in this
 * chunk.  Returns 0 on success, negative on failure.
 */
784 static int sdram_init(struct dram_info *dram,
785 struct rk3288_sdram_params *sdram_params)
791 debug("%s start\n", __func__);
/* Sanity limits: DDR3 <= 800 MHz, LPDDR3 <= 533 MHz */
792 if ((sdram_params->base.dramtype == DDR3 &&
793 sdram_params->base.ddr_freq > 800000000) ||
794 (sdram_params->base.dramtype == LPDDR3 &&
795 sdram_params->base.ddr_freq > 533000000)) {
796 debug("SDRAM frequency is too high!");
800 debug("ddr clk dpll\n");
801 ret = clk_set_rate(&dram->ddr_clk, sdram_params->base.ddr_freq);
802 debug("ret=%d\n", ret);
804 debug("Could not set DDR clock\n");
808 for (channel = 0; channel < 2; channel++) {
809 const struct chan_info *chan = &dram->chan[channel];
810 struct rk3288_ddr_pctl *pctl = chan->pctl;
811 struct rk3288_ddr_publ *publ = chan->publ;
813 /* map all the 4GB space to the current channel */
815 rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x17);
817 rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x1a);
/* Reset and configure controller + PHY for this channel */
818 phy_pctrl_reset(dram->cru, publ, channel);
819 phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);
821 dfi_cfg(pctl, sdram_params->base.dramtype);
823 pctl_cfg(channel, pctl, sdram_params, dram->grf);
825 phy_cfg(chan, channel, sdram_params);
/* Power-up handshake, then DRAM init through the PHY */
829 writel(POWER_UP_START, &pctl->powctl);
830 while (!(readl(&pctl->powstat) & POWER_UP_DONE))
833 memory_init(publ, sdram_params->base.dramtype);
834 move_to_config_state(publ, pctl);
/* LPDDR3: reset sequence + program mode registers 1..3 */
836 if (sdram_params->base.dramtype == LPDDR3) {
837 send_command(pctl, 3, DESELECT_CMD, 0);
839 send_command(pctl, 3, PREA_CMD, 0);
841 send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
843 send_command_op(pctl, 3, MRS_CMD, 1,
844 sdram_params->phy_timing.mr[1]);
846 send_command_op(pctl, 3, MRS_CMD, 2,
847 sdram_params->phy_timing.mr[2]);
849 send_command_op(pctl, 3, MRS_CMD, 3,
850 sdram_params->phy_timing.mr[3]);
854 /* Using 32bit bus width for detect */
855 sdram_params->ch[channel].bw = 2;
856 set_bandwidth_ratio(chan, channel,
857 sdram_params->ch[channel].bw, dram->grf);
859 * set cs, using n=3 for detect
/* NOTE(review): trailing comma (operator) — works, but should be ';' */
864 sdram_params->ch[channel].rank = 2,
865 clrsetbits_le32(&publ->pgcr, 0xF << 18,
866 (sdram_params->ch[channel].rank | 1) << 18);
868 /* DS=40ohm,ODT=155ohm */
869 zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
870 2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
871 0x19 << PD_OUTPUT_SHIFT;
872 writel(zqcr, &publ->zq1cr[0]);
873 writel(zqcr, &publ->zq0cr[0]);
875 if (sdram_params->base.dramtype == LPDDR3) {
876 /* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
878 send_command_op(pctl,
879 sdram_params->ch[channel].rank | 1,
881 sdram_params->base.odt ? 3 : 0);
/* Probe the channel with an MRR; absent channel fails the readback */
883 writel(0, &pctl->mrrcfg0);
884 send_command_op(pctl, 1, MRR_CMD, 8, 0);
886 if ((readl(&pctl->mrrstat0) & 0x3) != 3) {
893 /* Detect the rank and bit-width with data-training */
894 sdram_rank_bw_detect(dram, channel, sdram_params);
896 if (sdram_params->base.dramtype == LPDDR3) {
898 writel(0, &pctl->mrrcfg0);
899 for (i = 0; i < 17; i++)
900 send_command_op(pctl, 1, MRR_CMD, i, 0);
902 writel(15, &chan->msch->ddrconf);
903 move_to_access_state(chan);
904 /* DDR3 and LPDDR3 are always 8 bank, no need detect */
905 sdram_params->ch[channel].bk = 3;
906 /* Detect Col and Row number*/
907 ret = sdram_col_row_detect(dram, channel, sdram_params);
911 /* Find NIU DDR configuration */
912 ret = sdram_get_niu_config(sdram_params);
915 /* Find stride setting */
916 ret = sdram_get_stride(sdram_params);
920 dram_all_config(dram, sdram_params);
921 debug("%s done\n", __func__);
925 printf("DRAM init failed!\n");
# ifdef CONFIG_ROCKCHIP_FAST_SPL
/*
 * Veyron (Chromebook) fast-SPL path: raise the CPU rail via the RK8xx
 * PMIC in two steps (1.2V then 1.4V) with settle delays, then switch the
 * CPU clock to full speed.  Error-return lines are elided in this chunk.
 */
930 static int veyron_init(struct dram_info *priv)
932 struct udevice *pmic;
935 ret = uclass_first_device_err(UCLASS_PMIC, &pmic);
939 /* Slowly raise to max CPU voltage to prevent overshoot */
940 ret = rk8xx_spl_configure_buck(pmic, 1, 1200000);
943 udelay(175);/* Must wait for voltage to stabilize, 2mV/us */
944 ret = rk8xx_spl_configure_buck(pmic, 1, 1400000);
947 udelay(100);/* Must wait for voltage to stabilize, 2mV/us */
949 rk3288_clk_configure_cpu(priv->cru, priv->grf);
/*
 * Driver-model entry for SDRAM setup: optionally run the veyron
 * board-specific voltage/clock prep, then hand off to sdram_init() with
 * the platdata parameters.
 */
955 static int setup_sdram(struct udevice *dev)
957 struct dram_info *priv = dev_get_priv(dev);
958 struct rk3288_sdram_params *params = dev_get_platdata(dev);
960 # ifdef CONFIG_ROCKCHIP_FAST_SPL
961 if (priv->is_veyron) {
964 ret = veyron_init(priv);
970 return sdram_init(priv, params);
/*
 * Read the SDRAM parameters from the device tree (non-OF_PLATDATA builds
 * only): pctl timing, PHY timing, and base parameter arrays, plus the
 * register map.  Also detects veyron boards for the fast-SPL path.
 * Error-return lines are elided in this chunk.
 */
973 static int rk3288_dmc_ofdata_to_platdata(struct udevice *dev)
975 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
976 struct rk3288_sdram_params *params = dev_get_platdata(dev);
979 /* Rk3288 supports dual-channel, set default channel num to 2 */
980 params->num_channels = 2;
981 ret = dev_read_u32_array(dev, "rockchip,pctl-timing",
982 (u32 *)&params->pctl_timing,
983 sizeof(params->pctl_timing) / sizeof(u32));
985 debug("%s: Cannot read rockchip,pctl-timing\n", __func__);
988 ret = dev_read_u32_array(dev, "rockchip,phy-timing",
989 (u32 *)&params->phy_timing,
990 sizeof(params->phy_timing) / sizeof(u32));
992 debug("%s: Cannot read rockchip,phy-timing\n", __func__);
995 ret = dev_read_u32_array(dev, "rockchip,sdram-params",
996 (u32 *)&params->base,
997 sizeof(params->base) / sizeof(u32));
999 debug("%s: Cannot read rockchip,sdram-params\n", __func__);
1002 #ifdef CONFIG_ROCKCHIP_FAST_SPL
1003 struct dram_info *priv = dev_get_priv(dev);
/* Veyron boards get the PMIC/CPU-clock prep in setup_sdram() */
1005 priv->is_veyron = !fdt_node_check_compatible(blob, 0, "google,veyron");
1007 ret = regmap_init_mem(dev_ofnode(dev), &params->map);
1014 #endif /* CONFIG_SPL_BUILD */
1016 #if CONFIG_IS_ENABLED(OF_PLATDATA)
/*
 * OF_PLATDATA equivalent of ofdata_to_platdata: copy the pre-decoded
 * dtd_rockchip_rk3288_dmc arrays into the runtime params struct and build
 * the regmap from the reg[] pairs.
 */
1017 static int conv_of_platdata(struct udevice *dev)
1019 struct rk3288_sdram_params *plat = dev_get_platdata(dev);
1020 struct dtd_rockchip_rk3288_dmc *of_plat = &plat->of_plat;
1023 memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
1024 sizeof(plat->pctl_timing));
1025 memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
1026 sizeof(plat->phy_timing));
1027 memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));
1028 /* Rk3288 supports dual-channel, set default channel num to 2 */
1029 plat->num_channels = 2;
/* reg[] holds addr/size pairs, hence the divide by 2 */
1030 ret = regmap_init_mem_platdata(dev, of_plat->reg,
1031 ARRAY_SIZE(of_plat->reg) / 2,
/*
 * Probe: resolve all syscon/regmap register ranges (PMU, NoC msch, GRF,
 * SGRF, per-channel pctl/publ, CRU), request the DDR clock, run the full
 * SDRAM setup in TPL/SPL builds, and finally record base/size for the
 * RAM uclass.  Early-return lines are elided in this chunk.
 */
1040 static int rk3288_dmc_probe(struct udevice *dev)
1042 #if defined(CONFIG_TPL_BUILD) || \
1043 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1044 struct rk3288_sdram_params *plat = dev_get_platdata(dev);
1045 struct udevice *dev_clk;
1049 struct dram_info *priv = dev_get_priv(dev);
1051 priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);
1052 #if defined(CONFIG_TPL_BUILD) || \
1053 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1054 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1055 ret = conv_of_platdata(dev);
/* Both channels' msch windows live in one NoC range, 0x80 apart */
1059 map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
1061 return PTR_ERR(map);
1062 priv->chan[0].msch = regmap_get_range(map, 0);
1063 priv->chan[1].msch = (struct rk3288_msch *)
1064 (regmap_get_range(map, 0) + 0x80);
1066 priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
1067 priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);
/* plat->map ranges: ch0 pctl, ch0 publ, ch1 pctl, ch1 publ */
1069 priv->chan[0].pctl = regmap_get_range(plat->map, 0);
1070 priv->chan[0].publ = regmap_get_range(plat->map, 1);
1071 priv->chan[1].pctl = regmap_get_range(plat->map, 2);
1072 priv->chan[1].publ = regmap_get_range(plat->map, 3);
1074 ret = rockchip_get_clk(&dev_clk);
1077 priv->ddr_clk.id = CLK_DDR;
1078 ret = clk_request(dev_clk, &priv->ddr_clk);
1082 priv->cru = rockchip_get_cru();
1083 if (IS_ERR(priv->cru))
1084 return PTR_ERR(priv->cru);
1085 ret = setup_sdram(dev);
/* Decode the size from the sys_reg word written by dram_all_config() */
1089 priv->info.base = CONFIG_SYS_SDRAM_BASE;
1090 priv->info.size = rockchip_sdram_size(
1091 (phys_addr_t)&priv->pmu->sys_reg[2]);
/*
 * RAM uclass get_info op: report the base/size recorded at probe time.
 * (The body copying priv->info into *info is elided in this chunk.)
 */
1097 static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
1099 struct dram_info *priv = dev_get_priv(dev);
/* RAM uclass operations */
1106 static struct ram_ops rk3288_dmc_ops = {
1107 .get_info = rk3288_dmc_get_info,
/* DT match table */
1110 static const struct udevice_id rk3288_dmc_ids[] = {
1111 { .compatible = "rockchip,rk3288-dmc" },
/*
 * Driver declaration.  The ofdata/platdata hooks are only compiled in
 * for TPL (or SPL when there is no TPL) — U-Boot proper just reads back
 * the already-initialized RAM info.
 */
1115 U_BOOT_DRIVER(dmc_rk3288) = {
1116 .name = "rockchip_rk3288_dmc",
1118 .of_match = rk3288_dmc_ids,
1119 .ops = &rk3288_dmc_ops,
1120 #if defined(CONFIG_TPL_BUILD) || \
1121 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1122 .ofdata_to_platdata = rk3288_dmc_ofdata_to_platdata,
1124 .probe = rk3288_dmc_probe,
1125 .priv_auto_alloc_size = sizeof(struct dram_info),
1126 #if defined(CONFIG_TPL_BUILD) || \
1127 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1128 .platdata_auto_alloc_size = sizeof(struct rk3288_sdram_params),