/*
 * (C) Copyright 2015 Google, Inc
 * Copyright 2014 Rockchip Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0
 *
 * Adapted from coreboot.
 */
#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_rk3288.h>
#include <asm/arch/ddr_rk3288.h>
#include <asm/arch/grf_rk3288.h>
#include <asm/arch/pmu_rk3288.h>
#include <asm/arch/sdram.h>
#include <linux/err.h>
#include <power/regulator.h>
#include <power/rk808_pmic.h>
29 DECLARE_GLOBAL_DATA_PTR;
32 struct rk3288_ddr_pctl *pctl;
33 struct rk3288_ddr_publ *publ;
34 struct rk3288_msch *msch;
38 struct chan_info chan[2];
41 struct rk3288_cru *cru;
42 struct rk3288_grf *grf;
43 struct rk3288_sgrf *sgrf;
44 struct rk3288_pmu *pmu;
48 struct rk3288_sdram_params {
49 #if CONFIG_IS_ENABLED(OF_PLATDATA)
50 struct dtd_rockchip_rk3288_dmc of_plat;
52 struct rk3288_sdram_channel ch[2];
53 struct rk3288_sdram_pctl_timing pctl_timing;
54 struct rk3288_sdram_phy_timing phy_timing;
55 struct rk3288_base_params base;
60 #ifdef CONFIG_SPL_BUILD
61 static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
65 for (i = 0; i < n / sizeof(u32); i++) {
72 static void ddr_reset(struct rk3288_cru *cru, u32 ch, u32 ctl, u32 phy)
74 u32 phy_ctl_srstn_shift = 4 + 5 * ch;
75 u32 ctl_psrstn_shift = 3 + 5 * ch;
76 u32 ctl_srstn_shift = 2 + 5 * ch;
77 u32 phy_psrstn_shift = 1 + 5 * ch;
78 u32 phy_srstn_shift = 5 * ch;
80 rk_clrsetreg(&cru->cru_softrst_con[10],
81 1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
82 1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
84 phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
85 ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
86 phy << phy_srstn_shift);
89 static void ddr_phy_ctl_reset(struct rk3288_cru *cru, u32 ch, u32 n)
91 u32 phy_ctl_srstn_shift = 4 + 5 * ch;
93 rk_clrsetreg(&cru->cru_softrst_con[10],
94 1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
97 static void phy_pctrl_reset(struct rk3288_cru *cru,
98 struct rk3288_ddr_publ *publ,
103 ddr_reset(cru, channel, 1, 1);
105 clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
106 for (i = 0; i < 4; i++)
107 clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);
110 setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
111 for (i = 0; i < 4; i++)
112 setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);
115 ddr_reset(cru, channel, 1, 0);
117 ddr_reset(cru, channel, 0, 0);
121 static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
125 if (freq <= 250000000) {
126 if (freq <= 150000000)
127 clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
129 setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
130 setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
131 for (i = 0; i < 4; i++)
132 setbits_le32(&publ->datx8[i].dxdllcr,
135 setbits_le32(&publ->pir, PIR_DLLBYP);
137 clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
138 clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
139 for (i = 0; i < 4; i++) {
140 clrbits_le32(&publ->datx8[i].dxdllcr,
144 clrbits_le32(&publ->pir, PIR_DLLBYP);
148 static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
150 writel(DFI_INIT_START, &pctl->dfistcfg0);
151 writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
153 writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
154 writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,
157 writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
158 writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
159 writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
160 writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
161 writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
162 writel(1, &pctl->dfitphyupdtype0);
164 /* cs0 and cs1 write odt enable */
165 writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
167 /* odt write length */
168 writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
169 /* phyupd and ctrlupd disabled */
170 writel(0, &pctl->dfiupdcfg);
173 static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
178 val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
179 DDR0_16BIT_EN_SHIFT);
181 rk_clrsetreg(&grf->soc_con0,
182 1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),
186 static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
191 mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
192 val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
193 MSCH0_MAINDDR3_SHIFT);
194 rk_clrsetreg(&grf->soc_con0, mask, val);
197 static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
198 bool enable, bool enable_bst, bool enable_odt)
201 bool disable_bst = !enable_bst;
204 (1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
205 1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
206 (1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
207 1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
208 rk_clrsetreg(&grf->soc_con2, mask,
209 enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
210 disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
211 UPCTL0_BST_DIABLE_SHIFT) |
212 enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
213 UPCTL0_LPDDR3_ODT_EN_SHIFT));
216 static void pctl_cfg(u32 channel, struct rk3288_ddr_pctl *pctl,
217 const struct rk3288_sdram_params *sdram_params,
218 struct rk3288_grf *grf)
220 unsigned int burstlen;
222 burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
223 copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
224 sizeof(sdram_params->pctl_timing));
225 switch (sdram_params->base.dramtype) {
227 writel(sdram_params->pctl_timing.tcl - 1,
228 &pctl->dfitrddataen);
229 writel(sdram_params->pctl_timing.tcwl,
230 &pctl->dfitphywrlat);
232 writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
233 LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
234 (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
235 1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
237 ddr_set_ddr3_mode(grf, channel, false);
238 ddr_set_enable(grf, channel, true);
239 ddr_set_en_bst_odt(grf, channel, true, false,
240 sdram_params->base.odt);
243 if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
244 writel(sdram_params->pctl_timing.tcl - 3,
245 &pctl->dfitrddataen);
247 writel(sdram_params->pctl_timing.tcl - 2,
248 &pctl->dfitrddataen);
250 writel(sdram_params->pctl_timing.tcwl - 1,
251 &pctl->dfitphywrlat);
252 writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
253 DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
254 1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
256 ddr_set_ddr3_mode(grf, channel, true);
257 ddr_set_enable(grf, channel, true);
259 ddr_set_en_bst_odt(grf, channel, false, true, false);
263 setbits_le32(&pctl->scfg, 1);
266 static void phy_cfg(const struct chan_info *chan, u32 channel,
267 const struct rk3288_sdram_params *sdram_params)
269 struct rk3288_ddr_publ *publ = chan->publ;
270 struct rk3288_msch *msch = chan->msch;
271 uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;
275 dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);
277 copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
278 sizeof(sdram_params->phy_timing));
279 writel(sdram_params->base.noc_timing, &msch->ddrtiming);
280 writel(0x3f, &msch->readlatency);
281 writel(sdram_params->base.noc_activate, &msch->activate);
282 writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
283 1 << BUSRDTORD_SHIFT, &msch->devtodev);
284 writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
285 DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
286 8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
287 writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
288 DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
290 writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
291 DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,
294 switch (sdram_params->base.dramtype) {
296 clrsetbits_le32(&publ->pgcr, 0x1F,
297 0 << PGCR_DFTLMT_SHIFT |
298 0 << PGCR_DFTCMP_SHIFT |
299 1 << PGCR_DQSCFG_SHIFT |
300 0 << PGCR_ITMDMD_SHIFT);
301 /* DDRMODE select LPDDR3 */
302 clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
303 DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
304 clrsetbits_le32(&publ->dxccr,
305 DQSNRES_MASK << DQSNRES_SHIFT |
306 DQSRES_MASK << DQSRES_SHIFT,
307 4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
308 tmp = readl(&publ->dtpr[1]);
309 tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
310 ((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
311 clrsetbits_le32(&publ->dsgcr,
312 DQSGE_MASK << DQSGE_SHIFT |
313 DQSGX_MASK << DQSGX_SHIFT,
314 tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
317 clrbits_le32(&publ->pgcr, 0x1f);
318 clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
319 DDRMD_DDR3 << DDRMD_SHIFT);
322 if (sdram_params->base.odt) {
323 /*dynamic RTT enable */
324 for (i = 0; i < 4; i++)
325 setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
327 /*dynamic RTT disable */
328 for (i = 0; i < 4; i++)
329 clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
333 static void phy_init(struct rk3288_ddr_publ *publ)
335 setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
336 | PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
338 while ((readl(&publ->pgsr) &
339 (PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
340 (PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
344 static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
347 writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
349 while (readl(&pctl->mcmd) & START_CMD)
353 static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
354 u32 rank, u32 cmd, u32 ma, u32 op)
356 send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
357 (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
360 static void memory_init(struct rk3288_ddr_publ *publ,
363 setbits_le32(&publ->pir,
364 (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
365 | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
366 | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
368 while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
369 != (PGSR_IDONE | PGSR_DLDONE))
373 static void move_to_config_state(struct rk3288_ddr_publ *publ,
374 struct rk3288_ddr_pctl *pctl)
379 state = readl(&pctl->stat) & PCTL_STAT_MSK;
383 writel(WAKEUP_STATE, &pctl->sctl);
384 while ((readl(&pctl->stat) & PCTL_STAT_MSK)
388 while ((readl(&publ->pgsr) & PGSR_DLDONE)
391 /* if at low power state,need wakeup first,
392 * and then enter the config
398 writel(CFG_STATE, &pctl->sctl);
399 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
410 static void set_bandwidth_ratio(const struct chan_info *chan, u32 channel,
411 u32 n, struct rk3288_grf *grf)
413 struct rk3288_ddr_pctl *pctl = chan->pctl;
414 struct rk3288_ddr_publ *publ = chan->publ;
415 struct rk3288_msch *msch = chan->msch;
418 setbits_le32(&pctl->ppcfg, 1);
419 rk_setreg(&grf->soc_con0, 1 << (8 + channel));
420 setbits_le32(&msch->ddrtiming, 1 << 31);
421 /* Data Byte disable*/
422 clrbits_le32(&publ->datx8[2].dxgcr, 1);
423 clrbits_le32(&publ->datx8[3].dxgcr, 1);
425 setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
426 setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
428 clrbits_le32(&pctl->ppcfg, 1);
429 rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
430 clrbits_le32(&msch->ddrtiming, 1 << 31);
431 /* Data Byte enable*/
432 setbits_le32(&publ->datx8[2].dxgcr, 1);
433 setbits_le32(&publ->datx8[3].dxgcr, 1);
436 clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
437 clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
439 clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
440 clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
442 setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
443 setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
445 setbits_le32(&pctl->dfistcfg0, 1 << 2);
448 static int data_training(const struct chan_info *chan, u32 channel,
449 const struct rk3288_sdram_params *sdram_params)
455 u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
456 struct rk3288_ddr_publ *publ = chan->publ;
457 struct rk3288_ddr_pctl *pctl = chan->pctl;
459 /* disable auto refresh */
460 writel(0, &pctl->trefi);
462 if (sdram_params->base.dramtype != LPDDR3)
463 setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
464 rank = sdram_params->ch[channel].rank | 1;
465 for (j = 0; j < ARRAY_SIZE(step); j++) {
467 * trigger QSTRN and RVTRN
468 * clear DTDONE status
470 setbits_le32(&publ->pir, PIR_CLRSR);
473 setbits_le32(&publ->pir,
474 PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |
477 /* wait echo byte DTDONE */
478 while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
481 while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
484 if (!(readl(&pctl->ppcfg) & 1)) {
485 while ((readl(&publ->datx8[2].dxgsr[0])
488 while ((readl(&publ->datx8[3].dxgsr[0])
492 if (readl(&publ->pgsr) &
493 (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {
498 /* send some auto refresh to complement the lost while DTT */
499 for (i = 0; i < (rank > 1 ? 8 : 4); i++)
500 send_command(pctl, rank, REF_CMD, 0);
502 if (sdram_params->base.dramtype != LPDDR3)
503 clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
505 /* resume auto refresh */
506 writel(sdram_params->pctl_timing.trefi, &pctl->trefi);
511 static void move_to_access_state(const struct chan_info *chan)
513 struct rk3288_ddr_publ *publ = chan->publ;
514 struct rk3288_ddr_pctl *pctl = chan->pctl;
518 state = readl(&pctl->stat) & PCTL_STAT_MSK;
522 if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
526 writel(WAKEUP_STATE, &pctl->sctl);
527 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
530 while ((readl(&publ->pgsr) & PGSR_DLDONE)
535 writel(CFG_STATE, &pctl->sctl);
536 while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
539 writel(GO_STATE, &pctl->sctl);
540 while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
551 static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
552 const struct rk3288_sdram_params *sdram_params)
554 struct rk3288_ddr_publ *publ = chan->publ;
556 if (sdram_params->ch[chnum].bk == 3)
557 clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
560 clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);
562 writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
565 static void dram_all_config(const struct dram_info *dram,
566 const struct rk3288_sdram_params *sdram_params)
571 sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
572 sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
573 for (chan = 0; chan < sdram_params->num_channels; chan++) {
574 const struct rk3288_sdram_channel *info =
575 &sdram_params->ch[chan];
577 sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
578 sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(chan);
579 sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
580 sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
581 sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan);
582 sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
583 sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
584 sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan);
585 sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan);
587 dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
589 writel(sys_reg, &dram->pmu->sys_reg[2]);
590 rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
593 static int sdram_init(struct dram_info *dram,
594 const struct rk3288_sdram_params *sdram_params)
600 debug("%s start\n", __func__);
601 if ((sdram_params->base.dramtype == DDR3 &&
602 sdram_params->base.ddr_freq > 800000000) ||
603 (sdram_params->base.dramtype == LPDDR3 &&
604 sdram_params->base.ddr_freq > 533000000)) {
605 debug("SDRAM frequency is too high!");
609 debug("ddr clk dpll\n");
610 ret = clk_set_rate(&dram->ddr_clk, sdram_params->base.ddr_freq);
611 debug("ret=%d\n", ret);
613 debug("Could not set DDR clock\n");
617 for (channel = 0; channel < 2; channel++) {
618 const struct chan_info *chan = &dram->chan[channel];
619 struct rk3288_ddr_pctl *pctl = chan->pctl;
620 struct rk3288_ddr_publ *publ = chan->publ;
622 phy_pctrl_reset(dram->cru, publ, channel);
623 phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);
625 if (channel >= sdram_params->num_channels)
628 dfi_cfg(pctl, sdram_params->base.dramtype);
630 pctl_cfg(channel, pctl, sdram_params, dram->grf);
632 phy_cfg(chan, channel, sdram_params);
636 writel(POWER_UP_START, &pctl->powctl);
637 while (!(readl(&pctl->powstat) & POWER_UP_DONE))
640 memory_init(publ, sdram_params->base.dramtype);
641 move_to_config_state(publ, pctl);
643 if (sdram_params->base.dramtype == LPDDR3) {
644 send_command(pctl, 3, DESELECT_CMD, 0);
646 send_command(pctl, 3, PREA_CMD, 0);
648 send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
650 send_command_op(pctl, 3, MRS_CMD, 1,
651 sdram_params->phy_timing.mr[1]);
653 send_command_op(pctl, 3, MRS_CMD, 2,
654 sdram_params->phy_timing.mr[2]);
656 send_command_op(pctl, 3, MRS_CMD, 3,
657 sdram_params->phy_timing.mr[3]);
661 set_bandwidth_ratio(chan, channel,
662 sdram_params->ch[channel].bw, dram->grf);
669 clrsetbits_le32(&publ->pgcr, 0xF << 18,
670 (sdram_params->ch[channel].rank | 1) << 18);
671 /* DS=40ohm,ODT=155ohm */
672 zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
673 2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
674 0x19 << PD_OUTPUT_SHIFT;
675 writel(zqcr, &publ->zq1cr[0]);
676 writel(zqcr, &publ->zq0cr[0]);
678 if (sdram_params->base.dramtype == LPDDR3) {
679 /* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
681 send_command_op(pctl,
682 sdram_params->ch[channel].rank | 1,
684 sdram_params->base.odt ? 3 : 0);
686 writel(0, &pctl->mrrcfg0);
687 send_command_op(pctl, 1, MRR_CMD, 8, 0);
689 if ((readl(&pctl->mrrstat0) & 0x3) != 3) {
696 if (-1 == data_training(chan, channel, sdram_params)) {
697 if (sdram_params->base.dramtype == LPDDR3) {
698 ddr_phy_ctl_reset(dram->cru, channel, 1);
700 ddr_phy_ctl_reset(dram->cru, channel, 0);
707 if (sdram_params->base.dramtype == LPDDR3) {
709 writel(0, &pctl->mrrcfg0);
710 for (i = 0; i < 17; i++)
711 send_command_op(pctl, 1, MRR_CMD, i, 0);
713 move_to_access_state(chan);
715 dram_all_config(dram, sdram_params);
716 debug("%s done\n", __func__);
720 #endif /* CONFIG_SPL_BUILD */
722 size_t sdram_size_mb(struct rk3288_pmu *pmu)
724 u32 rank, col, bk, cs0_row, cs1_row, bw, row_3_4;
725 size_t chipsize_mb = 0;
728 u32 sys_reg = readl(&pmu->sys_reg[2]);
731 chans = 1 + ((sys_reg >> SYS_REG_NUM_CH_SHIFT) & SYS_REG_NUM_CH_MASK);
733 for (ch = 0; ch < chans; ch++) {
734 rank = 1 + (sys_reg >> SYS_REG_RANK_SHIFT(ch) &
736 col = 9 + (sys_reg >> SYS_REG_COL_SHIFT(ch) & SYS_REG_COL_MASK);
737 bk = 3 - ((sys_reg >> SYS_REG_BK_SHIFT(ch)) & SYS_REG_BK_MASK);
738 cs0_row = 13 + (sys_reg >> SYS_REG_CS0_ROW_SHIFT(ch) &
739 SYS_REG_CS0_ROW_MASK);
740 cs1_row = 13 + (sys_reg >> SYS_REG_CS1_ROW_SHIFT(ch) &
741 SYS_REG_CS1_ROW_MASK);
742 bw = (2 >> ((sys_reg >> SYS_REG_BW_SHIFT(ch)) &
744 row_3_4 = sys_reg >> SYS_REG_ROW_3_4_SHIFT(ch) &
745 SYS_REG_ROW_3_4_MASK;
747 chipsize_mb = (1 << (cs0_row + col + bk + bw - 20));
750 chipsize_mb += chipsize_mb >>
753 chipsize_mb = chipsize_mb * 3 / 4;
754 size_mb += chipsize_mb;
758 * we use the 0x00000000~0xfeffffff space since 0xff000000~0xffffffff
759 * is SoC register space (i.e. reserved)
761 size_mb = min(size_mb, 0xff000000 >> 20);
766 #ifdef CONFIG_SPL_BUILD
767 # ifdef CONFIG_ROCKCHIP_FAST_SPL
768 static int veyron_init(struct dram_info *priv)
770 struct udevice *pmic;
773 ret = uclass_first_device_err(UCLASS_PMIC, &pmic);
777 /* Slowly raise to max CPU voltage to prevent overshoot */
778 ret = rk808_spl_configure_buck(pmic, 1, 1200000);
781 udelay(175);/* Must wait for voltage to stabilize, 2mV/us */
782 ret = rk808_spl_configure_buck(pmic, 1, 1400000);
785 udelay(100);/* Must wait for voltage to stabilize, 2mV/us */
787 rk3288_clk_configure_cpu(priv->cru, priv->grf);
/*
 * SPL entry point for SDRAM setup: run board-specific init (Veyron)
 * when needed, then perform the full sdram_init() sequence.
 */
static int setup_sdram(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rk3288_sdram_params *params = dev_get_platdata(dev);

# ifdef CONFIG_ROCKCHIP_FAST_SPL
	if (priv->is_veyron) {
		int ret;

		ret = veyron_init(priv);
		if (ret)
			return ret;
	}
# endif

	return sdram_init(priv, params);
}
811 static int rk3288_dmc_ofdata_to_platdata(struct udevice *dev)
813 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
814 struct rk3288_sdram_params *params = dev_get_platdata(dev);
815 const void *blob = gd->fdt_blob;
816 int node = dev->of_offset;
819 params->num_channels = fdtdec_get_int(blob, node,
820 "rockchip,num-channels", 1);
821 for (i = 0; i < params->num_channels; i++) {
822 ret = fdtdec_get_byte_array(blob, node,
823 "rockchip,sdram-channel",
824 (u8 *)¶ms->ch[i],
825 sizeof(params->ch[i]));
827 debug("%s: Cannot read rockchip,sdram-channel\n",
832 ret = fdtdec_get_int_array(blob, node, "rockchip,pctl-timing",
833 (u32 *)¶ms->pctl_timing,
834 sizeof(params->pctl_timing) / sizeof(u32));
836 debug("%s: Cannot read rockchip,pctl-timing\n", __func__);
839 ret = fdtdec_get_int_array(blob, node, "rockchip,phy-timing",
840 (u32 *)¶ms->phy_timing,
841 sizeof(params->phy_timing) / sizeof(u32));
843 debug("%s: Cannot read rockchip,phy-timing\n", __func__);
846 ret = fdtdec_get_int_array(blob, node, "rockchip,sdram-params",
847 (u32 *)¶ms->base,
848 sizeof(params->base) / sizeof(u32));
850 debug("%s: Cannot read rockchip,sdram-params\n", __func__);
853 #ifdef CONFIG_ROCKCHIP_FAST_SPL
854 struct dram_info *priv = dev_get_priv(dev);
856 priv->is_veyron = !fdt_node_check_compatible(blob, 0, "google,veyron");
858 ret = regmap_init_mem(dev, ¶ms->map);
865 #endif /* CONFIG_SPL_BUILD */
867 #if CONFIG_IS_ENABLED(OF_PLATDATA)
868 static int conv_of_platdata(struct udevice *dev)
870 struct rk3288_sdram_params *plat = dev_get_platdata(dev);
871 struct dtd_rockchip_rk3288_dmc *of_plat = &plat->of_plat;
874 for (i = 0; i < 2; i++) {
875 memcpy(&plat->ch[i], of_plat->rockchip_sdram_channel,
876 sizeof(plat->ch[i]));
878 memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
879 sizeof(plat->pctl_timing));
880 memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
881 sizeof(plat->phy_timing));
882 memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));
883 plat->num_channels = of_plat->rockchip_num_channels;
884 ret = regmap_init_mem_platdata(dev, of_plat->reg,
885 ARRAY_SIZE(of_plat->reg) / 2,
894 static int rk3288_dmc_probe(struct udevice *dev)
896 #ifdef CONFIG_SPL_BUILD
897 struct rk3288_sdram_params *plat = dev_get_platdata(dev);
899 struct dram_info *priv = dev_get_priv(dev);
902 struct udevice *dev_clk;
904 #if CONFIG_IS_ENABLED(OF_PLATDATA)
905 ret = conv_of_platdata(dev);
909 map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
912 priv->chan[0].msch = regmap_get_range(map, 0);
913 priv->chan[1].msch = (struct rk3288_msch *)
914 (regmap_get_range(map, 0) + 0x80);
916 priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
917 priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);
918 priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);
920 #ifdef CONFIG_SPL_BUILD
921 priv->chan[0].pctl = regmap_get_range(plat->map, 0);
922 priv->chan[0].publ = regmap_get_range(plat->map, 1);
923 priv->chan[1].pctl = regmap_get_range(plat->map, 2);
924 priv->chan[1].publ = regmap_get_range(plat->map, 3);
926 ret = rockchip_get_clk(&dev_clk);
929 priv->ddr_clk.id = CLK_DDR;
930 ret = clk_request(dev_clk, &priv->ddr_clk);
934 priv->cru = rockchip_get_cru();
935 if (IS_ERR(priv->cru))
936 return PTR_ERR(priv->cru);
937 #ifdef CONFIG_SPL_BUILD
938 ret = setup_sdram(dev);
943 priv->info.size = sdram_size_mb(priv->pmu) << 20;
948 static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
950 struct dram_info *priv = dev_get_priv(dev);
957 static struct ram_ops rk3288_dmc_ops = {
958 .get_info = rk3288_dmc_get_info,
961 static const struct udevice_id rk3288_dmc_ids[] = {
962 { .compatible = "rockchip,rk3288-dmc" },
966 U_BOOT_DRIVER(dmc_rk3288) = {
967 .name = "rockchip_rk3288_dmc",
969 .of_match = rk3288_dmc_ids,
970 .ops = &rk3288_dmc_ops,
971 #ifdef CONFIG_SPL_BUILD
972 .ofdata_to_platdata = rk3288_dmc_ofdata_to_platdata,
974 .probe = rk3288_dmc_probe,
975 .priv_auto_alloc_size = sizeof(struct dram_info),
976 #ifdef CONFIG_SPL_BUILD
977 .platdata_auto_alloc_size = sizeof(struct rk3288_sdram_params),