1 // SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
3 * (C) Copyright 2016-2017 Rockchip Inc.
5 * Adapted from coreboot.
11 #include <dt-structs.h>
16 #include <asm/arch-rockchip/clock.h>
17 #include <asm/arch-rockchip/cru_rk3399.h>
18 #include <asm/arch-rockchip/grf_rk3399.h>
19 #include <asm/arch-rockchip/hardware.h>
20 #include <asm/arch-rockchip/sdram_common.h>
21 #include <asm/arch-rockchip/sdram_rk3399.h>
22 #include <linux/err.h>
25 #define PRESET_SGRF_HOLD(n) ((0x1 << (6 + 16)) | ((n) << 6))
26 #define PRESET_GPIO0_HOLD(n) ((0x1 << (7 + 16)) | ((n) << 7))
27 #define PRESET_GPIO1_HOLD(n) ((0x1 << (8 + 16)) | ((n) << 8))
29 #define PHY_DRV_ODT_HI_Z 0x0
30 #define PHY_DRV_ODT_240 0x1
31 #define PHY_DRV_ODT_120 0x8
32 #define PHY_DRV_ODT_80 0x9
33 #define PHY_DRV_ODT_60 0xc
34 #define PHY_DRV_ODT_48 0xd
35 #define PHY_DRV_ODT_40 0xe
36 #define PHY_DRV_ODT_34_3 0xf
38 #define PHY_BOOSTP_EN 0x1
39 #define PHY_BOOSTN_EN 0x1
40 #define PHY_SLEWP_EN 0x1
41 #define PHY_SLEWN_EN 0x1
42 #define PHY_RX_CM_INPUT 0x1
43 #define CS0_MR22_VAL 0
44 #define CS1_MR22_VAL 3
46 #define CRU_SFTRST_DDR_CTRL(ch, n) ((0x1 << (8 + 16 + (ch) * 4)) | \
47 ((n) << (8 + (ch) * 4)))
48 #define CRU_SFTRST_DDR_PHY(ch, n) ((0x1 << (9 + 16 + (ch) * 4)) | \
49 ((n) << (9 + (ch) * 4)))
51 struct rk3399_ddr_pctl_regs *pctl;
52 struct rk3399_ddr_pi_regs *pi;
53 struct rk3399_ddr_publ_regs *publ;
54 struct rk3399_msch_regs *msch;
58 #if defined(CONFIG_TPL_BUILD) || \
59 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
60 u32 pwrup_srefresh_exit[2];
61 struct chan_info chan[2];
63 struct rk3399_cru *cru;
64 struct rk3399_grf_regs *grf;
65 struct rk3399_pmucru *pmucru;
66 struct rk3399_pmusgrf_regs *pmusgrf;
67 struct rk3399_ddr_cic_regs *cic;
70 struct rk3399_pmugrf_regs *pmugrf;
73 #if defined(CONFIG_TPL_BUILD) || \
74 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
76 struct rockchip_dmc_plat {
77 #if CONFIG_IS_ENABLED(OF_PLATDATA)
78 struct dtd_rockchip_rk3399_dmc dtplat;
80 struct rk3399_sdram_params sdram_params;
101 } lpddr4_io_setting[] = {
112 PHY_DRV_ODT_HI_Z, /* rd_odt; */
113 PHY_DRV_ODT_40, /* wr_dq_drv; */
114 PHY_DRV_ODT_40, /* wr_ca_drv; */
115 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
117 41, /* rd_vref; (unit %, range 3.3% - 48.7%) */
129 PHY_DRV_ODT_HI_Z, /* rd_odt; */
130 PHY_DRV_ODT_48, /* wr_dq_drv; */
131 PHY_DRV_ODT_40, /* wr_ca_drv; */
132 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
134 32, /* rd_vref; (unit %, range 3.3% - 48.7%) */
146 PHY_DRV_ODT_40, /* rd_odt; */
147 PHY_DRV_ODT_48, /* wr_dq_drv; */
148 PHY_DRV_ODT_40, /* wr_ca_drv; */
149 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
151 17, /* rd_vref; (unit %, range 3.3% - 48.7%) */
160 0x59, /* dq_vref; 32% */
163 PHY_DRV_ODT_HI_Z, /* rd_odt; */
164 PHY_DRV_ODT_48, /* wr_dq_drv; */
165 PHY_DRV_ODT_40, /* wr_ca_drv; */
166 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
168 32, /* rd_vref; (unit %, range 3.3% - 48.7%) */
180 PHY_DRV_ODT_40, /* rd_odt; */
181 PHY_DRV_ODT_60, /* wr_dq_drv; */
182 PHY_DRV_ODT_40, /* wr_ca_drv; */
183 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
185 17, /* rd_vref; (unit %, range 3.3% - 48.7%) */
190 * phy = 0, PHY boot freq
191 * phy = 1, PHY index 0
192 * phy = 2, PHY index 1
/*
 * Select the LPDDR4 I/O setting entry matching the configured DDR frequency.
 * Scans lpddr4_io_setting[] in order and keeps the first entry whose rated
 * frequency (io->mhz) covers params->base.ddr_freq.
 * NOTE(review): interior lines are missing from this listing (the mr5-based
 * match condition, braces and the return statement) -- verify against the
 * complete file before relying on this description.
 */
194 static struct io_setting *
195 lpddr4_get_io_settings(const struct rk3399_sdram_params *params, u32 mr5)
197 struct io_setting *io = NULL;
200 for (n = 0; n < ARRAY_SIZE(lpddr4_io_setting); n++) {
201 io = &lpddr4_io_setting[n];
204 if (io->mhz >= params->base.ddr_freq &&
208 if (io->mhz >= params->base.ddr_freq)
/*
 * Return a pointer to the per-channel GRF DDRC control register:
 * channel 0 -> grf->ddrc0_con0, any other channel -> grf->ddrc0_con1.
 * NOTE(review): braces are missing from this listing.
 */
216 static void *get_ddrc0_con(struct dram_info *dram, u8 channel)
218 return (channel == 0) ? &dram->grf->ddrc0_con0 : &dram->grf->ddrc0_con1;
/*
 * Copy @n BYTES (iterated as n/4 32-bit words) from @src to the
 * memory-mapped register block at @dest.
 * NOTE(review): the loop body (the per-word register write) is missing
 * from this listing -- presumably writel(src[i], &dest[i]); confirm.
 */
221 static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
225 for (i = 0; i < n / sizeof(u32); i++) {
/*
 * Assert (@ctl/@phy = 1) or deassert (= 0) the CRU soft reset for the DDR
 * controller and DDR PHY of @channel. CRU_SFTRST_DDR_CTRL/PHY build a
 * write-enable-masked value (upper 16 bits are the mask), so a single
 * writel to softrst_con[4] updates both bits atomically.
 */
232 static void rkclk_ddr_reset(struct rk3399_cru *cru, u32 channel, u32 ctl,
238 writel(CRU_SFTRST_DDR_CTRL(channel, ctl) |
239 CRU_SFTRST_DDR_PHY(channel, phy),
240 &cru->softrst_con[4]);
/*
 * Pulse the DDR controller/PHY resets for @channel in sequence:
 * assert both, release the PHY, then release the controller.
 * NOTE(review): delay calls between the steps appear to be missing
 * from this listing -- confirm required udelay()s against the full file.
 */
243 static void phy_pctrl_reset(struct rk3399_cru *cru, u32 channel)
245 rkclk_ddr_reset(cru, channel, 1, 1);
248 rkclk_ddr_reset(cru, channel, 1, 0);
251 rkclk_ddr_reset(cru, channel, 0, 0);
/*
 * Enable or disable PHY DLL bypass mode depending on the target frequency.
 * Per the IP spec comment below, only frequencies below 125 MHz may enter
 * DLL bypass; setbits enables sw-master (bypass) mode, clrbits disables it.
 * NOTE(review): the frequency comparison and the else branch separating the
 * set/clr halves are missing from this listing -- confirm the branch shape.
 */
255 static void phy_dll_bypass_set(struct rk3399_ddr_publ_regs *ddr_publ_regs,
258 u32 *denali_phy = ddr_publ_regs->denali_phy;
260 /* From IP spec, only freq small than 125 can enter dll bypass mode */
262 /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
263 setbits_le32(&denali_phy[86], (0x3 << 2) << 8);
264 setbits_le32(&denali_phy[214], (0x3 << 2) << 8);
265 setbits_le32(&denali_phy[342], (0x3 << 2) << 8);
266 setbits_le32(&denali_phy[470], (0x3 << 2) << 8);
268 /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
269 setbits_le32(&denali_phy[547], (0x3 << 2) << 16);
270 setbits_le32(&denali_phy[675], (0x3 << 2) << 16);
271 setbits_le32(&denali_phy[803], (0x3 << 2) << 16);
273 /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
274 clrbits_le32(&denali_phy[86], (0x3 << 2) << 8);
275 clrbits_le32(&denali_phy[214], (0x3 << 2) << 8);
276 clrbits_le32(&denali_phy[342], (0x3 << 2) << 8);
277 clrbits_le32(&denali_phy[470], (0x3 << 2) << 8);
279 /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
280 clrbits_le32(&denali_phy[547], (0x3 << 2) << 16);
281 clrbits_le32(&denali_phy[675], (0x3 << 2) << 16);
282 clrbits_le32(&denali_phy[803], (0x3 << 2) << 16);
/*
 * Program the DRAM geometry (column/bank/row differences, chip-select map,
 * half-width reduction) into both the controller (denali_ctl) and the PI
 * (denali_pi) register sets for @channel, based on the detected capacity
 * info in params->ch[channel].
 * NOTE(review): "¶ms" below is an encoding artifact of "&params";
 * several lines (row assignment, LPDDR4 cs_map adjustment body) are
 * missing from this listing.
 */
286 static void set_memory_map(const struct chan_info *chan, u32 channel,
287 const struct rk3399_sdram_params *params)
289 const struct rk3399_sdram_channel *sdram_ch = ¶ms->ch[channel];
290 u32 *denali_ctl = chan->pctl->denali_ctl;
291 u32 *denali_pi = chan->pi->denali_pi;
296 /* Get row number from ddrconfig setting */
297 if (sdram_ch->cap_info.ddrconfig < 2 ||
298 sdram_ch->cap_info.ddrconfig == 4)
300 else if (sdram_ch->cap_info.ddrconfig == 3)
305 cs_map = (sdram_ch->cap_info.rank > 1) ? 3 : 1;
306 reduc = (sdram_ch->cap_info.bw == 2) ? 0 : 1;
308 /* Set the dram configuration to ctrl */
309 clrsetbits_le32(&denali_ctl[191], 0xF, (12 - sdram_ch->cap_info.col));
310 clrsetbits_le32(&denali_ctl[190], (0x3 << 16) | (0x7 << 24),
311 ((3 - sdram_ch->cap_info.bk) << 16) |
314 clrsetbits_le32(&denali_ctl[196], 0x3 | (1 << 16),
315 cs_map | (reduc << 16));
317 /* PI_199 PI_COL_DIFF:RW:0:4 */
318 clrsetbits_le32(&denali_pi[199], 0xF, (12 - sdram_ch->cap_info.col));
320 /* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
321 clrsetbits_le32(&denali_pi[155], (0x3 << 16) | (0x7 << 24),
322 ((3 - sdram_ch->cap_info.bk) << 16) |
325 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
328 else if (cs_map == 2)
334 /* PI_41 PI_CS_MAP:RW:24:4 */
335 clrsetbits_le32(&denali_pi[41], 0xf << 24, cs_map << 24);
336 if (sdram_ch->cap_info.rank == 1 && params->base.dramtype == DDR3)
337 writel(0x2EC7FFFF, &denali_pi[34]);
/*
 * Configure PHY pad I/O: vref mode/value for DQ and AC pads, pad mode
 * select, and (for LPDDR4 builds) boost, slew, RX common-mode input and
 * speed bits, all keyed off the DRAM type and frequency in @params.
 * Returns an int status (the return statements are missing from this
 * listing; error paths print via debug() before returning).
 * NOTE(review): the `switch (odt_value)` headers, `break`s, `mode_sel`
 * and `speed` assignments are missing lines in this listing -- confirm
 * the control flow against the complete file.
 */
340 static int phy_io_config(const struct chan_info *chan,
341 const struct rk3399_sdram_params *params, u32 mr5)
343 u32 *denali_phy = chan->publ->denali_phy;
344 u32 vref_mode_dq, vref_value_dq, vref_mode_ac, vref_value_ac;
347 u32 drv_value, odt_value;
350 /* vref setting & mode setting */
351 if (params->base.dramtype == LPDDR4) {
352 struct io_setting *io = lpddr4_get_io_settings(params, mr5);
353 u32 rd_vref = io->rd_vref * 1000;
355 if (rd_vref < 36700) {
356 /* MODE_LV[2:0] = LPDDR4 (Range 2)*/
358 /* MODE[2:0]= LPDDR4 Range 2(0.4*VDDQ) */
360 vref_value_dq = (rd_vref - 3300) / 521;
362 /* MODE_LV[2:0] = LPDDR4 (Range 1)*/
364 /* MODE[2:0]= LPDDR4 Range 1(0.33*VDDQ) */
366 vref_value_dq = (rd_vref - 15300) / 521;
371 } else if (params->base.dramtype == LPDDR3) {
372 if (params->base.odt == 1) {
373 vref_mode_dq = 0x5; /* LPDDR3 ODT */
374 drv_value = (readl(&denali_phy[6]) >> 12) & 0xf;
375 odt_value = (readl(&denali_phy[6]) >> 4) & 0xf;
376 if (drv_value == PHY_DRV_ODT_48) {
378 case PHY_DRV_ODT_240:
379 vref_value_dq = 0x16;
381 case PHY_DRV_ODT_120:
382 vref_value_dq = 0x26;
385 vref_value_dq = 0x36;
388 debug("Invalid ODT value.\n");
391 } else if (drv_value == PHY_DRV_ODT_40) {
393 case PHY_DRV_ODT_240:
394 vref_value_dq = 0x19;
396 case PHY_DRV_ODT_120:
397 vref_value_dq = 0x23;
400 vref_value_dq = 0x31;
403 debug("Invalid ODT value.\n");
406 } else if (drv_value == PHY_DRV_ODT_34_3) {
408 case PHY_DRV_ODT_240:
409 vref_value_dq = 0x17;
411 case PHY_DRV_ODT_120:
412 vref_value_dq = 0x20;
415 vref_value_dq = 0x2e;
418 debug("Invalid ODT value.\n");
422 debug("Invalid DRV value.\n");
426 vref_mode_dq = 0x2; /* LPDDR3 */
427 vref_value_dq = 0x1f;
430 vref_value_ac = 0x1f;
432 } else if (params->base.dramtype == DDR3) {
435 vref_value_dq = 0x1f;
437 vref_value_ac = 0x1f;
440 debug("Unknown DRAM type.\n");
444 reg_value = (vref_mode_dq << 9) | (0x1 << 8) | vref_value_dq;
446 /* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
447 clrsetbits_le32(&denali_phy[913], 0xfff << 8, reg_value << 8);
448 /* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
449 clrsetbits_le32(&denali_phy[914], 0xfff, reg_value);
450 /* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
451 clrsetbits_le32(&denali_phy[914], 0xfff << 16, reg_value << 16);
452 /* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
453 clrsetbits_le32(&denali_phy[915], 0xfff, reg_value);
455 reg_value = (vref_mode_ac << 9) | (0x1 << 8) | vref_value_ac;
457 /* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
458 clrsetbits_le32(&denali_phy[915], 0xfff << 16, reg_value << 16);
460 /* PHY_924 PHY_PAD_FDBK_DRIVE */
461 clrsetbits_le32(&denali_phy[924], 0x7 << 15, mode_sel << 15);
462 /* PHY_926 PHY_PAD_DATA_DRIVE */
463 clrsetbits_le32(&denali_phy[926], 0x7 << 6, mode_sel << 6);
464 /* PHY_927 PHY_PAD_DQS_DRIVE */
465 clrsetbits_le32(&denali_phy[927], 0x7 << 6, mode_sel << 6);
466 /* PHY_928 PHY_PAD_ADDR_DRIVE */
467 clrsetbits_le32(&denali_phy[928], 0x7 << 14, mode_sel << 14);
468 /* PHY_929 PHY_PAD_CLK_DRIVE */
469 clrsetbits_le32(&denali_phy[929], 0x7 << 14, mode_sel << 14);
470 /* PHY_935 PHY_PAD_CKE_DRIVE */
471 clrsetbits_le32(&denali_phy[935], 0x7 << 14, mode_sel << 14);
472 /* PHY_937 PHY_PAD_RST_DRIVE */
473 clrsetbits_le32(&denali_phy[937], 0x7 << 14, mode_sel << 14);
474 /* PHY_939 PHY_PAD_CS_DRIVE */
475 clrsetbits_le32(&denali_phy[939], 0x7 << 14, mode_sel << 14);
477 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
478 /* BOOSTP_EN & BOOSTN_EN */
479 reg_value = ((PHY_BOOSTP_EN << 4) | PHY_BOOSTN_EN);
480 /* PHY_925 PHY_PAD_FDBK_DRIVE2 */
481 clrsetbits_le32(&denali_phy[925], 0xff << 8, reg_value << 8);
482 /* PHY_926 PHY_PAD_DATA_DRIVE */
483 clrsetbits_le32(&denali_phy[926], 0xff << 12, reg_value << 12);
484 /* PHY_927 PHY_PAD_DQS_DRIVE */
485 clrsetbits_le32(&denali_phy[927], 0xff << 14, reg_value << 14);
486 /* PHY_928 PHY_PAD_ADDR_DRIVE */
487 clrsetbits_le32(&denali_phy[928], 0xff << 20, reg_value << 20);
488 /* PHY_929 PHY_PAD_CLK_DRIVE */
489 clrsetbits_le32(&denali_phy[929], 0xff << 22, reg_value << 22);
490 /* PHY_935 PHY_PAD_CKE_DRIVE */
491 clrsetbits_le32(&denali_phy[935], 0xff << 20, reg_value << 20);
492 /* PHY_937 PHY_PAD_RST_DRIVE */
493 clrsetbits_le32(&denali_phy[937], 0xff << 20, reg_value << 20);
494 /* PHY_939 PHY_PAD_CS_DRIVE */
495 clrsetbits_le32(&denali_phy[939], 0xff << 20, reg_value << 20);
497 /* SLEWP_EN & SLEWN_EN */
498 reg_value = ((PHY_SLEWP_EN << 3) | PHY_SLEWN_EN);
499 /* PHY_924 PHY_PAD_FDBK_DRIVE */
500 clrsetbits_le32(&denali_phy[924], 0x3f << 8, reg_value << 8);
501 /* PHY_926 PHY_PAD_DATA_DRIVE */
502 clrsetbits_le32(&denali_phy[926], 0x3f, reg_value);
503 /* PHY_927 PHY_PAD_DQS_DRIVE */
504 clrsetbits_le32(&denali_phy[927], 0x3f, reg_value);
505 /* PHY_928 PHY_PAD_ADDR_DRIVE */
506 clrsetbits_le32(&denali_phy[928], 0x3f << 8, reg_value << 8);
507 /* PHY_929 PHY_PAD_CLK_DRIVE */
508 clrsetbits_le32(&denali_phy[929], 0x3f << 8, reg_value << 8);
509 /* PHY_935 PHY_PAD_CKE_DRIVE */
510 clrsetbits_le32(&denali_phy[935], 0x3f << 8, reg_value << 8);
511 /* PHY_937 PHY_PAD_RST_DRIVE */
512 clrsetbits_le32(&denali_phy[937], 0x3f << 8, reg_value << 8);
513 /* PHY_939 PHY_PAD_CS_DRIVE */
514 clrsetbits_le32(&denali_phy[939], 0x3f << 8, reg_value << 8);
518 if (params->base.ddr_freq < 400)
520 else if (params->base.ddr_freq < 800)
522 else if (params->base.ddr_freq < 1200)
527 /* PHY_924 PHY_PAD_FDBK_DRIVE */
528 clrsetbits_le32(&denali_phy[924], 0x3 << 21, speed << 21);
529 /* PHY_926 PHY_PAD_DATA_DRIVE */
530 clrsetbits_le32(&denali_phy[926], 0x3 << 9, speed << 9);
531 /* PHY_927 PHY_PAD_DQS_DRIVE */
532 clrsetbits_le32(&denali_phy[927], 0x3 << 9, speed << 9);
533 /* PHY_928 PHY_PAD_ADDR_DRIVE */
534 clrsetbits_le32(&denali_phy[928], 0x3 << 17, speed << 17);
535 /* PHY_929 PHY_PAD_CLK_DRIVE */
536 clrsetbits_le32(&denali_phy[929], 0x3 << 17, speed << 17);
537 /* PHY_935 PHY_PAD_CKE_DRIVE */
538 clrsetbits_le32(&denali_phy[935], 0x3 << 17, speed << 17);
539 /* PHY_937 PHY_PAD_RST_DRIVE */
540 clrsetbits_le32(&denali_phy[937], 0x3 << 17, speed << 17);
541 /* PHY_939 PHY_PAD_CS_DRIVE */
542 clrsetbits_le32(&denali_phy[939], 0x3 << 17, speed << 17);
544 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
546 reg_value = PHY_RX_CM_INPUT;
547 /* PHY_924 PHY_PAD_FDBK_DRIVE */
548 clrsetbits_le32(&denali_phy[924], 0x1 << 14, reg_value << 14);
549 /* PHY_926 PHY_PAD_DATA_DRIVE */
550 clrsetbits_le32(&denali_phy[926], 0x1 << 11, reg_value << 11);
551 /* PHY_927 PHY_PAD_DQS_DRIVE */
552 clrsetbits_le32(&denali_phy[927], 0x1 << 13, reg_value << 13);
553 /* PHY_928 PHY_PAD_ADDR_DRIVE */
554 clrsetbits_le32(&denali_phy[928], 0x1 << 19, reg_value << 19);
555 /* PHY_929 PHY_PAD_CLK_DRIVE */
556 clrsetbits_le32(&denali_phy[929], 0x1 << 21, reg_value << 21);
557 /* PHY_935 PHY_PAD_CKE_DRIVE */
558 clrsetbits_le32(&denali_phy[935], 0x1 << 19, reg_value << 19);
559 /* PHY_937 PHY_PAD_RST_DRIVE */
560 clrsetbits_le32(&denali_phy[937], 0x1 << 19, reg_value << 19);
561 /* PHY_939 PHY_PAD_CS_DRIVE */
562 clrsetbits_le32(&denali_phy[939], 0x1 << 19, reg_value << 19);
/*
 * Program drive strength and on-die-termination (tsel) selections for the
 * PHY pads, per DRAM type: read/idle termination, DQ/CA write drive, and
 * CK/CS drive; then propagate the MR22 SoC-ODT value into the controller
 * mode registers (LPDDR4) and set the tsel enables. Ends by calling
 * phy_io_config() to finish pad configuration.
 * NOTE(review): the switch bodies mapping tsel_rd_select_n to soc_odt,
 * the tsel_*_en assignments, and the final DDR3 else-branch header are
 * missing lines in this listing -- confirm against the complete file.
 */
568 static void set_ds_odt(const struct chan_info *chan,
569 const struct rk3399_sdram_params *params, u32 mr5)
571 u32 *denali_phy = chan->publ->denali_phy;
572 u32 *denali_ctl = chan->pctl->denali_ctl;
573 u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
574 u32 tsel_idle_select_p, tsel_rd_select_p;
575 u32 tsel_idle_select_n, tsel_rd_select_n;
576 u32 tsel_wr_select_dq_p, tsel_wr_select_ca_p;
577 u32 tsel_wr_select_dq_n, tsel_wr_select_ca_n;
578 u32 tsel_ckcs_select_p, tsel_ckcs_select_n;
579 struct io_setting *io = NULL;
583 if (params->base.dramtype == LPDDR4) {
584 io = lpddr4_get_io_settings(params, mr5);
586 tsel_rd_select_p = PHY_DRV_ODT_HI_Z;
587 tsel_rd_select_n = io->rd_odt;
589 tsel_idle_select_p = PHY_DRV_ODT_HI_Z;
590 tsel_idle_select_n = PHY_DRV_ODT_240;
592 tsel_wr_select_dq_p = io->wr_dq_drv;
593 tsel_wr_select_dq_n = PHY_DRV_ODT_40;
595 tsel_wr_select_ca_p = io->wr_ca_drv;
596 tsel_wr_select_ca_n = PHY_DRV_ODT_40;
598 tsel_ckcs_select_p = io->wr_ckcs_drv;
599 tsel_ckcs_select_n = PHY_DRV_ODT_34_3;
600 switch (tsel_rd_select_n) {
601 case PHY_DRV_ODT_240:
604 case PHY_DRV_ODT_120:
619 case PHY_DRV_ODT_34_3:
621 printf("%s: Unable to support LPDDR4 MR22 Soc ODT\n",
624 case PHY_DRV_ODT_HI_Z:
629 } else if (params->base.dramtype == LPDDR3) {
630 tsel_rd_select_p = PHY_DRV_ODT_240;
631 tsel_rd_select_n = PHY_DRV_ODT_HI_Z;
633 tsel_idle_select_p = PHY_DRV_ODT_240;
634 tsel_idle_select_n = PHY_DRV_ODT_HI_Z;
636 tsel_wr_select_dq_p = PHY_DRV_ODT_34_3;
637 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
639 tsel_wr_select_ca_p = PHY_DRV_ODT_48;
640 tsel_wr_select_ca_n = PHY_DRV_ODT_48;
642 tsel_ckcs_select_p = PHY_DRV_ODT_34_3;
643 tsel_ckcs_select_n = PHY_DRV_ODT_34_3;
645 tsel_rd_select_p = PHY_DRV_ODT_240;
646 tsel_rd_select_n = PHY_DRV_ODT_240;
648 tsel_idle_select_p = PHY_DRV_ODT_240;
649 tsel_idle_select_n = PHY_DRV_ODT_240;
651 tsel_wr_select_dq_p = PHY_DRV_ODT_34_3;
652 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
654 tsel_wr_select_ca_p = PHY_DRV_ODT_34_3;
655 tsel_wr_select_ca_n = PHY_DRV_ODT_34_3;
657 tsel_ckcs_select_p = PHY_DRV_ODT_34_3;
658 tsel_ckcs_select_n = PHY_DRV_ODT_34_3;
661 if (params->base.odt == 1) {
664 if (params->base.dramtype == LPDDR4)
665 tsel_rd_en = io->rd_odt_en;
674 clrsetbits_le32(&denali_ctl[145], 0xFF << 16,
675 (soc_odt | (CS0_MR22_VAL << 3)) << 16);
677 clrsetbits_le32(&denali_ctl[146], 0xFF00FF,
678 ((soc_odt | (CS0_MR22_VAL << 3)) << 16) |
679 (soc_odt | (CS0_MR22_VAL << 3)));
681 clrsetbits_le32(&denali_ctl[159], 0xFF << 16,
682 (soc_odt | (CS1_MR22_VAL << 3)) << 16);
684 clrsetbits_le32(&denali_ctl[160], 0xFF00FF,
685 ((soc_odt | (CS1_MR22_VAL << 3)) << 16) |
686 (soc_odt | (CS1_MR22_VAL << 3)));
689 * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
690 * sets termination values for read/idle cycles and drive strength
691 * for write cycles for DQ/DM
693 reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
694 (tsel_wr_select_dq_n << 8) | (tsel_wr_select_dq_p << 12) |
695 (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
696 clrsetbits_le32(&denali_phy[6], 0xffffff, reg_value);
697 clrsetbits_le32(&denali_phy[134], 0xffffff, reg_value);
698 clrsetbits_le32(&denali_phy[262], 0xffffff, reg_value);
699 clrsetbits_le32(&denali_phy[390], 0xffffff, reg_value);
702 * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
703 * sets termination values for read/idle cycles and drive strength
704 * for write cycles for DQS
706 clrsetbits_le32(&denali_phy[7], 0xffffff, reg_value);
707 clrsetbits_le32(&denali_phy[135], 0xffffff, reg_value);
708 clrsetbits_le32(&denali_phy[263], 0xffffff, reg_value);
709 clrsetbits_le32(&denali_phy[391], 0xffffff, reg_value);
711 /* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
712 reg_value = tsel_wr_select_ca_n | (tsel_wr_select_ca_p << 0x4);
713 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
714 /* LPDDR4 these register read always return 0, so
715 * can not use clrsetbits_le32(), need to write32
717 writel((0x300 << 8) | reg_value, &denali_phy[544]);
718 writel((0x300 << 8) | reg_value, &denali_phy[672]);
719 writel((0x300 << 8) | reg_value, &denali_phy[800]);
721 clrsetbits_le32(&denali_phy[544], 0xff, reg_value);
722 clrsetbits_le32(&denali_phy[672], 0xff, reg_value);
723 clrsetbits_le32(&denali_phy[800], 0xff, reg_value);
726 /* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
727 clrsetbits_le32(&denali_phy[928], 0xff, reg_value);
729 /* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
730 clrsetbits_le32(&denali_phy[937], 0xff, reg_value);
732 /* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
733 clrsetbits_le32(&denali_phy[935], 0xff, reg_value);
735 /* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
736 clrsetbits_le32(&denali_phy[939], 0xff,
737 tsel_ckcs_select_n | (tsel_ckcs_select_p << 0x4));
739 /* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
740 clrsetbits_le32(&denali_phy[929], 0xff,
741 tsel_ckcs_select_n | (tsel_ckcs_select_p << 0x4));
743 /* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
744 clrsetbits_le32(&denali_phy[924], 0xff,
745 tsel_wr_select_dq_n | (tsel_wr_select_dq_p << 4));
746 clrsetbits_le32(&denali_phy[925], 0xff,
747 tsel_rd_select_n | (tsel_rd_select_p << 4));
749 /* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
750 reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
752 clrsetbits_le32(&denali_phy[5], 0x7 << 16, reg_value);
753 clrsetbits_le32(&denali_phy[133], 0x7 << 16, reg_value);
754 clrsetbits_le32(&denali_phy[261], 0x7 << 16, reg_value);
755 clrsetbits_le32(&denali_phy[389], 0x7 << 16, reg_value);
757 /* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
758 reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
760 clrsetbits_le32(&denali_phy[6], 0x7 << 24, reg_value);
761 clrsetbits_le32(&denali_phy[134], 0x7 << 24, reg_value);
762 clrsetbits_le32(&denali_phy[262], 0x7 << 24, reg_value);
763 clrsetbits_le32(&denali_phy[390], 0x7 << 24, reg_value);
765 /* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
766 reg_value = tsel_wr_en << 8;
767 clrsetbits_le32(&denali_phy[518], 0x1 << 8, reg_value);
768 clrsetbits_le32(&denali_phy[646], 0x1 << 8, reg_value);
769 clrsetbits_le32(&denali_phy[774], 0x1 << 8, reg_value);
771 /* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
772 reg_value = tsel_wr_en << 17;
773 clrsetbits_le32(&denali_phy[933], 0x1 << 17, reg_value);
775 * pad_rst/cke/cs/clk_term tsel 1bits
776 * DENALI_PHY_938/936/940/934 offset_17
778 clrsetbits_le32(&denali_phy[938], 0x1 << 17, reg_value);
779 clrsetbits_le32(&denali_phy[936], 0x1 << 17, reg_value);
780 clrsetbits_le32(&denali_phy[940], 0x1 << 17, reg_value);
781 clrsetbits_le32(&denali_phy[934], 0x1 << 17, reg_value);
783 /* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
784 clrsetbits_le32(&denali_phy[930], 0x1 << 17, reg_value);
786 phy_io_config(chan, params, mr5);
/*
 * Kick off the DDR controller for @channel: flag start via the GRF ddrc
 * con register, release the PHY DLL reset (PHY_957[25:24] = 2), poll
 * CTL_203 bit 3 for init done, then copy the trained write-leveling
 * values into the per-byte slave delay registers (PHY_53..58 + 128*byte)
 * and restore the saved PWRUP_SREFRESH_EXIT bit in CTL_68.
 * NOTE(review): ddrc0_con is already a pointer returned by
 * get_ddrc0_con(); writel(..., &ddrc0_con) takes the address of the
 * local variable, which would write to the stack rather than the GRF
 * register -- confirm against the upstream file whether this is a
 * listing artifact or a real bug.
 * NOTE(review): the timeout handling around the poll loop and the tmp
 * assignment inside the byte loop are missing lines in this listing.
 */
789 static void pctl_start(struct dram_info *dram, u8 channel)
791 const struct chan_info *chan = &dram->chan[channel];
792 u32 *denali_ctl = chan->pctl->denali_ctl;
793 u32 *denali_phy = chan->publ->denali_phy;
794 u32 *ddrc0_con = get_ddrc0_con(dram, channel);
798 writel(0x01000000, &ddrc0_con);
800 clrsetbits_le32(&denali_phy[957], 0x3 << 24, 0x2 << 24);
802 while (!(readl(&denali_ctl[203]) & (1 << 3))) {
804 printf("%s: Failed to init pctl for channel %d\n",
814 writel(0x01000100, &ddrc0_con);
816 for (byte = 0; byte < 4; byte++) {
818 writel((tmp << 16) | tmp, &denali_phy[53 + (128 * byte)]);
819 writel((tmp << 16) | tmp, &denali_phy[54 + (128 * byte)]);
820 writel((tmp << 16) | tmp, &denali_phy[55 + (128 * byte)]);
821 writel((tmp << 16) | tmp, &denali_phy[56 + (128 * byte)]);
822 writel((tmp << 16) | tmp, &denali_phy[57 + (128 * byte)]);
824 clrsetbits_le32(&denali_phy[58 + (128 * byte)], 0xffff, tmp);
827 clrsetbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT,
828 dram->pwrup_srefresh_exit[channel]);
/*
 * Load the full controller (CTL), PI and PHY register images from
 * @params into channel @channel's register blocks, apply the memory
 * map, stagger channel-1 ZQ calibration for LPDDR4, start PI+CTL, wait
 * for PHY DLL lock (non-LPDDR4), then finish PHY programming and bump
 * the dqs/dq tsel write-end timings by half a cycle (+0x10 in [7:4]).
 * Returns an int status (return statements are missing from this
 * listing).
 * NOTE(review): "¶ms" below is an encoding artifact of "&params";
 * the poll-loop scaffolding around the DLL-lock check is also missing
 * lines -- confirm against the complete file.
 */
831 static int pctl_cfg(struct dram_info *dram, const struct chan_info *chan,
832 u32 channel, struct rk3399_sdram_params *params)
834 u32 *denali_ctl = chan->pctl->denali_ctl;
835 u32 *denali_pi = chan->pi->denali_pi;
836 u32 *denali_phy = chan->publ->denali_phy;
837 const u32 *params_ctl = params->pctl_regs.denali_ctl;
838 const u32 *params_phy = params->phy_regs.denali_phy;
842 * work around controller bug:
843 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
845 copy_to_reg(&denali_ctl[1], ¶ms_ctl[1],
846 sizeof(struct rk3399_ddr_pctl_regs) - 4);
847 writel(params_ctl[0], &denali_ctl[0]);
850 * two channel init at the same time, then ZQ Cal Start
851 * at the same time, it will use the same RZQ, but cannot
852 * start at the same time.
854 * So, increase tINIT3 for channel 1, will avoid two
855 * channel ZQ Cal Start at the same time
857 if (params->base.dramtype == LPDDR4 && channel == 1) {
858 tmp = ((params->base.ddr_freq * MHz + 999) / 1000);
859 tmp1 = readl(&denali_ctl[14]);
860 writel(tmp + tmp1, &denali_ctl[14]);
863 copy_to_reg(denali_pi, ¶ms->pi_regs.denali_pi[0],
864 sizeof(struct rk3399_ddr_pi_regs));
866 /* rank count need to set for init */
867 set_memory_map(chan, channel, params);
869 writel(params->phy_regs.denali_phy[910], &denali_phy[910]);
870 writel(params->phy_regs.denali_phy[911], &denali_phy[911]);
871 writel(params->phy_regs.denali_phy[912], &denali_phy[912]);
873 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
874 writel(params->phy_regs.denali_phy[898], &denali_phy[898]);
875 writel(params->phy_regs.denali_phy[919], &denali_phy[919]);
878 dram->pwrup_srefresh_exit[channel] = readl(&denali_ctl[68]) &
880 clrbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT);
883 clrsetbits_le32(&denali_phy[957], 0x3 << 24, 1 << 24);
885 setbits_le32(&denali_pi[0], START);
886 setbits_le32(&denali_ctl[0], START);
889 * LPDDR4 use PLL bypass mode for init
890 * not need to wait for the PLL to lock
892 if (params->base.dramtype != LPDDR4) {
893 /* Waiting for phy DLL lock */
895 tmp = readl(&denali_phy[920]);
896 tmp1 = readl(&denali_phy[921]);
897 tmp2 = readl(&denali_phy[922]);
898 if ((((tmp >> 16) & 0x1) == 0x1) &&
899 (((tmp1 >> 16) & 0x1) == 0x1) &&
900 (((tmp1 >> 0) & 0x1) == 0x1) &&
901 (((tmp2 >> 0) & 0x1) == 0x1))
906 copy_to_reg(&denali_phy[896], ¶ms_phy[896], (958 - 895) * 4);
907 copy_to_reg(&denali_phy[0], ¶ms_phy[0], (90 - 0 + 1) * 4);
908 copy_to_reg(&denali_phy[128], ¶ms_phy[128], (218 - 128 + 1) * 4);
909 copy_to_reg(&denali_phy[256], ¶ms_phy[256], (346 - 256 + 1) * 4);
910 copy_to_reg(&denali_phy[384], ¶ms_phy[384], (474 - 384 + 1) * 4);
911 copy_to_reg(&denali_phy[512], ¶ms_phy[512], (549 - 512 + 1) * 4);
912 copy_to_reg(&denali_phy[640], ¶ms_phy[640], (677 - 640 + 1) * 4);
913 copy_to_reg(&denali_phy[768], ¶ms_phy[768], (805 - 768 + 1) * 4);
914 set_ds_odt(chan, params, 0);
917 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8
918 * dqs_tsel_wr_end[7:4] add Half cycle
920 tmp = (readl(&denali_phy[84]) >> 8) & 0xff;
921 clrsetbits_le32(&denali_phy[84], 0xff << 8, (tmp + 0x10) << 8);
922 tmp = (readl(&denali_phy[212]) >> 8) & 0xff;
923 clrsetbits_le32(&denali_phy[212], 0xff << 8, (tmp + 0x10) << 8);
924 tmp = (readl(&denali_phy[340]) >> 8) & 0xff;
925 clrsetbits_le32(&denali_phy[340], 0xff << 8, (tmp + 0x10) << 8);
926 tmp = (readl(&denali_phy[468]) >> 8) & 0xff;
927 clrsetbits_le32(&denali_phy[468], 0xff << 8, (tmp + 0x10) << 8);
930 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_8
931 * dq_tsel_wr_end[7:4] add Half cycle
933 tmp = (readl(&denali_phy[83]) >> 16) & 0xff;
934 clrsetbits_le32(&denali_phy[83], 0xff << 16, (tmp + 0x10) << 16);
935 tmp = (readl(&denali_phy[211]) >> 16) & 0xff;
936 clrsetbits_le32(&denali_phy[211], 0xff << 16, (tmp + 0x10) << 16);
937 tmp = (readl(&denali_phy[339]) >> 16) & 0xff;
938 clrsetbits_le32(&denali_phy[339], 0xff << 16, (tmp + 0x10) << 16);
939 tmp = (readl(&denali_phy[467]) >> 16) & 0xff;
940 clrsetbits_le32(&denali_phy[467], 0xff << 16, (tmp + 0x10) << 16);
/*
 * If per-CS training is enabled (PHY_84 bit 16), select rank @rank as the
 * training index in each data slice (PHY_8/136/264/392 bit 24) so the
 * following training step targets that chip select.
 */
945 static void select_per_cs_training_index(const struct chan_info *chan,
948 u32 *denali_phy = chan->publ->denali_phy;
950 /* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
951 if ((readl(&denali_phy[84]) >> 16) & 1) {
954 * phy_per_cs_training_index_X 1bit offset_24
956 clrsetbits_le32(&denali_phy[8], 0x1 << 24, rank << 24);
957 clrsetbits_le32(&denali_phy[136], 0x1 << 24, rank << 24);
958 clrsetbits_le32(&denali_phy[264], 0x1 << 24, rank << 24);
959 clrsetbits_le32(&denali_phy[392], 0x1 << 24, rank << 24);
/*
 * After write leveling, force the leveled values to all ranks: enable
 * frequency-select multicast (PHY_896 bit 0), enable per-CS training
 * multicast in every slice (bit 16 of PHY_8/136/264/392), write the
 * per-byte values via PHY_63 + 128*byte, then disable multicast and
 * request a controller update (CTL_200 ctrlupd_req bit 8).
 * NOTE(review): the value written in the byte loop is missing from
 * this listing (the clrsetbits third argument is cut off).
 */
963 static void override_write_leveling_value(const struct chan_info *chan)
965 u32 *denali_ctl = chan->pctl->denali_ctl;
966 u32 *denali_phy = chan->publ->denali_phy;
969 /* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
970 setbits_le32(&denali_phy[896], 1);
974 * phy_per_cs_training_multicast_en_X 1bit offset_16
976 clrsetbits_le32(&denali_phy[8], 0x1 << 16, 1 << 16);
977 clrsetbits_le32(&denali_phy[136], 0x1 << 16, 1 << 16);
978 clrsetbits_le32(&denali_phy[264], 0x1 << 16, 1 << 16);
979 clrsetbits_le32(&denali_phy[392], 0x1 << 16, 1 << 16);
981 for (byte = 0; byte < 4; byte++)
982 clrsetbits_le32(&denali_phy[63 + (128 * byte)], 0xffff << 16,
985 /* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
986 clrbits_le32(&denali_phy[896], 1);
988 /* CTL_200 ctrlupd_req 1bit offset_8 */
989 clrsetbits_le32(&denali_ctl[200], 0x1 << 8, 0x1 << 8);
/*
 * Run command/address (CA) leveling for each rank of @channel via the PI:
 * for every rank selected by rank_mask (LPDDR4 uses CS 0 and 2, hence
 * 0x5/0xf), enable CALVL (PI_100), request it for that CS (PI_92), poll
 * the PI interrupt status (PI_174) and the PHY CA-leveling observation
 * registers for errors, and clear interrupts (PI_175) when done.
 * Returns an int status (return statements are missing from this listing).
 * NOTE(review): the comment says PHY_532/660/789 but the code reads
 * denali_phy[788] -- the comment/code disagree by one register; verify
 * against the full file which is authoritative.
 */
992 static int data_training_ca(const struct chan_info *chan, u32 channel,
993 const struct rk3399_sdram_params *params)
995 u32 *denali_pi = chan->pi->denali_pi;
996 u32 *denali_phy = chan->publ->denali_phy;
998 u32 obs_0, obs_1, obs_2, obs_err = 0;
999 u32 rank = params->ch[channel].cap_info.rank;
1002 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1003 writel(0x00003f7c, (&denali_pi[175]));
1005 if (params->base.dramtype == LPDDR4)
1006 rank_mask = (rank == 1) ? 0x5 : 0xf;
1008 rank_mask = (rank == 1) ? 0x1 : 0x3;
1010 for (i = 0; i < 4; i++) {
1011 if (!(rank_mask & (1 << i)))
1014 select_per_cs_training_index(chan, i);
1016 /* PI_100 PI_CALVL_EN:RW:8:2 */
1017 clrsetbits_le32(&denali_pi[100], 0x3 << 8, 0x2 << 8);
1019 /* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
1020 clrsetbits_le32(&denali_pi[92],
1021 (0x1 << 16) | (0x3 << 24),
1022 (0x1 << 16) | (i << 24));
1024 /* Waiting for training complete */
1026 /* PI_174 PI_INT_STATUS:RD:8:18 */
1027 tmp = readl(&denali_pi[174]) >> 8;
1030 * PHY_532/660/789 phy_adr_calvl_obs1_:0:32
1032 obs_0 = readl(&denali_phy[532]);
1033 obs_1 = readl(&denali_phy[660]);
1034 obs_2 = readl(&denali_phy[788]);
1035 if (((obs_0 >> 30) & 0x3) ||
1036 ((obs_1 >> 30) & 0x3) ||
1037 ((obs_2 >> 30) & 0x3))
1039 if ((((tmp >> 11) & 0x1) == 0x1) &&
1040 (((tmp >> 13) & 0x1) == 0x1) &&
1041 (((tmp >> 5) & 0x1) == 0x0) &&
1044 else if ((((tmp >> 5) & 0x1) == 0x1) ||
1049 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1050 writel(0x00003f7c, (&denali_pi[175]));
1053 clrbits_le32(&denali_pi[100], 0x3 << 8);
/*
 * Run write leveling for each rank of @channel: enable WRLVL (PI_60),
 * request it per CS (PI_59), poll PI_174 for completion and the per-slice
 * write-leveling observation registers (PHY_40/168/296/424 bit 12) for
 * errors, then propagate the result to all ranks via
 * override_write_leveling_value() and disable WRLVL.
 * Returns an int status (return statements are missing from this listing).
 */
1058 static int data_training_wl(const struct chan_info *chan, u32 channel,
1059 const struct rk3399_sdram_params *params)
1061 u32 *denali_pi = chan->pi->denali_pi;
1062 u32 *denali_phy = chan->publ->denali_phy;
1064 u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
1065 u32 rank = params->ch[channel].cap_info.rank;
1067 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1068 writel(0x00003f7c, (&denali_pi[175]));
1070 for (i = 0; i < rank; i++) {
1071 select_per_cs_training_index(chan, i);
1073 /* PI_60 PI_WRLVL_EN:RW:8:2 */
1074 clrsetbits_le32(&denali_pi[60], 0x3 << 8, 0x2 << 8);
1076 /* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
1077 clrsetbits_le32(&denali_pi[59],
1078 (0x1 << 8) | (0x3 << 16),
1079 (0x1 << 8) | (i << 16));
1081 /* Waiting for training complete */
1083 /* PI_174 PI_INT_STATUS:RD:8:18 */
1084 tmp = readl(&denali_pi[174]) >> 8;
1087 * check status obs, if error maybe can not
1088 * get leveling done PHY_40/168/296/424
1089 * phy_wrlvl_status_obs_X:0:13
1091 obs_0 = readl(&denali_phy[40]);
1092 obs_1 = readl(&denali_phy[168]);
1093 obs_2 = readl(&denali_phy[296]);
1094 obs_3 = readl(&denali_phy[424]);
1095 if (((obs_0 >> 12) & 0x1) ||
1096 ((obs_1 >> 12) & 0x1) ||
1097 ((obs_2 >> 12) & 0x1) ||
1098 ((obs_3 >> 12) & 0x1))
1100 if ((((tmp >> 10) & 0x1) == 0x1) &&
1101 (((tmp >> 13) & 0x1) == 0x1) &&
1102 (((tmp >> 4) & 0x1) == 0x0) &&
1105 else if ((((tmp >> 4) & 0x1) == 0x1) ||
1110 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1111 writel(0x00003f7c, (&denali_pi[175]));
1114 override_write_leveling_value(chan);
1115 clrbits_le32(&denali_pi[60], 0x3 << 8);
/*
 * Run read-gate training for each rank of @channel: enable RDLVL_GATE
 * (PI_80[25:24]), request it per CS (PI_74), poll PI_174 for completion
 * and the gate-leveling observation registers (PHY_43/171/299/427 bits
 * [23:22]) for errors, then clear interrupts and disable gate training.
 * Returns an int status (return statements are missing from this listing).
 */
1120 static int data_training_rg(const struct chan_info *chan, u32 channel,
1121 const struct rk3399_sdram_params *params)
1123 u32 *denali_pi = chan->pi->denali_pi;
1124 u32 *denali_phy = chan->publ->denali_phy;
1126 u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
1127 u32 rank = params->ch[channel].cap_info.rank;
1129 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1130 writel(0x00003f7c, (&denali_pi[175]));
1132 for (i = 0; i < rank; i++) {
1133 select_per_cs_training_index(chan, i);
1135 /* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
1136 clrsetbits_le32(&denali_pi[80], 0x3 << 24, 0x2 << 24);
1139 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
1140 * PI_RDLVL_CS:RW:24:2
1142 clrsetbits_le32(&denali_pi[74],
1143 (0x1 << 16) | (0x3 << 24),
1144 (0x1 << 16) | (i << 24));
1146 /* Waiting for training complete */
1148 /* PI_174 PI_INT_STATUS:RD:8:18 */
1149 tmp = readl(&denali_pi[174]) >> 8;
1153 * PHY_43/171/299/427
1154 * PHY_GTLVL_STATUS_OBS_x:16:8
1156 obs_0 = readl(&denali_phy[43]);
1157 obs_1 = readl(&denali_phy[171]);
1158 obs_2 = readl(&denali_phy[299]);
1159 obs_3 = readl(&denali_phy[427]);
1160 if (((obs_0 >> (16 + 6)) & 0x3) ||
1161 ((obs_1 >> (16 + 6)) & 0x3) ||
1162 ((obs_2 >> (16 + 6)) & 0x3) ||
1163 ((obs_3 >> (16 + 6)) & 0x3))
1165 if ((((tmp >> 9) & 0x1) == 0x1) &&
1166 (((tmp >> 13) & 0x1) == 0x1) &&
1167 (((tmp >> 3) & 0x1) == 0x0) &&
1170 else if ((((tmp >> 3) & 0x1) == 0x1) ||
1175 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1176 writel(0x00003f7c, (&denali_pi[175]));
1179 clrbits_le32(&denali_pi[80], 0x3 << 24);
/*
 * Run read leveling for each rank of @channel: enable RDLVL
 * (PI_80[17:16]), request it per CS (PI_74), poll PI_174 for the
 * done/error bits, clear interrupts, and disable read leveling.
 * Unlike the other training steps, no PHY observation registers are
 * checked here (only the PI status bits).
 * Returns an int status (return statements are missing from this listing).
 */
1184 static int data_training_rl(const struct chan_info *chan, u32 channel,
1185 const struct rk3399_sdram_params *params)
1187 u32 *denali_pi = chan->pi->denali_pi;
1189 u32 rank = params->ch[channel].cap_info.rank;
1191 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1192 writel(0x00003f7c, (&denali_pi[175]));
1194 for (i = 0; i < rank; i++) {
1195 select_per_cs_training_index(chan, i);
1197 /* PI_80 PI_RDLVL_EN:RW:16:2 */
1198 clrsetbits_le32(&denali_pi[80], 0x3 << 16, 0x2 << 16);
1200 /* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
1201 clrsetbits_le32(&denali_pi[74],
1202 (0x1 << 8) | (0x3 << 24),
1203 (0x1 << 8) | (i << 24));
1205 /* Waiting for training complete */
1207 /* PI_174 PI_INT_STATUS:RD:8:18 */
1208 tmp = readl(&denali_pi[174]) >> 8;
1211 * make sure status obs not report error bit
1212 * PHY_46/174/302/430
1213 * phy_rdlvl_status_obs_X:16:8
1215 if ((((tmp >> 8) & 0x1) == 0x1) &&
1216 (((tmp >> 13) & 0x1) == 0x1) &&
1217 (((tmp >> 2) & 0x1) == 0x0))
1219 else if (((tmp >> 2) & 0x1) == 0x1)
1223 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1224 writel(0x00003f7c, (&denali_pi[175]));
1227 clrbits_le32(&denali_pi[80], 0x3 << 16);
/*
 * data_training_wdql() - run PI-driven write-DQ leveling on one channel.
 *
 * The loop iterates over training indices 0..3; rank_mask selects which
 * indices are valid: for LPDDR4 the per-CS index encoding uses 0x5 (one
 * rank) or 0xf (two ranks), other types use plain 0x1/0x3.  Per selected
 * index: drop PI_WDQLVL_VREF_EN, arm PI_WDQLVL_EN (PI_124[17:16] = 0x2),
 * request leveling via PI_121 and poll PI_INT_STATUS (tmp bits 12+13 set,
 * bit 6 clear = success; bit 6 set = error).
 *
 * NOTE(review): poll loop and returns are missing from this chunk.
 */
1232 static int data_training_wdql(const struct chan_info *chan, u32 channel,
1233 const struct rk3399_sdram_params *params)
1235 u32 *denali_pi = chan->pi->denali_pi;
1237 u32 rank = params->ch[channel].cap_info.rank;
1240 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1241 writel(0x00003f7c, (&denali_pi[175]));
1243 if (params->base.dramtype == LPDDR4)
1244 rank_mask = (rank == 1) ? 0x5 : 0xf;
1246 rank_mask = (rank == 1) ? 0x1 : 0x3;
1248 for (i = 0; i < 4; i++) {
1249 if (!(rank_mask & (1 << i)))
1252 select_per_cs_training_index(chan, i);
1255 * disable PI_WDQLVL_VREF_EN before wdq leveling?
1256 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
1258 clrbits_le32(&denali_pi[181], 0x1 << 8);
1260 /* PI_124 PI_WDQLVL_EN:RW:16:2 */
1261 clrsetbits_le32(&denali_pi[124], 0x3 << 16, 0x2 << 16);
/* Kick off write-DQ leveling for training index i. */
1263 /* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
1264 clrsetbits_le32(&denali_pi[121],
1265 (0x1 << 8) | (0x3 << 16),
1266 (0x1 << 8) | (i << 16));
1268 /* Waiting for training complete */
1270 /* PI_174 PI_INT_STATUS:RD:8:18 */
1271 tmp = readl(&denali_pi[174]) >> 8;
1272 if ((((tmp >> 12) & 0x1) == 0x1) &&
1273 (((tmp >> 13) & 0x1) == 0x1) &&
1274 (((tmp >> 6) & 0x1) == 0x0))
1276 else if (((tmp >> 6) & 0x1) == 0x1)
1280 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1281 writel(0x00003f7c, (&denali_pi[175]));
/* Disable write-DQ leveling again (PI_124[17:16] = 0). */
1284 clrbits_le32(&denali_pi[124], 0x3 << 16);
/*
 * data_training() - dispatch the requested training stages for one channel.
 *
 * PI_FULL_TRAINING is expanded into the stage set appropriate for the DRAM
 * type (LPDDR4 gets all five stages; LPDDR3 CA/WL/RG; DDR3 WL/RG/...).
 * The PHY DQS pad RPULL bit (PHY_927 bit 22) is asserted around the whole
 * sequence and cleared again afterwards.
 *
 * NOTE(review): the error-return lines after each stage (and the final
 * return) are missing from this chunk; each stage's failure path is only
 * evidenced by the debug() calls visible below.
 */
1289 static int data_training(struct dram_info *dram, u32 channel,
1290 const struct rk3399_sdram_params *params,
1293 struct chan_info *chan = &dram->chan[channel];
1294 u32 *denali_phy = chan->publ->denali_phy;
1297 /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
1298 setbits_le32(&denali_phy[927], (1 << 22));
1300 if (training_flag == PI_FULL_TRAINING) {
1301 if (params->base.dramtype == LPDDR4) {
1302 training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
1303 PI_READ_GATE_TRAINING |
1304 PI_READ_LEVELING | PI_WDQ_LEVELING;
1305 } else if (params->base.dramtype == LPDDR3) {
1306 training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
1307 PI_READ_GATE_TRAINING;
1308 } else if (params->base.dramtype == DDR3) {
1309 training_flag = PI_WRITE_LEVELING |
1310 PI_READ_GATE_TRAINING |
1315 /* ca training(LPDDR4,LPDDR3 support) */
1316 if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
1317 ret = data_training_ca(chan, channel, params);
1319 debug("%s: data training ca failed\n", __func__);
1324 /* write leveling(LPDDR4,LPDDR3,DDR3 support) */
1325 if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
1326 ret = data_training_wl(chan, channel, params);
1328 debug("%s: data training wl failed\n", __func__);
1333 /* read gate training(LPDDR4,LPDDR3,DDR3 support) */
1334 if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
1335 ret = data_training_rg(chan, channel, params);
1337 debug("%s: data training rg failed\n", __func__);
1342 /* read leveling(LPDDR4,LPDDR3,DDR3 support) */
1343 if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
1344 ret = data_training_rl(chan, channel, params);
1346 debug("%s: data training rl failed\n", __func__);
1351 /* wdq leveling(LPDDR4 support) */
1352 if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
1353 ret = data_training_wdql(chan, channel, params);
1355 debug("%s: data training wdql failed\n", __func__);
1360 /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
1361 clrbits_le32(&denali_phy[927], (1 << 22));
/*
 * set_ddrconfig() - program the memory scheduler's ddrconf/ddrsize for a
 * channel.
 *
 * CS capacities are computed in MiB from the geometry: total address bits
 * (cs0_row + col + bk + bw) minus 20.  CS1 capacity is derived from CS0 by
 * shifting out the row-count difference.  A 3/4-row part scales both by
 * 3/4.  ddrsize is written in units of 32 MiB per CS (cap / 32), ddrconf
 * duplicated into both byte fields of the register.
 */
1366 static void set_ddrconfig(const struct chan_info *chan,
1367 const struct rk3399_sdram_params *params,
1368 unsigned char channel, u32 ddrconfig)
1370 /* only need to set ddrconfig */
1371 struct rk3399_msch_regs *ddr_msch_regs = chan->msch;
1372 unsigned int cs0_cap = 0;
1373 unsigned int cs1_cap = 0;
1375 cs0_cap = (1 << (params->ch[channel].cap_info.cs0_row
1376 + params->ch[channel].cap_info.col
1377 + params->ch[channel].cap_info.bk
1378 + params->ch[channel].cap_info.bw - 20));
1379 if (params->ch[channel].cap_info.rank > 1)
1380 cs1_cap = cs0_cap >> (params->ch[channel].cap_info.cs0_row
1381 - params->ch[channel].cap_info.cs1_row)_
1382 if (params->ch[channel].cap_info.row_3_4) {
1383 cs0_cap = cs0_cap * 3 / 4;
1384 cs1_cap = cs1_cap * 3 / 4;
1387 writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
1388 writel(((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8),
1389 &ddr_msch_regs->ddrsize);
/*
 * dram_all_config() - publish final DRAM configuration to the SoC.
 *
 * Encodes each populated channel's geometry (type, rank, col, bank, bus
 * width, dbw, row counts ...) into PMUGRF OS_REG2/OS_REG3 for consumption
 * by later boot stages, programs the per-channel NOC (msch) timing
 * registers, optionally gates the rank-1 memory clock, writes the address
 * stride into PMUSGRF, and arms the reboot-hold preset registers.
 */
1392 static void dram_all_config(struct dram_info *dram,
1393 const struct rk3399_sdram_params *params)
1397 unsigned int channel, idx;
1399 sys_reg2 |= SYS_REG_ENC_DDRTYPE(params->base.dramtype);
1400 sys_reg2 |= SYS_REG_ENC_NUM_CH(params->base.num_channels);
1402 for (channel = 0, idx = 0;
1403 (idx < params->base.num_channels) && (channel < 2);
1405 const struct rk3399_sdram_channel *info = &params->ch[channel];
1406 struct rk3399_msch_regs *ddr_msch_regs;
1407 const struct rk3399_msch_timings *noc_timing;
/* Unpopulated channel: no columns detected during probing. */
1409 if (params->ch[channel].cap_info.col == 0)
1412 sys_reg2 |= SYS_REG_ENC_ROW_3_4(info->cap_info.row_3_4, channel);
1413 sys_reg2 |= SYS_REG_ENC_CHINFO(channel);
1414 sys_reg2 |= SYS_REG_ENC_RANK(info->cap_info.rank, channel);
1415 sys_reg2 |= SYS_REG_ENC_COL(info->cap_info.col, channel);
1416 sys_reg2 |= SYS_REG_ENC_BK(info->cap_info.bk, channel);
1417 sys_reg2 |= SYS_REG_ENC_BW(info->cap_info.bw, channel);
1418 sys_reg2 |= SYS_REG_ENC_DBW(info->cap_info.dbw, channel);
1419 SYS_REG_ENC_CS0_ROW(info->cap_info.cs0_row, sys_reg2, sys_reg3, channel);
1420 if (info->cap_info.cs1_row)
1421 SYS_REG_ENC_CS1_ROW(info->cap_info.cs1_row, sys_reg2,
1423 sys_reg3 |= SYS_REG_ENC_CS1_COL(info->cap_info.col, channel);
1424 sys_reg3 |= SYS_REG_ENC_VERSION(DDR_SYS_REG_VERSION);
/* Program NOC scheduler timings for this channel. */
1426 ddr_msch_regs = dram->chan[channel].msch;
1427 noc_timing = &params->ch[channel].noc_timings;
1428 writel(noc_timing->ddrtiminga0,
1429 &ddr_msch_regs->ddrtiminga0);
1430 writel(noc_timing->ddrtimingb0,
1431 &ddr_msch_regs->ddrtimingb0);
1432 writel(noc_timing->ddrtimingc0.d32,
1433 &ddr_msch_regs->ddrtimingc0);
1434 writel(noc_timing->devtodev0,
1435 &ddr_msch_regs->devtodev0);
1436 writel(noc_timing->ddrmode.d32,
1437 &ddr_msch_regs->ddrmode);
1440 * rank 1 memory clock disable (dfi_dram_clk_disable = 1)
1442 * The hardware for LPDDR4 with
1443 * - CLK0P/N connect to lower 16-bits
1444 * - CLK1P/N connect to higher 16-bits
1446 * dfi dram clk is configured via CLK1P/N, so disabling
1447 * dfi dram clk will disable the CLK1P/N as well for lpddr4.
1449 if (params->ch[channel].cap_info.rank == 1 &&
1450 params->base.dramtype != LPDDR4)
1451 setbits_le32(&dram->chan[channel].pctl->denali_ctl[276],
/* Expose geometry to later boot stages via PMUGRF scratch regs. */
1455 writel(sys_reg2, &dram->pmugrf->os_reg2);
1456 writel(sys_reg3, &dram->pmugrf->os_reg3);
1457 rk_clrsetreg(&dram->pmusgrf->soc_con4, 0x1f << 10,
1458 params->base.stride << 10);
1460 /* reboot hold register set */
1461 writel(PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
1462 PRESET_GPIO1_HOLD(1),
1463 &dram->pmucru->pmucru_rstnhold_con[1]);
1464 clrsetbits_le32(&dram->cru->glb_rst_con, 0x3, 0x3);
/*
 * switch_to_phy_index1() - switch the DDR PHY to register set (frequency
 * index) 1 and retrain.
 *
 * Triggers the frequency change through the CIC, waits for the change and
 * done status bits, then for every channel selects index 1 in PHY_896 and
 * re-runs full data training at the new frequency.
 *
 * NOTE(review): the timeout-decrement bodies of the two wait loops and the
 * final return are missing from this chunk.
 */
1467 static int switch_to_phy_index1(struct dram_info *dram,
1468 const struct rk3399_sdram_params *params)
1472 u32 ch_count = params->base.num_channels;
/* Request CIC frequency change to index 1. */
1476 writel(RK_CLRSETBITS(0x03 << 4 | 1 << 2 | 1,
1477 1 << 4 | 1 << 2 | 1),
1478 &dram->cic->cic_ctrl0);
1479 while (!(readl(&dram->cic->cic_status0) & (1 << 2))) {
1483 debug("index1 frequency change overtime\n");
/* Acknowledge and wait for the change to complete. */
1489 writel(RK_CLRSETBITS(1 << 1, 1 << 1), &dram->cic->cic_ctrl0);
1490 while (!(readl(&dram->cic->cic_status0) & (1 << 0))) {
1494 debug("index1 frequency done overtime\n");
1499 for (channel = 0; channel < ch_count; channel++) {
1500 denali_phy = dram->chan[channel].publ->denali_phy;
/* PHY_896: select register-set index 1 and retrain the channel. */
1501 clrsetbits_le32(&denali_phy[896], (0x3 << 8) | 1, 1 << 8);
1502 ret = data_training(dram, channel, params, PI_FULL_TRAINING);
1504 debug("index1 training failed\n");
/*
 * calculate_stride() - pick the address-stride encoding from detected
 * channel capacities.
 *
 * Computes each populated channel's capacity in MiB (same formula as
 * set_ddrconfig()), then selects a stride value: 0x17 for a single
 * channel-A configuration, otherwise a 256B-interleave stride chosen from
 * the combined capacity.
 *
 * NOTE(review): most of the two-channel stride lookup (equal and unequal
 * capacity cases) is missing from this chunk -- only a few comment lines
 * and the failure printf survive here.
 */
1512 static unsigned char calculate_stride(struct rk3399_sdram_params *params)
1514 unsigned int stride = params->base.stride;
1515 unsigned int channel, chinfo = 0;
1516 unsigned int ch_cap[2] = {0, 0};
1519 for (channel = 0; channel < 2; channel++) {
1520 unsigned int cs0_cap = 0;
1521 unsigned int cs1_cap = 0;
1522 struct sdram_cap_info *cap_info = &params->ch[channel].cap_info;
/* Skip unpopulated channel. */
1524 if (cap_info->col == 0)
1527 cs0_cap = (1 << (cap_info->cs0_row + cap_info->col +
1528 cap_info->bk + cap_info->bw - 20));
1529 if (cap_info->rank > 1)
1530 cs1_cap = cs0_cap >> (cap_info->cs0_row
1531 - cap_info->cs1_row);
1532 if (cap_info->row_3_4) {
1533 cs0_cap = cs0_cap * 3 / 4;
1534 cs1_cap = cs1_cap * 3 / 4;
1536 ch_cap[channel] = cs0_cap + cs1_cap;
1537 chinfo |= 1 << channel;
1540 /* stride calculation for 1 channel */
1541 if (params->base.num_channels == 1 && chinfo & 1)
1542 return 0x17; /* channel a */
1544 /* stride calculation for 2 channels, default gstride type is 256B */
1545 if (ch_cap[0] == ch_cap[1]) {
1546 cap = ch_cap[0] + ch_cap[1];
1557 * 768MB + 768MB same as total 2GB memory
1558 * useful space: 0-768MB 1GB-1792MB
1565 /* 1536MB + 1536MB */
1574 printf("%s: Unable to calculate stride for ", __func__);
1575 print_size((cap * (1 << 20)), " capacity\n");
1580 sdram_print_stride(stride);
/*
 * clear_channel_params() - reset a channel's detected capability info.
 *
 * Called when rank probing finds no memory on @channel; col == 0 is the
 * marker later code uses to treat the channel as unpopulated, while the
 * bus/device widths are restored to their 32-bit defaults.
 */
1585 static void clear_channel_params(struct rk3399_sdram_params *params, u8 channel)
1587 params->ch[channel].cap_info.rank = 0;
1588 params->ch[channel].cap_info.col = 0;
1589 params->ch[channel].cap_info.bk = 0;
1590 params->ch[channel].cap_info.bw = 32;
1591 params->ch[channel].cap_info.dbw = 32;
1592 params->ch[channel].cap_info.row_3_4 = 0;
1593 params->ch[channel].cap_info.cs0_row = 0;
1594 params->ch[channel].cap_info.cs1_row = 0;
1595 params->ch[channel].cap_info.ddrconfig = 0;
/*
 * pctl_init() - bring up the DDR controller/PHY pair on both channels.
 *
 * Per channel: pulse the PHY/PCTL resets through the CRU, set the PHY DLL
 * bypass appropriate for the target frequency, apply the controller
 * configuration (pctl_cfg) and finally trigger initialization
 * (pctl_start).  Returns non-zero if pctl_cfg fails.
 */
1598 static int pctl_init(struct dram_info *dram, struct rk3399_sdram_params *params)
1603 for (channel = 0; channel < 2; channel++) {
1604 const struct chan_info *chan = &dram->chan[channel];
1605 struct rk3399_cru *cru = dram->cru;
1606 struct rk3399_ddr_publ_regs *publ = chan->publ;
1608 phy_pctrl_reset(cru, channel);
1609 phy_dll_bypass_set(publ, params->base.ddr_freq);
1611 ret = pctl_cfg(dram, chan, channel, params);
1613 printf("%s: pctl config failed\n", __func__);
1617 /* start to trigger initialization */
1618 pctl_start(dram, channel);
/*
 * sdram_init() - top-level SDRAM bring-up.
 *
 * Sanity-checks the requested frequency against per-type maxima, probes
 * rank count per channel (try rank 2 then 1, using read-gate training as
 * the detection), then for each populated channel runs the remaining
 * training stages, prints the geometry and programs the scheduler.
 * Finally computes the stride, publishes the configuration
 * (dram_all_config) and switches the PHY to frequency index 1.
 *
 * NOTE(review): error-handling bodies and several statements between the
 * visible lines are missing from this chunk.  Also the debug string below
 * reads "is to high" -- runtime text, typo left untouched here.
 */
1624 static int sdram_init(struct dram_info *dram,
1625 struct rk3399_sdram_params *params)
1627 unsigned char dramtype = params->base.dramtype;
1628 unsigned int ddr_freq = params->base.ddr_freq;
1629 u32 training_flag = PI_READ_GATE_TRAINING;
1630 int channel, ch, rank;
1633 debug("Starting SDRAM initialization...\n");
1635 if ((dramtype == DDR3 && ddr_freq > 933) ||
1636 (dramtype == LPDDR3 && ddr_freq > 933) ||
1637 (dramtype == LPDDR4 && ddr_freq > 800)) {
1638 debug("SDRAM frequency is to high!");
/* Rank probing: assume 2 ranks, retry with fewer until training passes. */
1642 for (ch = 0; ch < 2; ch++) {
1643 params->ch[ch].cap_info.rank = 2;
1644 for (rank = 2; rank != 0; rank--) {
1645 ret = pctl_init(dram, params);
1647 printf("%s: pctl init failed\n", __func__);
1651 /* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
1652 if (dramtype == LPDDR3)
1655 params->ch[ch].cap_info.rank = rank;
1658 * LPDDR3 CA training must be triggered before
1660 * DDR3 does not have CA training.
1662 if (params->base.dramtype == LPDDR3)
1663 training_flag |= PI_CA_TRAINING;
1665 if (!(data_training(dram, ch, params, training_flag)))
1668 /* Computed rank with associated channel number */
1669 params->ch[ch].cap_info.rank = rank;
1672 params->base.num_channels = 0;
1673 for (channel = 0; channel < 2; channel++) {
1674 const struct chan_info *chan = &dram->chan[channel];
1675 struct sdram_cap_info *cap_info = &params->ch[channel].cap_info;
/* Shadows the outer training_flag on purpose: per-channel stages. */
1676 u8 training_flag = PI_FULL_TRAINING;
1678 if (cap_info->rank == 0) {
1679 clear_channel_params(params, channel);
1682 params->base.num_channels++;
1686 debug(channel ? "1: " : "0: ");
1688 /* LPDDR3 should have write and read gate training */
1689 if (params->base.dramtype == LPDDR3)
1690 training_flag = PI_WRITE_LEVELING |
1691 PI_READ_GATE_TRAINING;
/* LPDDR4 was already fully trained during rank probing. */
1693 if (params->base.dramtype != LPDDR4) {
1694 ret = data_training(dram, channel, params,
1697 debug("%s: data train failed for channel %d\n",
1703 sdram_print_ddr_info(cap_info, &params->base);
1705 set_ddrconfig(chan, params, channel, cap_info->ddrconfig);
1708 if (params->base.num_channels == 0) {
1709 printf("%s: ", __func__);
1710 sdram_print_dram_type(params->base.dramtype);
1711 printf(" - %dMHz failed!\n", params->base.ddr_freq);
1715 params->base.stride = calculate_stride(params);
1716 dram_all_config(dram, params);
1717 switch_to_phy_index1(dram, params);
1719 debug("Finish SDRAM initialization...\n");
/*
 * rk3399_dmc_ofdata_to_platdata() - decode device-tree platform data.
 *
 * Reads the "rockchip,sdram-params" u32 array straight into the
 * rk3399_sdram_params structure (layouts must match) and initializes the
 * register map covering the controller regions.  Compiled out when
 * OF_PLATDATA provides the data statically.
 */
1723 static int rk3399_dmc_ofdata_to_platdata(struct udevice *dev)
1725 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1726 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1729 ret = dev_read_u32_array(dev, "rockchip,sdram-params",
1730 (u32 *)&plat->sdram_params,
1731 sizeof(plat->sdram_params) / sizeof(u32));
1733 printf("%s: Cannot read rockchip,sdram-params %d\n",
1737 ret = regmap_init_mem(dev_ofnode(dev), &plat->map);
1739 printf("%s: regmap failed %d\n", __func__, ret);
/*
 * conv_of_platdata() - OF_PLATDATA variant of platform-data setup:
 * builds the regmap from the statically generated dtd reg pairs
 * (addr/size, hence ARRAY_SIZE / 2 ranges).
 *
 * NOTE(review): the tail of this function is not visible in this chunk.
 */
1745 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1746 static int conv_of_platdata(struct udevice *dev)
1748 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1749 struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
1752 ret = regmap_init_mem_platdata(dev, dtplat->reg,
1753 ARRAY_SIZE(dtplat->reg) / 2,
/*
 * rk3399_dmc_init() - resolve register bases, set the DDR clock and run
 * the full SDRAM initialization.
 *
 * Looks up the syscon regions (CIC/GRF/PMUGRF/PMUSGRF) and the CRU/PMUCRU,
 * then maps the eight per-channel controller regions from the regmap in
 * the fixed order pctl/pi/publ/msch for channel 0 then channel 1.  With
 * OF_PLATDATA the sdram params come from the generated dtplat blob instead
 * of the DT property.
 */
1762 static int rk3399_dmc_init(struct udevice *dev)
1764 struct dram_info *priv = dev_get_priv(dev);
1765 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1767 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1768 struct rk3399_sdram_params *params = &plat->sdram_params;
1770 struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
1771 struct rk3399_sdram_params *params =
1772 (void *)dtplat->rockchip_sdram_params;
1774 ret = conv_of_platdata(dev);
1779 priv->cic = syscon_get_first_range(ROCKCHIP_SYSCON_CIC);
1780 priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
1781 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1782 priv->pmusgrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUSGRF);
1783 priv->pmucru = rockchip_get_pmucru();
1784 priv->cru = rockchip_get_cru();
/* Regmap ranges 0..7: ch0 pctl/pi/publ/msch, then ch1 same order. */
1785 priv->chan[0].pctl = regmap_get_range(plat->map, 0);
1786 priv->chan[0].pi = regmap_get_range(plat->map, 1);
1787 priv->chan[0].publ = regmap_get_range(plat->map, 2);
1788 priv->chan[0].msch = regmap_get_range(plat->map, 3);
1789 priv->chan[1].pctl = regmap_get_range(plat->map, 4);
1790 priv->chan[1].pi = regmap_get_range(plat->map, 5);
1791 priv->chan[1].publ = regmap_get_range(plat->map, 6);
1792 priv->chan[1].msch = regmap_get_range(plat->map, 7);
1794 debug("con reg %p %p %p %p %p %p %p %p\n",
1795 priv->chan[0].pctl, priv->chan[0].pi,
1796 priv->chan[0].publ, priv->chan[0].msch,
1797 priv->chan[1].pctl, priv->chan[1].pi,
1798 priv->chan[1].publ, priv->chan[1].msch);
1799 debug("cru %p, cic %p, grf %p, sgrf %p, pmucru %p\n", priv->cru,
1800 priv->cic, priv->pmugrf, priv->pmusgrf, priv->pmucru);
1802 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1803 ret = clk_get_by_index_platdata(dev, 0, dtplat->clocks, &priv->ddr_clk);
1805 ret = clk_get_by_index(dev, 0, &priv->ddr_clk);
1808 printf("%s clk get failed %d\n", __func__, ret);
1812 ret = clk_set_rate(&priv->ddr_clk, params->base.ddr_freq * MHz);
1814 printf("%s clk set failed %d\n", __func__, ret);
1818 ret = sdram_init(priv, params);
1820 printf("%s DRAM init failed %d\n", __func__, ret);
/*
 * rk3399_dmc_probe() - driver probe.
 *
 * In TPL (or SPL-without-TPL) builds this performs the actual DRAM
 * initialization; in later stages it only reads back the size that the
 * earlier stage encoded into PMUGRF os_reg2.
 */
1828 static int rk3399_dmc_probe(struct udevice *dev)
1830 #if defined(CONFIG_TPL_BUILD) || \
1831 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1832 if (rk3399_dmc_init(dev))
1835 struct dram_info *priv = dev_get_priv(dev);
1837 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1838 debug("%s: pmugrf = %p\n", __func__, priv->pmugrf);
1839 priv->info.base = CONFIG_SYS_SDRAM_BASE;
/* Decode total size from the os_reg2 geometry written at init time. */
1841 rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg2);
/*
 * rk3399_dmc_get_info() - ram_ops callback returning the cached base/size.
 * NOTE(review): the body (copy into *info and return) is not visible in
 * this chunk.
 */
1846 static int rk3399_dmc_get_info(struct udevice *dev, struct ram_info *info)
1848 struct dram_info *priv = dev_get_priv(dev);
/* RAM uclass operations: only size/base reporting is exposed. */
1855 static struct ram_ops rk3399_dmc_ops = {
1856 .get_info = rk3399_dmc_get_info,
/* Device-tree match table. */
1859 static const struct udevice_id rk3399_dmc_ids[] = {
1860 { .compatible = "rockchip,rk3399-dmc" },
/*
 * Driver declaration.  Platform-data decoding and its allocation are only
 * needed in the stage that actually initializes DRAM (TPL, or SPL when no
 * TPL exists) -- later stages just read back the size in probe.
 */
1864 U_BOOT_DRIVER(dmc_rk3399) = {
1865 .name = "rockchip_rk3399_dmc",
1867 .of_match = rk3399_dmc_ids,
1868 .ops = &rk3399_dmc_ops,
1869 #if defined(CONFIG_TPL_BUILD) || \
1870 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1871 .ofdata_to_platdata = rk3399_dmc_ofdata_to_platdata,
1873 .probe = rk3399_dmc_probe,
1874 .priv_auto_alloc_size = sizeof(struct dram_info),
1875 #if defined(CONFIG_TPL_BUILD) || \
1876 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1877 .platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),