1 // SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
3 * (C) Copyright 2016-2017 Rockchip Inc.
5 * Adapted from coreboot.
11 #include <dt-structs.h>
16 #include <asm/arch-rockchip/clock.h>
17 #include <asm/arch-rockchip/cru_rk3399.h>
18 #include <asm/arch-rockchip/grf_rk3399.h>
19 #include <asm/arch-rockchip/hardware.h>
20 #include <asm/arch-rockchip/sdram_common.h>
21 #include <asm/arch-rockchip/sdram_rk3399.h>
22 #include <linux/err.h>
/*
 * PMU/SGRF "hold" presets. Upper halfword is the write-enable mask
 * (Rockchip write-mask register convention: bit N+16 enables writing bit N),
 * so a single 32-bit write updates only the targeted hold bit.
 */
25 #define PRESET_SGRF_HOLD(n) ((0x1 << (6 + 16)) | ((n) << 6))
26 #define PRESET_GPIO0_HOLD(n) ((0x1 << (7 + 16)) | ((n) << 7))
27 #define PRESET_GPIO1_HOLD(n) ((0x1 << (8 + 16)) | ((n) << 8))
/*
 * PHY pad drive-strength / ODT selector codes written into the Denali PHY
 * tsel fields (names give the nominal resistance in ohms; HI_Z = disabled).
 */
29 #define PHY_DRV_ODT_HI_Z 0x0
30 #define PHY_DRV_ODT_240 0x1
31 #define PHY_DRV_ODT_120 0x8
32 #define PHY_DRV_ODT_80 0x9
33 #define PHY_DRV_ODT_60 0xc
34 #define PHY_DRV_ODT_48 0xd
35 #define PHY_DRV_ODT_40 0xe
36 #define PHY_DRV_ODT_34_3 0xf
/* Pad boost / slew / RX common-mode enable bits (used on the LPDDR4 path). */
38 #define PHY_BOOSTP_EN 0x1
39 #define PHY_BOOSTN_EN 0x1
40 #define PHY_SLEWP_EN 0x1
41 #define PHY_SLEWN_EN 0x1
42 #define PHY_RX_CM_INPUT 0x1
/*
 * CRU soft-reset fields for the per-channel DDR controller/PHY, again in
 * write-mask form; (ch) selects the 4-bit group for channel 0 or 1.
 */
44 #define CRU_SFTRST_DDR_CTRL(ch, n) ((0x1 << (8 + 16 + (ch) * 4)) | \
45 ((n) << (8 + (ch) * 4)))
46 #define CRU_SFTRST_DDR_PHY(ch, n) ((0x1 << (9 + 16 + (ch) * 4)) | \
47 ((n) << (9 + (ch) * 4)))
/*
 * Per-channel register-block pointers (NOTE(review): the enclosing struct
 * declaration is not visible in this view — presumably `struct chan_info`,
 * given later uses of chan->pctl/pi/publ/msch; confirm against full file).
 */
49 struct rk3399_ddr_pctl_regs *pctl;	/* Denali DDR controller (CTL) regs */
50 struct rk3399_ddr_pi_regs *pi;		/* Denali PHY-independent (PI) regs */
51 struct rk3399_ddr_publ_regs *publ;	/* Denali PHY (PUBL) regs */
52 struct rk3399_msch_regs *msch;		/* memory scheduler regs */
/*
 * DRAM driver state (NOTE(review): the enclosing struct declaration —
 * presumably `struct dram_info`, used by pctl_start()/pctl_cfg() below —
 * is not visible in this view; confirm against the full file).
 * The SPL/TPL-only members exist only in the build that performs init.
 */
56 #if defined(CONFIG_TPL_BUILD) || \
57 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
58 u32 pwrup_srefresh_exit[2];	/* saved CTL_68 PWRUP_SREFRESH_EXIT, per channel */
59 struct chan_info chan[2];	/* per-channel controller/PHY handles */
61 struct rk3399_cru *cru;
62 struct rk3399_grf_regs *grf;
63 struct rk3399_pmucru *pmucru;
64 struct rk3399_pmusgrf_regs *pmusgrf;
65 struct rk3399_ddr_cic_regs *cic;
68 struct rk3399_pmugrf_regs *pmugrf;
/* SPL/TPL-only platform data for the DMC device. */
71 #if defined(CONFIG_TPL_BUILD) || \
72 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
74 struct rockchip_dmc_plat {
75 #if CONFIG_IS_ENABLED(OF_PLATDATA)
/* of-platdata build: decoded devicetree node instead of live DT access */
76 struct dtd_rockchip_rk3399_dmc dtplat;
78 struct rk3399_sdram_params sdram_params;
/* Return the GRF ddrc0_con register address for @channel (0 or 1). */
83 static void *get_ddrc0_con(struct dram_info *dram, u8 channel)
85 return (channel == 0) ? &dram->grf->ddrc0_con0 : &dram->grf->ddrc0_con1;
/*
 * Copy @n BYTES (not words — the loop divides by sizeof(u32)) from @src to
 * the register block at @dest, one 32-bit register write at a time.
 */
88 static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
92 for (i = 0; i < n / sizeof(u32); i++) {
/*
 * Assert/de-assert the CRU soft resets for one DDR channel:
 * @ctl drives the controller reset bit, @phy the PHY reset bit
 * (1 = hold in reset, 0 = release), via a single masked write.
 */
99 static void rkclk_ddr_reset(struct rk3399_cru *cru, u32 channel, u32 ctl,
105 writel(CRU_SFTRST_DDR_CTRL(channel, ctl) |
106 CRU_SFTRST_DDR_PHY(channel, phy),
107 &cru->softrst_con[4]);
/*
 * Pulse the channel's resets: assert both controller and PHY, release the
 * PHY first, then release the controller. (Delays between the steps are
 * not visible in this view — presumably udelay calls; confirm in full file.)
 */
110 static void phy_pctrl_reset(struct rk3399_cru *cru, u32 channel)
112 rkclk_ddr_reset(cru, channel, 1, 1);
115 rkclk_ddr_reset(cru, channel, 1, 0);
118 rkclk_ddr_reset(cru, channel, 0, 0);
/*
 * Switch the PHY DLLs into or out of bypass mode by toggling the
 * sw_master_mode bits of each data slice (PHY_86/214/342/470) and each
 * address/control slice (PHY_547/675/803).
 * NOTE(review): the set- and clear- halves below are presumably selected by
 * a frequency test (set bypass when freq <= 125 MHz per the IP-spec comment,
 * clear otherwise), but the if/else lines are not visible here — confirm.
 */
122 static void phy_dll_bypass_set(struct rk3399_ddr_publ_regs *ddr_publ_regs,
125 u32 *denali_phy = ddr_publ_regs->denali_phy;
127 /* From IP spec, only freq small than 125 can enter dll bypass mode */
129 /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
130 setbits_le32(&denali_phy[86], (0x3 << 2) << 8);
131 setbits_le32(&denali_phy[214], (0x3 << 2) << 8);
132 setbits_le32(&denali_phy[342], (0x3 << 2) << 8);
133 setbits_le32(&denali_phy[470], (0x3 << 2) << 8);
135 /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
136 setbits_le32(&denali_phy[547], (0x3 << 2) << 16);
137 setbits_le32(&denali_phy[675], (0x3 << 2) << 16);
138 setbits_le32(&denali_phy[803], (0x3 << 2) << 16);
140 /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
141 clrbits_le32(&denali_phy[86], (0x3 << 2) << 8);
142 clrbits_le32(&denali_phy[214], (0x3 << 2) << 8);
143 clrbits_le32(&denali_phy[342], (0x3 << 2) << 8);
144 clrbits_le32(&denali_phy[470], (0x3 << 2) << 8);
146 /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
147 clrbits_le32(&denali_phy[547], (0x3 << 2) << 16);
148 clrbits_le32(&denali_phy[675], (0x3 << 2) << 16);
149 clrbits_le32(&denali_phy[803], (0x3 << 2) << 16);
/*
 * Program the controller (CTL) and PI with the channel's geometry:
 * column/bank/row "diff" fields (encoded as 12-col / 3-bank deltas),
 * chip-select map and half-bus-width reduction.
 */
153 static void set_memory_map(const struct chan_info *chan, u32 channel,
154 const struct rk3399_sdram_params *params)
156 const struct rk3399_sdram_channel *sdram_ch = &params->ch[channel];
157 u32 *denali_ctl = chan->pctl->denali_ctl;
158 u32 *denali_pi = chan->pi->denali_pi;
163 /* Get row number from ddrconfig setting */
164 if (sdram_ch->cap_info.ddrconfig < 2 ||
165 sdram_ch->cap_info.ddrconfig == 4)
167 else if (sdram_ch->cap_info.ddrconfig == 3)
/* cs_map: bit per rank; reduc=1 means bus width < full 32-bit (bw != 2) */
172 cs_map = (sdram_ch->cap_info.rank > 1) ? 3 : 1;
173 reduc = (sdram_ch->cap_info.bw == 2) ? 0 : 1;
175 /* Set the dram configuration to ctrl */
176 clrsetbits_le32(&denali_ctl[191], 0xF, (12 - sdram_ch->cap_info.col));
177 clrsetbits_le32(&denali_ctl[190], (0x3 << 16) | (0x7 << 24),
178 ((3 - sdram_ch->cap_info.bk) << 16) |
181 clrsetbits_le32(&denali_ctl[196], 0x3 | (1 << 16),
182 cs_map | (reduc << 16));
184 /* PI_199 PI_COL_DIFF:RW:0:4 */
185 clrsetbits_le32(&denali_pi[199], 0xF, (12 - sdram_ch->cap_info.col));
187 /* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
188 clrsetbits_le32(&denali_pi[155], (0x3 << 16) | (0x7 << 24),
189 ((3 - sdram_ch->cap_info.bk) << 16) |
/* LPDDR4: cs_map is remapped here (body lines not visible in this view) */
192 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
195 else if (cs_map == 2)
201 /* PI_41 PI_CS_MAP:RW:24:4 */
202 clrsetbits_le32(&denali_pi[41], 0xf << 24, cs_map << 24);
/* Single-rank DDR3: disable training for the absent second rank */
203 if (sdram_ch->cap_info.rank == 1 && params->base.dramtype == DDR3)
204 writel(0x2EC7FFFF, &denali_pi[34]);
/*
 * Configure the PHY pad I/O cells: VREF mode/value for DQ and CA pads
 * (per DRAM type, and for LPDDR3-with-ODT per the drive/ODT strengths
 * already programmed in PHY_6), pad mode select, boost/slew/RX-CM enables
 * (LPDDR4 only) and a frequency-dependent speed code.
 */
207 static int phy_io_config(const struct chan_info *chan,
208 const struct rk3399_sdram_params *params)
210 u32 *denali_phy = chan->publ->denali_phy;
211 u32 vref_mode_dq, vref_value_dq, vref_mode_ac, vref_value_ac;
214 u32 drv_value, odt_value;
218 if (params->base.dramtype == LPDDR4) {
221 vref_value_dq = 0x1f;
223 vref_value_ac = 0x1f;
225 } else if (params->base.dramtype == LPDDR3) {
226 if (params->base.odt == 1) {
227 vref_mode_dq = 0x5; /* LPDDR3 ODT */
/* Read back the DQ drive (bits 15:12) and ODT (7:4) codes from PHY_6 */
228 drv_value = (readl(&denali_phy[6]) >> 12) & 0xf;
229 odt_value = (readl(&denali_phy[6]) >> 4) & 0xf;
/*
 * NOTE(review): the case labels below have no visible switch header in
 * this view — presumably switch (odt_value) selecting the DQ VREF step
 * for each drive strength; confirm against the full file.
 */
230 if (drv_value == PHY_DRV_ODT_48) {
232 case PHY_DRV_ODT_240:
233 vref_value_dq = 0x16;
235 case PHY_DRV_ODT_120:
236 vref_value_dq = 0x26;
239 vref_value_dq = 0x36;
242 debug("Invalid ODT value.\n");
245 } else if (drv_value == PHY_DRV_ODT_40) {
247 case PHY_DRV_ODT_240:
248 vref_value_dq = 0x19;
250 case PHY_DRV_ODT_120:
251 vref_value_dq = 0x23;
254 vref_value_dq = 0x31;
257 debug("Invalid ODT value.\n");
260 } else if (drv_value == PHY_DRV_ODT_34_3) {
262 case PHY_DRV_ODT_240:
263 vref_value_dq = 0x17;
265 case PHY_DRV_ODT_120:
266 vref_value_dq = 0x20;
269 vref_value_dq = 0x2e;
272 debug("Invalid ODT value.\n");
276 debug("Invalid DRV value.\n");
280 vref_mode_dq = 0x2; /* LPDDR3 */
281 vref_value_dq = 0x1f;
284 vref_value_ac = 0x1f;
286 } else if (params->base.dramtype == DDR3) {
289 vref_value_dq = 0x1f;
291 vref_value_ac = 0x1f;
294 debug("Unknown DRAM type.\n");
/* Compose VREF control word: mode[11:9] | enable[8] | value[5:0] */
298 reg_value = (vref_mode_dq << 9) | (0x1 << 8) | vref_value_dq;
300 /* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
301 clrsetbits_le32(&denali_phy[913], 0xfff << 8, reg_value << 8);
302 /* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
303 clrsetbits_le32(&denali_phy[914], 0xfff, reg_value);
304 /* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
305 clrsetbits_le32(&denali_phy[914], 0xfff << 16, reg_value << 16);
306 /* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
307 clrsetbits_le32(&denali_phy[915], 0xfff, reg_value);
309 reg_value = (vref_mode_ac << 9) | (0x1 << 8) | vref_value_ac;
311 /* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
312 clrsetbits_le32(&denali_phy[915], 0xfff << 16, reg_value << 16);
/* Pad mode select (mode_sel is set per DRAM type; lines not visible here) */
314 /* PHY_924 PHY_PAD_FDBK_DRIVE */
315 clrsetbits_le32(&denali_phy[924], 0x7 << 15, mode_sel << 15);
316 /* PHY_926 PHY_PAD_DATA_DRIVE */
317 clrsetbits_le32(&denali_phy[926], 0x7 << 6, mode_sel << 6);
318 /* PHY_927 PHY_PAD_DQS_DRIVE */
319 clrsetbits_le32(&denali_phy[927], 0x7 << 6, mode_sel << 6);
320 /* PHY_928 PHY_PAD_ADDR_DRIVE */
321 clrsetbits_le32(&denali_phy[928], 0x7 << 14, mode_sel << 14);
322 /* PHY_929 PHY_PAD_CLK_DRIVE */
323 clrsetbits_le32(&denali_phy[929], 0x7 << 14, mode_sel << 14);
324 /* PHY_935 PHY_PAD_CKE_DRIVE */
325 clrsetbits_le32(&denali_phy[935], 0x7 << 14, mode_sel << 14);
326 /* PHY_937 PHY_PAD_RST_DRIVE */
327 clrsetbits_le32(&denali_phy[937], 0x7 << 14, mode_sel << 14);
328 /* PHY_939 PHY_PAD_CS_DRIVE */
329 clrsetbits_le32(&denali_phy[939], 0x7 << 14, mode_sel << 14);
331 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
332 /* BOOSTP_EN & BOOSTN_EN */
333 reg_value = ((PHY_BOOSTP_EN << 4) | PHY_BOOSTN_EN);
334 /* PHY_925 PHY_PAD_FDBK_DRIVE2 */
335 clrsetbits_le32(&denali_phy[925], 0xff << 8, reg_value << 8);
336 /* PHY_926 PHY_PAD_DATA_DRIVE */
337 clrsetbits_le32(&denali_phy[926], 0xff << 12, reg_value << 12);
338 /* PHY_927 PHY_PAD_DQS_DRIVE */
339 clrsetbits_le32(&denali_phy[927], 0xff << 14, reg_value << 14);
340 /* PHY_928 PHY_PAD_ADDR_DRIVE */
341 clrsetbits_le32(&denali_phy[928], 0xff << 20, reg_value << 20);
342 /* PHY_929 PHY_PAD_CLK_DRIVE */
343 clrsetbits_le32(&denali_phy[929], 0xff << 22, reg_value << 22);
344 /* PHY_935 PHY_PAD_CKE_DRIVE */
345 clrsetbits_le32(&denali_phy[935], 0xff << 20, reg_value << 20);
346 /* PHY_937 PHY_PAD_RST_DRIVE */
347 clrsetbits_le32(&denali_phy[937], 0xff << 20, reg_value << 20);
348 /* PHY_939 PHY_PAD_CS_DRIVE */
349 clrsetbits_le32(&denali_phy[939], 0xff << 20, reg_value << 20);
351 /* SLEWP_EN & SLEWN_EN */
352 reg_value = ((PHY_SLEWP_EN << 3) | PHY_SLEWN_EN);
353 /* PHY_924 PHY_PAD_FDBK_DRIVE */
354 clrsetbits_le32(&denali_phy[924], 0x3f << 8, reg_value << 8);
355 /* PHY_926 PHY_PAD_DATA_DRIVE */
356 clrsetbits_le32(&denali_phy[926], 0x3f, reg_value);
357 /* PHY_927 PHY_PAD_DQS_DRIVE */
358 clrsetbits_le32(&denali_phy[927], 0x3f, reg_value);
359 /* PHY_928 PHY_PAD_ADDR_DRIVE */
360 clrsetbits_le32(&denali_phy[928], 0x3f << 8, reg_value << 8);
361 /* PHY_929 PHY_PAD_CLK_DRIVE */
362 clrsetbits_le32(&denali_phy[929], 0x3f << 8, reg_value << 8);
363 /* PHY_935 PHY_PAD_CKE_DRIVE */
364 clrsetbits_le32(&denali_phy[935], 0x3f << 8, reg_value << 8);
365 /* PHY_937 PHY_PAD_RST_DRIVE */
366 clrsetbits_le32(&denali_phy[937], 0x3f << 8, reg_value << 8);
367 /* PHY_939 PHY_PAD_CS_DRIVE */
368 clrsetbits_le32(&denali_phy[939], 0x3f << 8, reg_value << 8);
/* Speed code chosen by DDR frequency band (assignments not visible here) */
372 if (params->base.ddr_freq < 400)
374 else if (params->base.ddr_freq < 800)
376 else if (params->base.ddr_freq < 1200)
381 /* PHY_924 PHY_PAD_FDBK_DRIVE */
382 clrsetbits_le32(&denali_phy[924], 0x3 << 21, speed << 21);
383 /* PHY_926 PHY_PAD_DATA_DRIVE */
384 clrsetbits_le32(&denali_phy[926], 0x3 << 9, speed << 9);
385 /* PHY_927 PHY_PAD_DQS_DRIVE */
386 clrsetbits_le32(&denali_phy[927], 0x3 << 9, speed << 9);
387 /* PHY_928 PHY_PAD_ADDR_DRIVE */
388 clrsetbits_le32(&denali_phy[928], 0x3 << 17, speed << 17);
389 /* PHY_929 PHY_PAD_CLK_DRIVE */
390 clrsetbits_le32(&denali_phy[929], 0x3 << 17, speed << 17);
391 /* PHY_935 PHY_PAD_CKE_DRIVE */
392 clrsetbits_le32(&denali_phy[935], 0x3 << 17, speed << 17);
393 /* PHY_937 PHY_PAD_RST_DRIVE */
394 clrsetbits_le32(&denali_phy[937], 0x3 << 17, speed << 17);
395 /* PHY_939 PHY_PAD_CS_DRIVE */
396 clrsetbits_le32(&denali_phy[939], 0x3 << 17, speed << 17);
398 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
/* Enable receiver common-mode input on every pad group */
400 reg_value = PHY_RX_CM_INPUT;
401 /* PHY_924 PHY_PAD_FDBK_DRIVE */
402 clrsetbits_le32(&denali_phy[924], 0x1 << 14, reg_value << 14);
403 /* PHY_926 PHY_PAD_DATA_DRIVE */
404 clrsetbits_le32(&denali_phy[926], 0x1 << 11, reg_value << 11);
405 /* PHY_927 PHY_PAD_DQS_DRIVE */
406 clrsetbits_le32(&denali_phy[927], 0x1 << 13, reg_value << 13);
407 /* PHY_928 PHY_PAD_ADDR_DRIVE */
408 clrsetbits_le32(&denali_phy[928], 0x1 << 19, reg_value << 19);
409 /* PHY_929 PHY_PAD_CLK_DRIVE */
410 clrsetbits_le32(&denali_phy[929], 0x1 << 21, reg_value << 21);
411 /* PHY_935 PHY_PAD_CKE_DRIVE */
412 clrsetbits_le32(&denali_phy[935], 0x1 << 19, reg_value << 19);
413 /* PHY_937 PHY_PAD_RST_DRIVE */
414 clrsetbits_le32(&denali_phy[937], 0x1 << 19, reg_value << 19);
415 /* PHY_939 PHY_PAD_CS_DRIVE */
416 clrsetbits_le32(&denali_phy[939], 0x1 << 19, reg_value << 19);
/*
 * Program PHY drive-strength and on-die-termination selects for DQ/DQS,
 * address/control and the various pad groups, then the tsel enables,
 * and finally chain into phy_io_config() for the VREF/pad setup.
 * tsel codes are the PHY_DRV_ODT_* values chosen per DRAM type below.
 */
422 static void set_ds_odt(const struct chan_info *chan,
423 const struct rk3399_sdram_params *params)
425 u32 *denali_phy = chan->publ->denali_phy;
427 u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
428 u32 tsel_idle_select_p, tsel_rd_select_p;
429 u32 tsel_idle_select_n, tsel_rd_select_n;
430 u32 tsel_wr_select_dq_p, tsel_wr_select_ca_p;
431 u32 tsel_wr_select_dq_n, tsel_wr_select_ca_n;
434 if (params->base.dramtype == LPDDR4) {
435 tsel_rd_select_p = PHY_DRV_ODT_HI_Z;
436 tsel_rd_select_n = PHY_DRV_ODT_240;
438 tsel_idle_select_p = PHY_DRV_ODT_HI_Z;
439 tsel_idle_select_n = PHY_DRV_ODT_240;
441 tsel_wr_select_dq_p = PHY_DRV_ODT_40;
442 tsel_wr_select_dq_n = PHY_DRV_ODT_40;
444 tsel_wr_select_ca_p = PHY_DRV_ODT_40;
445 tsel_wr_select_ca_n = PHY_DRV_ODT_40;
446 } else if (params->base.dramtype == LPDDR3) {
447 tsel_rd_select_p = PHY_DRV_ODT_240;
448 tsel_rd_select_n = PHY_DRV_ODT_HI_Z;
450 tsel_idle_select_p = PHY_DRV_ODT_240;
451 tsel_idle_select_n = PHY_DRV_ODT_HI_Z;
453 tsel_wr_select_dq_p = PHY_DRV_ODT_34_3;
454 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
456 tsel_wr_select_ca_p = PHY_DRV_ODT_48;
457 tsel_wr_select_ca_n = PHY_DRV_ODT_48;
/* Fallthrough branch: DDR3 (else arm; the `} else {` line is not visible) */
459 tsel_rd_select_p = PHY_DRV_ODT_240;
460 tsel_rd_select_n = PHY_DRV_ODT_240;
462 tsel_idle_select_p = PHY_DRV_ODT_240;
463 tsel_idle_select_n = PHY_DRV_ODT_240;
465 tsel_wr_select_dq_p = PHY_DRV_ODT_34_3;
466 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
468 tsel_wr_select_ca_p = PHY_DRV_ODT_34_3;
469 tsel_wr_select_ca_n = PHY_DRV_ODT_34_3;
/* tsel_rd/idle/wr_en assignments follow the odt test (not visible here) */
472 if (params->base.odt == 1)
481 * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
482 * sets termination values for read/idle cycles and drive strength
483 * for write cycles for DQ/DM
485 reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
486 (tsel_wr_select_dq_n << 8) | (tsel_wr_select_dq_p << 12) |
487 (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
488 clrsetbits_le32(&denali_phy[6], 0xffffff, reg_value);
489 clrsetbits_le32(&denali_phy[134], 0xffffff, reg_value);
490 clrsetbits_le32(&denali_phy[262], 0xffffff, reg_value);
491 clrsetbits_le32(&denali_phy[390], 0xffffff, reg_value);
494 * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
495 * sets termination values for read/idle cycles and drive strength
496 * for write cycles for DQS
498 clrsetbits_le32(&denali_phy[7], 0xffffff, reg_value);
499 clrsetbits_le32(&denali_phy[135], 0xffffff, reg_value);
500 clrsetbits_le32(&denali_phy[263], 0xffffff, reg_value);
501 clrsetbits_le32(&denali_phy[391], 0xffffff, reg_value);
503 /* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
504 reg_value = tsel_wr_select_ca_n | (tsel_wr_select_ca_p << 0x4);
505 clrsetbits_le32(&denali_phy[544], 0xff, reg_value);
506 clrsetbits_le32(&denali_phy[672], 0xff, reg_value);
507 clrsetbits_le32(&denali_phy[800], 0xff, reg_value);
509 /* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
510 clrsetbits_le32(&denali_phy[928], 0xff, reg_value);
512 /* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
513 clrsetbits_le32(&denali_phy[937], 0xff, reg_value);
515 /* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
516 clrsetbits_le32(&denali_phy[935], 0xff, reg_value);
518 /* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
519 clrsetbits_le32(&denali_phy[939], 0xff, reg_value);
521 /* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
522 clrsetbits_le32(&denali_phy[929], 0xff, reg_value);
524 /* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
525 clrsetbits_le32(&denali_phy[924], 0xff,
526 tsel_wr_select_dq_n | (tsel_wr_select_dq_p << 4));
527 clrsetbits_le32(&denali_phy[925], 0xff,
528 tsel_rd_select_n | (tsel_rd_select_p << 4));
530 /* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
531 reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
533 clrsetbits_le32(&denali_phy[5], 0x7 << 16, reg_value);
534 clrsetbits_le32(&denali_phy[133], 0x7 << 16, reg_value);
535 clrsetbits_le32(&denali_phy[261], 0x7 << 16, reg_value);
536 clrsetbits_le32(&denali_phy[389], 0x7 << 16, reg_value);
538 /* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
539 reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
541 clrsetbits_le32(&denali_phy[6], 0x7 << 24, reg_value);
542 clrsetbits_le32(&denali_phy[134], 0x7 << 24, reg_value);
543 clrsetbits_le32(&denali_phy[262], 0x7 << 24, reg_value);
544 clrsetbits_le32(&denali_phy[390], 0x7 << 24, reg_value);
546 /* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
547 reg_value = tsel_wr_en << 8;
548 clrsetbits_le32(&denali_phy[518], 0x1 << 8, reg_value);
549 clrsetbits_le32(&denali_phy[646], 0x1 << 8, reg_value);
550 clrsetbits_le32(&denali_phy[774], 0x1 << 8, reg_value);
552 /* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
553 reg_value = tsel_wr_en << 17;
554 clrsetbits_le32(&denali_phy[933], 0x1 << 17, reg_value);
556 * pad_rst/cke/cs/clk_term tsel 1bits
557 * DENALI_PHY_938/936/940/934 offset_17
559 clrsetbits_le32(&denali_phy[938], 0x1 << 17, reg_value);
560 clrsetbits_le32(&denali_phy[936], 0x1 << 17, reg_value);
561 clrsetbits_le32(&denali_phy[940], 0x1 << 17, reg_value);
562 clrsetbits_le32(&denali_phy[934], 0x1 << 17, reg_value);
564 /* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
565 clrsetbits_le32(&denali_phy[930], 0x1 << 17, reg_value);
/* NOTE(review): return value of phy_io_config() is ignored here — confirm */
567 phy_io_config(chan, params);
/*
 * Kick the controller out of the post-training state: release the GRF
 * hold, wait for controller init done (CTL_203 bit 3), seed the per-byte
 * PHY delay registers and restore the saved PWRUP_SREFRESH_EXIT setting.
 */
570 static void pctl_start(struct dram_info *dram, u8 channel)
572 const struct chan_info *chan = &dram->chan[channel];
573 u32 *denali_ctl = chan->pctl->denali_ctl;
574 u32 *denali_phy = chan->publ->denali_phy;
575 u32 *ddrc0_con = get_ddrc0_con(dram, channel);
/*
 * NOTE(review): ddrc0_con is already a pointer to the GRF register, so
 * `&ddrc0_con` passes the address of the local stack variable to writel()
 * here and at line 595 below — this looks like it should be plain
 * `ddrc0_con`. Confirm against hardware behavior before changing.
 */
579 writel(0x01000000, &ddrc0_con);
581 clrsetbits_le32(&denali_phy[957], 0x3 << 24, 0x2 << 24);
/* Poll controller init-complete; timeout path prints and (presumably) bails */
583 while (!(readl(&denali_ctl[203]) & (1 << 3))) {
585 printf("%s: Failed to init pctl for channel %d\n",
595 writel(0x01000100, &ddrc0_con);
/* Replicate tmp into both halfwords of the 5 slave-delay regs per byte lane */
597 for (byte = 0; byte < 4; byte++) {
599 writel((tmp << 16) | tmp, &denali_phy[53 + (128 * byte)]);
600 writel((tmp << 16) | tmp, &denali_phy[54 + (128 * byte)]);
601 writel((tmp << 16) | tmp, &denali_phy[55 + (128 * byte)]);
602 writel((tmp << 16) | tmp, &denali_phy[56 + (128 * byte)]);
603 writel((tmp << 16) | tmp, &denali_phy[57 + (128 * byte)]);
605 clrsetbits_le32(&denali_phy[58 + (128 * byte)], 0xffff, tmp);
/* Restore the PWRUP_SREFRESH_EXIT bit saved by pctl_cfg() */
608 clrsetbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT,
609 dram->pwrup_srefresh_exit[channel]);
/*
 * Load the precomputed CTL/PI/PHY register images into one channel,
 * start the controller and PI, wait for PHY DLL lock (non-LPDDR4),
 * then finish the PHY programming and ODT/drive setup.
 */
612 static int pctl_cfg(struct dram_info *dram, const struct chan_info *chan,
613 u32 channel, const struct rk3399_sdram_params *params)
615 u32 *denali_ctl = chan->pctl->denali_ctl;
616 u32 *denali_pi = chan->pi->denali_pi;
617 u32 *denali_phy = chan->publ->denali_phy;
618 const u32 *params_ctl = params->pctl_regs.denali_ctl;
619 const u32 *params_phy = params->phy_regs.denali_phy;
623 * work around controller bug:
624 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
/* Hence: copy CTL_1..end first, write CTL_0 (holds DRAM_CLASS) last */
626 copy_to_reg(&denali_ctl[1], &params_ctl[1],
627 sizeof(struct rk3399_ddr_pctl_regs) - 4);
628 writel(params_ctl[0], &denali_ctl[0]);
631 * two channel init at the same time, then ZQ Cal Start
632 * at the same time, it will use the same RZQ, but cannot
633 * start at the same time.
635 * So, increase tINIT3 for channel 1, will avoid two
636 * channel ZQ Cal Start at the same time
638 if (params->base.dramtype == LPDDR4 && channel == 1) {
/* Stretch tINIT3 (CTL_14) by ~1ms worth of DDR clock cycles */
639 tmp = ((params->base.ddr_freq * MHz + 999) / 1000);
640 tmp1 = readl(&denali_ctl[14]);
641 writel(tmp + tmp1, &denali_ctl[14]);
644 copy_to_reg(denali_pi, &params->pi_regs.denali_pi[0],
645 sizeof(struct rk3399_ddr_pi_regs));
647 /* rank count need to set for init */
648 set_memory_map(chan, channel, params);
650 writel(params->phy_regs.denali_phy[910], &denali_phy[910]);
651 writel(params->phy_regs.denali_phy[911], &denali_phy[911]);
652 writel(params->phy_regs.denali_phy[912], &denali_phy[912]);
654 if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
655 writel(params->phy_regs.denali_phy[898], &denali_phy[898]);
656 writel(params->phy_regs.denali_phy[919], &denali_phy[919]);
/* Save PWRUP_SREFRESH_EXIT and clear it for init; restored in pctl_start() */
659 dram->pwrup_srefresh_exit[channel] = readl(&denali_ctl[68]) &
661 clrbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT);
664 clrsetbits_le32(&denali_phy[957], 0x3 << 24, 1 << 24);
666 setbits_le32(&denali_pi[0], START);
667 setbits_le32(&denali_ctl[0], START);
670 * LPDDR4 use PLL bypass mode for init
671 * not need to wait for the PLL to lock
673 if (params->base.dramtype != LPDDR4) {
674 /* Waiting for phy DLL lock */
/* DLL-lock status bits live in PHY_920[16], PHY_921[16]/[0], PHY_922[0] */
676 tmp = readl(&denali_phy[920]);
677 tmp1 = readl(&denali_phy[921]);
678 tmp2 = readl(&denali_phy[922]);
679 if ((((tmp >> 16) & 0x1) == 0x1) &&
680 (((tmp1 >> 16) & 0x1) == 0x1) &&
681 (((tmp1 >> 0) & 0x1) == 0x1) &&
682 (((tmp2 >> 0) & 0x1) == 0x1))
/* Copy the remaining PHY image in its discontiguous register ranges */
687 copy_to_reg(&denali_phy[896], &params_phy[896], (958 - 895) * 4);
688 copy_to_reg(&denali_phy[0], &params_phy[0], (90 - 0 + 1) * 4);
689 copy_to_reg(&denali_phy[128], &params_phy[128], (218 - 128 + 1) * 4);
690 copy_to_reg(&denali_phy[256], &params_phy[256], (346 - 256 + 1) * 4);
691 copy_to_reg(&denali_phy[384], &params_phy[384], (474 - 384 + 1) * 4);
692 copy_to_reg(&denali_phy[512], &params_phy[512], (549 - 512 + 1) * 4);
693 copy_to_reg(&denali_phy[640], &params_phy[640], (677 - 640 + 1) * 4);
694 copy_to_reg(&denali_phy[768], &params_phy[768], (805 - 768 + 1) * 4);
695 set_ds_odt(chan, params);
698 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8
699 * dqs_tsel_wr_end[7:4] add Half cycle
701 tmp = (readl(&denali_phy[84]) >> 8) & 0xff;
702 clrsetbits_le32(&denali_phy[84], 0xff << 8, (tmp + 0x10) << 8);
703 tmp = (readl(&denali_phy[212]) >> 8) & 0xff;
704 clrsetbits_le32(&denali_phy[212], 0xff << 8, (tmp + 0x10) << 8);
705 tmp = (readl(&denali_phy[340]) >> 8) & 0xff;
706 clrsetbits_le32(&denali_phy[340], 0xff << 8, (tmp + 0x10) << 8);
707 tmp = (readl(&denali_phy[468]) >> 8) & 0xff;
708 clrsetbits_le32(&denali_phy[468], 0xff << 8, (tmp + 0x10) << 8);
711 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_8
712 * dq_tsel_wr_end[7:4] add Half cycle
714 tmp = (readl(&denali_phy[83]) >> 16) & 0xff;
715 clrsetbits_le32(&denali_phy[83], 0xff << 16, (tmp + 0x10) << 16);
716 tmp = (readl(&denali_phy[211]) >> 16) & 0xff;
717 clrsetbits_le32(&denali_phy[211], 0xff << 16, (tmp + 0x10) << 16);
718 tmp = (readl(&denali_phy[339]) >> 16) & 0xff;
719 clrsetbits_le32(&denali_phy[339], 0xff << 16, (tmp + 0x10) << 16);
720 tmp = (readl(&denali_phy[467]) >> 16) & 0xff;
721 clrsetbits_le32(&denali_phy[467], 0xff << 16, (tmp + 0x10) << 16);
/*
 * When per-chip-select training is enabled (PHY_84 bit 16), point each
 * data slice's training-index bit at the given rank before training it.
 */
726 static void select_per_cs_training_index(const struct chan_info *chan,
729 u32 *denali_phy = chan->publ->denali_phy;
731 /* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
732 if ((readl(&denali_phy[84]) >> 16) & 1) {
735 * phy_per_cs_training_index_X 1bit offset_24
737 clrsetbits_le32(&denali_phy[8], 0x1 << 24, rank << 24);
738 clrsetbits_le32(&denali_phy[136], 0x1 << 24, rank << 24);
739 clrsetbits_le32(&denali_phy[264], 0x1 << 24, rank << 24);
740 clrsetbits_le32(&denali_phy[392], 0x1 << 24, rank << 24);
/*
 * After write leveling, broadcast an overriding leveling value to all
 * frequency copies / chip selects of every byte lane, then request a
 * controller update (CTL_200 ctrlupd_req) so it takes effect.
 */
744 static void override_write_leveling_value(const struct chan_info *chan)
746 u32 *denali_ctl = chan->pctl->denali_ctl;
747 u32 *denali_phy = chan->publ->denali_phy;
750 /* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
751 setbits_le32(&denali_phy[896], 1);
755 * phy_per_cs_training_multicast_en_X 1bit offset_16
757 clrsetbits_le32(&denali_phy[8], 0x1 << 16, 1 << 16);
758 clrsetbits_le32(&denali_phy[136], 0x1 << 16, 1 << 16);
759 clrsetbits_le32(&denali_phy[264], 0x1 << 16, 1 << 16);
760 clrsetbits_le32(&denali_phy[392], 0x1 << 16, 1 << 16);
/* Write the override into bits 31:16 of PHY_63 for each of the 4 lanes */
762 for (byte = 0; byte < 4; byte++)
763 clrsetbits_le32(&denali_phy[63 + (128 * byte)], 0xffff << 16,
766 /* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
767 clrbits_le32(&denali_phy[896], 1);
769 /* CTL_200 ctrlupd_req 1bit offset_8 */
770 clrsetbits_le32(&denali_ctl[200], 0x1 << 8, 0x1 << 8);
/*
 * Command/address (CA) training, LPDDR3/LPDDR4 only. Runs PI-driven CALVL
 * for each chip select in rank_mask, polling PI_174 for done/error and
 * checking the PHY CA-calibration observation registers for failures.
 */
773 static int data_training_ca(const struct chan_info *chan, u32 channel,
774 const struct rk3399_sdram_params *params)
776 u32 *denali_pi = chan->pi->denali_pi;
777 u32 *denali_phy = chan->publ->denali_phy;
779 u32 obs_0, obs_1, obs_2, obs_err = 0;
780 u32 rank = params->ch[channel].cap_info.rank;
783 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
784 writel(0x00003f7c, (&denali_pi[175]));
/* LPDDR4 trains CS0/CS2 (or all four); others train CS0 (or CS0+CS1) */
786 if (params->base.dramtype == LPDDR4)
787 rank_mask = (rank == 1) ? 0x5 : 0xf;
789 rank_mask = (rank == 1) ? 0x1 : 0x3;
791 for (i = 0; i < 4; i++) {
792 if (!(rank_mask & (1 << i)))
795 select_per_cs_training_index(chan, i);
797 /* PI_100 PI_CALVL_EN:RW:8:2 */
798 clrsetbits_le32(&denali_pi[100], 0x3 << 8, 0x2 << 8);
800 /* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
801 clrsetbits_le32(&denali_pi[92],
802 (0x1 << 16) | (0x3 << 24),
803 (0x1 << 16) | (i << 24));
805 /* Waiting for training complete */
807 /* PI_174 PI_INT_STATUS:RD:8:18 */
808 tmp = readl(&denali_pi[174]) >> 8;
811 * PHY_532/660/789 phy_adr_calvl_obs1_:0:32
/*
 * NOTE(review): the comment above says PHY_789 but the code reads
 * PHY_788 — one of the two is wrong; confirm against the PHY manual.
 */
813 obs_0 = readl(&denali_phy[532]);
814 obs_1 = readl(&denali_phy[660]);
815 obs_2 = readl(&denali_phy[788]);
816 if (((obs_0 >> 30) & 0x3) ||
817 ((obs_1 >> 30) & 0x3) ||
818 ((obs_2 >> 30) & 0x3))
/* done = CALVL-done(11) + master-int(13) with no error(5) and no obs error */
820 if ((((tmp >> 11) & 0x1) == 0x1) &&
821 (((tmp >> 13) & 0x1) == 0x1) &&
822 (((tmp >> 5) & 0x1) == 0x0) &&
825 else if ((((tmp >> 5) & 0x1) == 0x1) ||
830 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
831 writel(0x00003f7c, (&denali_pi[175]));
/* Disable CALVL again once all selected ranks are trained */
834 clrbits_le32(&denali_pi[100], 0x3 << 8);
/*
 * Write leveling (WRLVL) for each rank: trigger PI_59/PI_60, poll PI_174
 * for done/error, and check the per-lane wrlvl status observation regs.
 * On success the measured values are overridden/multicast via
 * override_write_leveling_value().
 */
839 static int data_training_wl(const struct chan_info *chan, u32 channel,
840 const struct rk3399_sdram_params *params)
842 u32 *denali_pi = chan->pi->denali_pi;
843 u32 *denali_phy = chan->publ->denali_phy;
845 u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
846 u32 rank = params->ch[channel].cap_info.rank;
848 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
849 writel(0x00003f7c, (&denali_pi[175]));
851 for (i = 0; i < rank; i++) {
852 select_per_cs_training_index(chan, i);
854 /* PI_60 PI_WRLVL_EN:RW:8:2 */
855 clrsetbits_le32(&denali_pi[60], 0x3 << 8, 0x2 << 8);
857 /* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
858 clrsetbits_le32(&denali_pi[59],
859 (0x1 << 8) | (0x3 << 16),
860 (0x1 << 8) | (i << 16));
862 /* Waiting for training complete */
864 /* PI_174 PI_INT_STATUS:RD:8:18 */
865 tmp = readl(&denali_pi[174]) >> 8;
868 * check status obs, if error maybe can not
869 * get leveling done PHY_40/168/296/424
870 * phy_wrlvl_status_obs_X:0:13
872 obs_0 = readl(&denali_phy[40]);
873 obs_1 = readl(&denali_phy[168]);
874 obs_2 = readl(&denali_phy[296]);
875 obs_3 = readl(&denali_phy[424]);
/* Bit 12 of each lane's status = leveling failure indicator */
876 if (((obs_0 >> 12) & 0x1) ||
877 ((obs_1 >> 12) & 0x1) ||
878 ((obs_2 >> 12) & 0x1) ||
879 ((obs_3 >> 12) & 0x1))
/* done = WRLVL-done(10) + master-int(13) with no error(4) and no obs error */
881 if ((((tmp >> 10) & 0x1) == 0x1) &&
882 (((tmp >> 13) & 0x1) == 0x1) &&
883 (((tmp >> 4) & 0x1) == 0x0) &&
886 else if ((((tmp >> 4) & 0x1) == 0x1) ||
891 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
892 writel(0x00003f7c, (&denali_pi[175]));
895 override_write_leveling_value(chan);
896 clrbits_le32(&denali_pi[60], 0x3 << 8);
/*
 * Read-gate training (RDLVL_GATE) for each rank: trigger PI_74/PI_80,
 * poll PI_174 for done/error, and check the gate-level status in the
 * per-lane observation registers PHY_43/171/299/427.
 */
901 static int data_training_rg(const struct chan_info *chan, u32 channel,
902 const struct rk3399_sdram_params *params)
904 u32 *denali_pi = chan->pi->denali_pi;
905 u32 *denali_phy = chan->publ->denali_phy;
907 u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
908 u32 rank = params->ch[channel].cap_info.rank;
910 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
911 writel(0x00003f7c, (&denali_pi[175]));
913 for (i = 0; i < rank; i++) {
914 select_per_cs_training_index(chan, i);
916 /* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
917 clrsetbits_le32(&denali_pi[80], 0x3 << 24, 0x2 << 24);
920 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
921 * PI_RDLVL_CS:RW:24:2
923 clrsetbits_le32(&denali_pi[74],
924 (0x1 << 16) | (0x3 << 24),
925 (0x1 << 16) | (i << 24));
927 /* Waiting for training complete */
929 /* PI_174 PI_INT_STATUS:RD:8:18 */
930 tmp = readl(&denali_pi[174]) >> 8;
935 * PHY_GTLVL_STATUS_OBS_x:16:8
937 obs_0 = readl(&denali_phy[43]);
938 obs_1 = readl(&denali_phy[171]);
939 obs_2 = readl(&denali_phy[299]);
940 obs_3 = readl(&denali_phy[427]);
/* Bits 23:22 of each lane's status = gate-training error flags */
941 if (((obs_0 >> (16 + 6)) & 0x3) ||
942 ((obs_1 >> (16 + 6)) & 0x3) ||
943 ((obs_2 >> (16 + 6)) & 0x3) ||
944 ((obs_3 >> (16 + 6)) & 0x3))
/* done = gate-done(9) + master-int(13) with no error(3) and no obs error */
946 if ((((tmp >> 9) & 0x1) == 0x1) &&
947 (((tmp >> 13) & 0x1) == 0x1) &&
948 (((tmp >> 3) & 0x1) == 0x0) &&
951 else if ((((tmp >> 3) & 0x1) == 0x1) ||
956 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
957 writel(0x00003f7c, (&denali_pi[175]));
960 clrbits_le32(&denali_pi[80], 0x3 << 24);
/*
 * Read leveling (RDLVL) for each rank: trigger PI_74/PI_80 and poll
 * PI_174 done(8)/error(2) bits. Unlike the other stages, no PHY
 * observation registers are checked here (per the note at original
 * lines 992-994).
 */
965 static int data_training_rl(const struct chan_info *chan, u32 channel,
966 const struct rk3399_sdram_params *params)
968 u32 *denali_pi = chan->pi->denali_pi;
970 u32 rank = params->ch[channel].cap_info.rank;
972 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
973 writel(0x00003f7c, (&denali_pi[175]));
975 for (i = 0; i < rank; i++) {
976 select_per_cs_training_index(chan, i);
978 /* PI_80 PI_RDLVL_EN:RW:16:2 */
979 clrsetbits_le32(&denali_pi[80], 0x3 << 16, 0x2 << 16);
981 /* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
982 clrsetbits_le32(&denali_pi[74],
983 (0x1 << 8) | (0x3 << 24),
984 (0x1 << 8) | (i << 24));
986 /* Waiting for training complete */
988 /* PI_174 PI_INT_STATUS:RD:8:18 */
989 tmp = readl(&denali_pi[174]) >> 8;
992 * make sure status obs not report error bit
994 * phy_rdlvl_status_obs_X:16:8
996 if ((((tmp >> 8) & 0x1) == 0x1) &&
997 (((tmp >> 13) & 0x1) == 0x1) &&
998 (((tmp >> 2) & 0x1) == 0x0))
1000 else if (((tmp >> 2) & 0x1) == 0x1)
1004 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1005 writel(0x00003f7c, (&denali_pi[175]));
1008 clrbits_le32(&denali_pi[80], 0x3 << 16);
/*
 * Write DQ leveling (WDQLVL), used for LPDDR4: for each chip select in
 * rank_mask, disable PI_WDQLVL_VREF_EN, trigger PI_121/PI_124 and poll
 * PI_174 done(12)/error(6) bits.
 */
1013 static int data_training_wdql(const struct chan_info *chan, u32 channel,
1014 const struct rk3399_sdram_params *params)
1016 u32 *denali_pi = chan->pi->denali_pi;
1018 u32 rank = params->ch[channel].cap_info.rank;
1021 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1022 writel(0x00003f7c, (&denali_pi[175]));
/* Same CS mask scheme as CA training: LPDDR4 uses CS0/2 (or all four) */
1024 if (params->base.dramtype == LPDDR4)
1025 rank_mask = (rank == 1) ? 0x5 : 0xf;
1027 rank_mask = (rank == 1) ? 0x1 : 0x3;
1029 for (i = 0; i < 4; i++) {
1030 if (!(rank_mask & (1 << i)))
1033 select_per_cs_training_index(chan, i);
1036 * disable PI_WDQLVL_VREF_EN before wdq leveling?
1037 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
1039 clrbits_le32(&denali_pi[181], 0x1 << 8);
1041 /* PI_124 PI_WDQLVL_EN:RW:16:2 */
1042 clrsetbits_le32(&denali_pi[124], 0x3 << 16, 0x2 << 16);
1044 /* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
1045 clrsetbits_le32(&denali_pi[121],
1046 (0x1 << 8) | (0x3 << 16),
1047 (0x1 << 8) | (i << 16));
1049 /* Waiting for training complete */
1051 /* PI_174 PI_INT_STATUS:RD:8:18 */
1052 tmp = readl(&denali_pi[174]) >> 8;
1053 if ((((tmp >> 12) & 0x1) == 0x1) &&
1054 (((tmp >> 13) & 0x1) == 0x1) &&
1055 (((tmp >> 6) & 0x1) == 0x0))
1057 else if (((tmp >> 6) & 0x1) == 0x1)
1061 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1062 writel(0x00003f7c, (&denali_pi[175]));
1065 clrbits_le32(&denali_pi[124], 0x3 << 16);
/*
 * Run the requested training stages on one channel. PI_FULL_TRAINING is
 * expanded to the stage set supported by the DRAM type (LPDDR4: all five;
 * LPDDR3: CA+WL+RG; DDR3: WL+RG+...). DQS pull-up (PHY_927 bit 22) is
 * enabled for the duration of training and cleared afterwards.
 */
1070 static int data_training(const struct chan_info *chan, u32 channel,
1071 const struct rk3399_sdram_params *params,
1074 u32 *denali_phy = chan->publ->denali_phy;
1077 /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
1078 setbits_le32(&denali_phy[927], (1 << 22));
1080 if (training_flag == PI_FULL_TRAINING) {
1081 if (params->base.dramtype == LPDDR4) {
1082 training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
1083 PI_READ_GATE_TRAINING |
1084 PI_READ_LEVELING | PI_WDQ_LEVELING;
1085 } else if (params->base.dramtype == LPDDR3) {
1086 training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
1087 PI_READ_GATE_TRAINING;
1088 } else if (params->base.dramtype == DDR3) {
1089 training_flag = PI_WRITE_LEVELING |
1090 PI_READ_GATE_TRAINING |
1095 /* ca training(LPDDR4,LPDDR3 support) */
1096 if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
1097 ret = data_training_ca(chan, channel, params);
1099 debug("%s: data training ca failed\n", __func__);
1104 /* write leveling(LPDDR4,LPDDR3,DDR3 support) */
1105 if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
1106 ret = data_training_wl(chan, channel, params);
1108 debug("%s: data training wl failed\n", __func__);
1113 /* read gate training(LPDDR4,LPDDR3,DDR3 support) */
1114 if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
1115 ret = data_training_rg(chan, channel, params);
1117 debug("%s: data training rg failed\n", __func__);
1122 /* read leveling(LPDDR4,LPDDR3,DDR3 support) */
1123 if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
1124 ret = data_training_rl(chan, channel, params);
1126 debug("%s: data training rl failed\n", __func__);
1131 /* wdq leveling(LPDDR4 support) */
1132 if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
1133 ret = data_training_wdql(chan, channel, params);
1135 debug("%s: data training wdql failed\n", __func__);
1140 /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
1141 clrbits_le32(&denali_phy[927], (1 << 22));
/*
 * set_ddrconfig() - program the memory scheduler's ddrconf/ddrsize
 * registers for one channel from the detected capacity.
 *
 * cs0_cap is 1 << (cs0_row + col + bk + bw - 20); the -20 suggests the
 * unit is MiB (assumes row+col+bank+buswidth sum to log2 of the byte
 * capacity — TODO confirm against the msch register spec).  cs1_cap is
 * derived by the cs0/cs1 row difference when a second rank exists, and
 * both are scaled by 3/4 for row_3_4 (6Gb-style) parts.  ddrsize packs
 * cs0_cap/32 and cs1_cap/32 into bytes 0 and 1.
 */
1146 static void set_ddrconfig(const struct chan_info *chan,
1147 const struct rk3399_sdram_params *params,
1148 unsigned char channel, u32 ddrconfig)
1150 /* only need to set ddrconfig */
1151 struct rk3399_msch_regs *ddr_msch_regs = chan->msch;
1152 unsigned int cs0_cap = 0;
1153 unsigned int cs1_cap = 0;
1155 cs0_cap = (1 << (params->ch[channel].cap_info.cs0_row
1156 + params->ch[channel].cap_info.col
1157 + params->ch[channel].cap_info.bk
1158 + params->ch[channel].cap_info.bw - 20));
1159 if (params->ch[channel].cap_info.rank > 1)
1160 cs1_cap = cs0_cap >> (params->ch[channel].cap_info.cs0_row
1161 - params->ch[channel].cap_info.cs1_row);
1162 if (params->ch[channel].cap_info.row_3_4) {
1163 cs0_cap = cs0_cap * 3 / 4;
1164 cs1_cap = cs1_cap * 3 / 4;
/* ddrconf is duplicated into bits [7:0] and [15:8]. */
1167 writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
1168 writel(((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8),
1169 &ddr_msch_regs->ddrsize);
/*
 * dram_all_config() - publish the detected DRAM layout to the rest of
 * the system.
 *
 * Encodes per-channel geometry (rank, col, bk, bw, dbw, row counts,
 * row_3_4) into sys_reg2/sys_reg3 using the SYS_REG_ENC_* helpers and
 * writes them to PMUGRF os_reg2/os_reg3, where later boot stages read
 * them back (see rockchip_sdram_size() in the probe path).  It also
 * programs the per-channel NoC msch timing registers, sets the address
 * stride in PMUSGRF soc_con4, and arms the reboot-hold presets.
 *
 * NOTE(review): several '&params' tokens below were mangled to '¶ms'
 * by a text-encoding pass — restore '&params' when touching this code.
 */
1172 static void dram_all_config(struct dram_info *dram,
1173 const struct rk3399_sdram_params *params)
1177 unsigned int channel, idx;
1179 sys_reg2 |= SYS_REG_ENC_DDRTYPE(params->base.dramtype);
1180 sys_reg2 |= SYS_REG_ENC_NUM_CH(params->base.num_channels);
/* Walk both physical channels but only count populated ones. */
1182 for (channel = 0, idx = 0;
1183 (idx < params->base.num_channels) && (channel < 2);
1185 const struct rk3399_sdram_channel *info = ¶ms->ch[channel];
1186 struct rk3399_msch_regs *ddr_msch_regs;
1187 const struct rk3399_msch_timings *noc_timing;
/* col == 0 marks a channel where detection found no DRAM. */
1189 if (params->ch[channel].cap_info.col == 0)
1192 sys_reg2 |= SYS_REG_ENC_ROW_3_4(info->cap_info.row_3_4, channel);
1193 sys_reg2 |= SYS_REG_ENC_CHINFO(channel);
1194 sys_reg2 |= SYS_REG_ENC_RANK(info->cap_info.rank, channel);
1195 sys_reg2 |= SYS_REG_ENC_COL(info->cap_info.col, channel);
1196 sys_reg2 |= SYS_REG_ENC_BK(info->cap_info.bk, channel);
1197 sys_reg2 |= SYS_REG_ENC_BW(info->cap_info.bw, channel);
1198 sys_reg2 |= SYS_REG_ENC_DBW(info->cap_info.dbw, channel);
1199 SYS_REG_ENC_CS0_ROW(info->cap_info.cs0_row, sys_reg2, sys_reg3, channel);
1200 if (info->cap_info.cs1_row)
1201 SYS_REG_ENC_CS1_ROW(info->cap_info.cs1_row, sys_reg2,
1203 sys_reg3 |= SYS_REG_ENC_CS1_COL(info->cap_info.col, channel);
1204 sys_reg3 |= SYS_REG_ENC_VERSION(DDR_SYS_REG_VERSION);
/* Program the NoC memory-scheduler timings for this channel. */
1206 ddr_msch_regs = dram->chan[channel].msch;
1207 noc_timing = ¶ms->ch[channel].noc_timings;
1208 writel(noc_timing->ddrtiminga0,
1209 &ddr_msch_regs->ddrtiminga0);
1210 writel(noc_timing->ddrtimingb0,
1211 &ddr_msch_regs->ddrtimingb0);
1212 writel(noc_timing->ddrtimingc0.d32,
1213 &ddr_msch_regs->ddrtimingc0);
1214 writel(noc_timing->devtodev0,
1215 &ddr_msch_regs->devtodev0);
1216 writel(noc_timing->ddrmode.d32,
1217 &ddr_msch_regs->ddrmode);
1219 /* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
1220 if (params->ch[channel].cap_info.rank == 1)
1221 setbits_le32(&dram->chan[channel].pctl->denali_ctl[276],
1225 writel(sys_reg2, &dram->pmugrf->os_reg2);
1226 writel(sys_reg3, &dram->pmugrf->os_reg3);
1227 rk_clrsetreg(&dram->pmusgrf->soc_con4, 0x1f << 10,
1228 params->base.stride << 10);
1230 /* reboot hold register set */
1231 writel(PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
1232 PRESET_GPIO1_HOLD(1),
1233 &dram->pmucru->pmucru_rstnhold_con[1]);
1234 clrsetbits_le32(&dram->cru->glb_rst_con, 0x3, 0x3);
/*
 * switch_to_phy_index1() - hand the controller over to PHY frequency
 * index 1 and retrain.
 *
 * Requests the frequency change through the CIC (cic_ctrl0), polls
 * cic_status0 bit 2 for the change and bit 0 for completion (the
 * timeout/decrement lines are outside this excerpt — both polls print
 * a debug message on overtime), then reruns full data training on each
 * populated channel with the PHY index-select field (PHY_896) flipped.
 */
1237 static int switch_to_phy_index1(struct dram_info *dram,
1238 const struct rk3399_sdram_params *params)
1242 u32 ch_count = params->base.num_channels;
1246 writel(RK_CLRSETBITS(0x03 << 4 | 1 << 2 | 1,
1247 1 << 4 | 1 << 2 | 1),
1248 &dram->cic->cic_ctrl0);
1249 while (!(readl(&dram->cic->cic_status0) & (1 << 2))) {
1253 debug("index1 frequency change overtime\n");
1259 writel(RK_CLRSETBITS(1 << 1, 1 << 1), &dram->cic->cic_ctrl0);
1260 while (!(readl(&dram->cic->cic_status0) & (1 << 0))) {
1264 debug("index1 frequency done overtime\n");
/* Retrain every channel against the index-1 PHY settings. */
1269 for (channel = 0; channel < ch_count; channel++) {
1270 denali_phy = dram->chan[channel].publ->denali_phy;
1271 clrsetbits_le32(&denali_phy[896], (0x3 << 8) | 1, 1 << 8);
1272 ret = data_training(&dram->chan[channel], channel,
1273 params, PI_FULL_TRAINING);
1275 debug("index1 training failed\n");
/*
 * calculate_stride() - choose the PMUSGRF address-stride setting from
 * the per-channel capacities.
 *
 * Computes each channel's capacity (in MiB, presumably — the same
 * 1 << (rows+col+bk+bw-20) formula as set_ddrconfig()) and records
 * which channels are populated in @chinfo.  Single-channel (channel A)
 * systems get stride 0x17; equal dual-channel capacities pick a 256B
 * interleave stride (the capacity switch cases are outside this
 * excerpt).  Unresolvable layouts print an error with the total size.
 */
1283 static unsigned char calculate_stride(struct rk3399_sdram_params *params)
1285 unsigned int stride = params->base.stride;
1286 unsigned int channel, chinfo = 0;
1287 unsigned int ch_cap[2] = {0, 0};
1290 for (channel = 0; channel < 2; channel++) {
1291 unsigned int cs0_cap = 0;
1292 unsigned int cs1_cap = 0;
1293 struct sdram_cap_info *cap_info = ¶ms->ch[channel].cap_info;
/* col == 0: channel not populated, leave its capacity at 0. */
1295 if (cap_info->col == 0)
1298 cs0_cap = (1 << (cap_info->cs0_row + cap_info->col +
1299 cap_info->bk + cap_info->bw - 20));
1300 if (cap_info->rank > 1)
1301 cs1_cap = cs0_cap >> (cap_info->cs0_row
1302 - cap_info->cs1_row);
1303 if (cap_info->row_3_4) {
1304 cs0_cap = cs0_cap * 3 / 4;
1305 cs1_cap = cs1_cap * 3 / 4;
1307 ch_cap[channel] = cs0_cap + cs1_cap;
1308 chinfo |= 1 << channel;
1311 /* stride calculation for 1 channel */
1312 if (params->base.num_channels == 1 && chinfo & 1)
1313 return 0x17; /* channel a */
1315 /* stride calculation for 2 channels, default gstride type is 256B */
1316 if (ch_cap[0] == ch_cap[1]) {
1317 cap = ch_cap[0] + ch_cap[1];
1328 * 768MB + 768MB same as total 2GB memory
1329 * useful space: 0-768MB 1GB-1792MB
1336 /* 1536MB + 1536MB */
1345 printf("%s: Unable to calculate stride for ", __func__);
1346 print_size((cap * (1 << 20)), " capacity\n");
1351 sdram_print_stride(stride);
1356 static void clear_channel_params(struct rk3399_sdram_params *params, u8 channel)
1358 params->ch[channel].cap_info.rank = 0;
1359 params->ch[channel].cap_info.col = 0;
1360 params->ch[channel].cap_info.bk = 0;
1361 params->ch[channel].cap_info.bw = 32;
1362 params->ch[channel].cap_info.dbw = 32;
1363 params->ch[channel].cap_info.row_3_4 = 0;
1364 params->ch[channel].cap_info.cs0_row = 0;
1365 params->ch[channel].cap_info.cs1_row = 0;
1366 params->ch[channel].cap_info.ddrconfig = 0;
/*
 * pctl_init() - bring up the DDR protocol controller on both channels.
 *
 * For each channel: pulse the PHY/PCTL soft resets through the CRU,
 * put the PHY DLL in the mode required for the target frequency,
 * apply the controller/PI/PHY register configuration via pctl_cfg(),
 * and finally trigger controller initialization with pctl_start().
 * Errors from pctl_cfg() are reported (the return lines are outside
 * this excerpt).
 */
1369 static int pctl_init(struct dram_info *dram, struct rk3399_sdram_params *params)
1374 for (channel = 0; channel < 2; channel++) {
1375 const struct chan_info *chan = &dram->chan[channel];
1376 struct rk3399_cru *cru = dram->cru;
1377 struct rk3399_ddr_publ_regs *publ = chan->publ;
1379 phy_pctrl_reset(cru, channel);
1380 phy_dll_bypass_set(publ, params->base.ddr_freq);
1382 ret = pctl_cfg(dram, chan, channel, params);
1384 printf("%s: pctl config failed\n", __func__);
1388 /* start to trigger initialization */
1389 pctl_start(dram, channel);
/*
 * sdram_init() - top-level DRAM bring-up.
 *
 * Rejects frequencies above the per-type caps (933 MHz for DDR3/LPDDR3,
 * 800 MHz for LPDDR4), then for each channel probes the rank count by
 * re-running pctl_init() + read-gate training with rank = 2 then 1.
 * Channels that never train are cleared; populated ones get final
 * training (non-LPDDR4) and their msch ddrconfig.  Finally the stride
 * is computed, the layout is published via dram_all_config(), and the
 * controller is switched to PHY index 1.
 */
1395 static int sdram_init(struct dram_info *dram,
1396 struct rk3399_sdram_params *params)
1398 unsigned char dramtype = params->base.dramtype;
1399 unsigned int ddr_freq = params->base.ddr_freq;
1400 u32 training_flag = PI_READ_GATE_TRAINING;
1401 int channel, ch, rank;
1404 debug("Starting SDRAM initialization...\n");
1406 if ((dramtype == DDR3 && ddr_freq > 933) ||
1407 (dramtype == LPDDR3 && ddr_freq > 933) ||
1408 (dramtype == LPDDR4 && ddr_freq > 800)) {
/* NOTE(review): typo in runtime string — "to high" should be "too high". */
1409 debug("SDRAM frequency is to high!");
/* Rank detection: try 2 ranks, fall back to 1 if training fails. */
1413 for (ch = 0; ch < 2; ch++) {
1414 params->ch[ch].cap_info.rank = 2;
1415 for (rank = 2; rank != 0; rank--) {
1416 ret = pctl_init(dram, params);
1418 printf("%s: pctl init failed\n", __func__);
1422 /* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
1423 if (dramtype == LPDDR3)
1426 params->ch[ch].cap_info.rank = rank;
 * LPDDR3 CA training must be triggered before the other
 * training steps; DDR3 has no CA training.
1433 if (params->base.dramtype == LPDDR3)
1434 training_flag |= PI_CA_TRAINING;
1436 if (!(data_training(&dram->chan[ch], ch,
1437 params, training_flag)))
1440 /* Computed rank with associated channel number */
1441 params->ch[ch].cap_info.rank = rank;
/* Final per-channel configuration; col == 0 means no DRAM found. */
1444 params->base.num_channels = 0;
1445 for (channel = 0; channel < 2; channel++) {
1446 const struct chan_info *chan = &dram->chan[channel];
1447 struct sdram_cap_info *cap_info = ¶ms->ch[channel].cap_info;
1448 u8 training_flag = PI_FULL_TRAINING;
1450 if (cap_info->rank == 0) {
1451 clear_channel_params(params, channel);
1454 params->base.num_channels++;
1458 debug(channel ? "1: " : "0: ");
1460 /* LPDDR3 should have write and read gate training */
1461 if (params->base.dramtype == LPDDR3)
1462 training_flag = PI_WRITE_LEVELING |
1463 PI_READ_GATE_TRAINING;
1465 if (params->base.dramtype != LPDDR4) {
/*
 * NOTE(review): this passes 'dram' where data_training()'s visible
 * signature takes 'const struct chan_info *' — every other call site
 * passes &dram->chan[...]. Likely should be 'chan' here; verify
 * against the full file before changing.
 */
1466 ret = data_training(dram, channel, params,
1469 debug("%s: data train failed for channel %d\n",
1475 sdram_print_ddr_info(cap_info, ¶ms->base);
1477 set_ddrconfig(chan, params, channel, cap_info->ddrconfig);
1480 if (params->base.num_channels == 0) {
1481 printf("%s: ", __func__);
1482 sdram_print_dram_type(params->base.dramtype);
1483 printf(" - %dMHz failed!\n", params->base.ddr_freq);
1487 params->base.stride = calculate_stride(params);
1488 dram_all_config(dram, params);
1489 switch_to_phy_index1(dram, params);
1491 debug("Finish SDRAM initialization...\n");
/*
 * rk3399_dmc_ofdata_to_platdata() - read SDRAM parameters from the DT.
 *
 * Only compiled for the non-OF_PLATDATA path: pulls the raw
 * "rockchip,sdram-params" u32 array straight into plat->sdram_params
 * and builds the register regmap from the device node.  With
 * OF_PLATDATA the equivalent data comes from conv_of_platdata().
 */
1495 static int rk3399_dmc_ofdata_to_platdata(struct udevice *dev)
1497 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1498 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1501 ret = dev_read_u32_array(dev, "rockchip,sdram-params",
1502 (u32 *)&plat->sdram_params,
1503 sizeof(plat->sdram_params) / sizeof(u32));
1505 printf("%s: Cannot read rockchip,sdram-params %d\n",
1509 ret = regmap_init_mem(dev_ofnode(dev), &plat->map);
1511 printf("%s: regmap failed %d\n", __func__, ret);
/*
 * conv_of_platdata() - OF_PLATDATA counterpart of ofdata_to_platdata:
 * builds the regmap from the dtplat 'reg' tuples (address/size pairs,
 * hence the / 2).
 */
1517 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1518 static int conv_of_platdata(struct udevice *dev)
1520 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1521 struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
1524 ret = regmap_init_mem_platdata(dev, dtplat->reg,
1525 ARRAY_SIZE(dtplat->reg) / 2,
/*
 * rk3399_dmc_init() - SPL/TPL-side DRAM controller setup.
 *
 * Resolves the SDRAM parameters (DT array or dtplat blob depending on
 * OF_PLATDATA), looks up the CIC/GRF/PMUGRF/PMUSGRF syscons and the
 * CRU/PMUCRU, maps the eight per-channel register ranges (pctl, pi,
 * publ, msch for channels 0 and 1, regmap indices 0-7), sets the DDR
 * clock to params->base.ddr_freq MHz, and runs sdram_init().
 */
1534 static int rk3399_dmc_init(struct udevice *dev)
1536 struct dram_info *priv = dev_get_priv(dev);
1537 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1539 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1540 struct rk3399_sdram_params *params = &plat->sdram_params;
1542 struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
1543 struct rk3399_sdram_params *params =
1544 (void *)dtplat->rockchip_sdram_params;
1546 ret = conv_of_platdata(dev);
1551 priv->cic = syscon_get_first_range(ROCKCHIP_SYSCON_CIC);
1552 priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
1553 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1554 priv->pmusgrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUSGRF);
1555 priv->pmucru = rockchip_get_pmucru();
1556 priv->cru = rockchip_get_cru();
/* Regmap ranges 0-3 are channel 0, 4-7 are channel 1. */
1557 priv->chan[0].pctl = regmap_get_range(plat->map, 0);
1558 priv->chan[0].pi = regmap_get_range(plat->map, 1);
1559 priv->chan[0].publ = regmap_get_range(plat->map, 2);
1560 priv->chan[0].msch = regmap_get_range(plat->map, 3);
1561 priv->chan[1].pctl = regmap_get_range(plat->map, 4);
1562 priv->chan[1].pi = regmap_get_range(plat->map, 5);
1563 priv->chan[1].publ = regmap_get_range(plat->map, 6);
1564 priv->chan[1].msch = regmap_get_range(plat->map, 7);
1566 debug("con reg %p %p %p %p %p %p %p %p\n",
1567 priv->chan[0].pctl, priv->chan[0].pi,
1568 priv->chan[0].publ, priv->chan[0].msch,
1569 priv->chan[1].pctl, priv->chan[1].pi,
1570 priv->chan[1].publ, priv->chan[1].msch);
1571 debug("cru %p, cic %p, grf %p, sgrf %p, pmucru %p\n", priv->cru,
1572 priv->cic, priv->pmugrf, priv->pmusgrf, priv->pmucru);
1574 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1575 ret = clk_get_by_index_platdata(dev, 0, dtplat->clocks, &priv->ddr_clk);
1577 ret = clk_get_by_index(dev, 0, &priv->ddr_clk);
1580 printf("%s clk get failed %d\n", __func__, ret);
1584 ret = clk_set_rate(&priv->ddr_clk, params->base.ddr_freq * MHz);
1586 printf("%s clk set failed %d\n", __func__, ret);
1590 ret = sdram_init(priv, params);
1592 printf("%s DRAM init failed %d\n", __func__, ret);
/*
 * rk3399_dmc_probe() - driver probe.
 *
 * In TPL (or SPL-without-TPL) builds this performs the actual DRAM
 * initialization; in later stages it only reads back the size that
 * the init stage encoded into PMUGRF os_reg2/os_reg3 (see
 * dram_all_config()).
 */
1600 static int rk3399_dmc_probe(struct udevice *dev)
1602 #if defined(CONFIG_TPL_BUILD) || \
1603 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1604 if (rk3399_dmc_init(dev))
1607 struct dram_info *priv = dev_get_priv(dev);
1609 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1610 debug("%s: pmugrf = %p\n", __func__, priv->pmugrf);
1611 priv->info.base = CONFIG_SYS_SDRAM_BASE;
1613 rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg2);
/*
 * rk3399_dmc_get_info() - RAM uclass get_info hook; reports the
 * base/size cached in priv->info (body continues outside this excerpt).
 */
1618 static int rk3399_dmc_get_info(struct udevice *dev, struct ram_info *info)
1620 struct dram_info *priv = dev_get_priv(dev);
/* RAM uclass operations exposed by this driver. */
1627 static struct ram_ops rk3399_dmc_ops = {
1628 .get_info = rk3399_dmc_get_info,
/* Device-tree match table. */
1631 static const struct udevice_id rk3399_dmc_ids[] = {
1632 { .compatible = "rockchip,rk3399-dmc" },
/*
 * Driver registration.  The ofdata_to_platdata hook and the platdata
 * allocation are only needed in the DRAM-initializing stage (TPL, or
 * SPL when there is no TPL); later stages just probe and read back
 * the size.
 */
1636 U_BOOT_DRIVER(dmc_rk3399) = {
1637 .name = "rockchip_rk3399_dmc",
1639 .of_match = rk3399_dmc_ids,
1640 .ops = &rk3399_dmc_ops,
1641 #if defined(CONFIG_TPL_BUILD) || \
1642 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1643 .ofdata_to_platdata = rk3399_dmc_ofdata_to_platdata,
1645 .probe = rk3399_dmc_probe,
1646 .priv_auto_alloc_size = sizeof(struct dram_info),
1647 #if defined(CONFIG_TPL_BUILD) || \
1648 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1649 .platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),