1 // SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
2 /*
3  * (C) Copyright 2016-2017 Rockchip Inc.
4  *
5  * Adapted from coreboot.
6  */
7
8 #include <common.h>
9 #include <clk.h>
10 #include <dm.h>
11 #include <dt-structs.h>
12 #include <ram.h>
13 #include <regmap.h>
14 #include <syscon.h>
15 #include <asm/io.h>
16 #include <asm/arch-rockchip/clock.h>
17 #include <asm/arch-rockchip/cru_rk3399.h>
18 #include <asm/arch-rockchip/grf_rk3399.h>
19 #include <asm/arch-rockchip/hardware.h>
20 #include <asm/arch-rockchip/sdram_common.h>
21 #include <asm/arch-rockchip/sdram_rk3399.h>
22 #include <linux/err.h>
23 #include <time.h>
24
25 #define PRESET_SGRF_HOLD(n)     ((0x1 << (6 + 16)) | ((n) << 6))
26 #define PRESET_GPIO0_HOLD(n)    ((0x1 << (7 + 16)) | ((n) << 7))
27 #define PRESET_GPIO1_HOLD(n)    ((0x1 << (8 + 16)) | ((n) << 8))
28
29 #define PHY_DRV_ODT_HI_Z        0x0
30 #define PHY_DRV_ODT_240         0x1
31 #define PHY_DRV_ODT_120         0x8
32 #define PHY_DRV_ODT_80          0x9
33 #define PHY_DRV_ODT_60          0xc
34 #define PHY_DRV_ODT_48          0xd
35 #define PHY_DRV_ODT_40          0xe
36 #define PHY_DRV_ODT_34_3        0xf
37
38 #define PHY_BOOSTP_EN           0x1
39 #define PHY_BOOSTN_EN           0x1
40 #define PHY_SLEWP_EN            0x1
41 #define PHY_SLEWN_EN            0x1
42
43 #define CRU_SFTRST_DDR_CTRL(ch, n)      ((0x1 << (8 + 16 + (ch) * 4)) | \
44                                         ((n) << (8 + (ch) * 4)))
45 #define CRU_SFTRST_DDR_PHY(ch, n)       ((0x1 << (9 + 16 + (ch) * 4)) | \
46                                         ((n) << (9 + (ch) * 4)))
47 struct chan_info {
48         struct rk3399_ddr_pctl_regs *pctl;
49         struct rk3399_ddr_pi_regs *pi;
50         struct rk3399_ddr_publ_regs *publ;
51         struct rk3399_msch_regs *msch;
52 };
53
54 struct dram_info {
55 #if defined(CONFIG_TPL_BUILD) || \
56         (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
57         u32 pwrup_srefresh_exit[2];
58         struct chan_info chan[2];
59         struct clk ddr_clk;
60         struct rk3399_cru *cru;
61         struct rk3399_grf_regs *grf;
62         struct rk3399_pmucru *pmucru;
63         struct rk3399_pmusgrf_regs *pmusgrf;
64         struct rk3399_ddr_cic_regs *cic;
65 #endif
66         struct ram_info info;
67         struct rk3399_pmugrf_regs *pmugrf;
68 };
69
70 #if defined(CONFIG_TPL_BUILD) || \
71         (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
72
73 struct rockchip_dmc_plat {
74 #if CONFIG_IS_ENABLED(OF_PLATDATA)
75         struct dtd_rockchip_rk3399_dmc dtplat;
76 #else
77         struct rk3399_sdram_params sdram_params;
78 #endif
79         struct regmap *map;
80 };
81
82 static void *get_ddrc0_con(struct dram_info *dram, u8 channel)
83 {
84         return (channel == 0) ? &dram->grf->ddrc0_con0 : &dram->grf->ddrc0_con1;
85 }
86
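/*
 * Copy n bytes of parameter values into a block of I/O registers, one
 * 32-bit word at a time (n is a byte count, not a word count).
 */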
87 static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
88 {
89         int i;
90
91         for (i = 0; i < n / sizeof(u32); i++) {
92                 writel(*src, dest);
93                 src++;
94                 dest++;
95         }
96 }
97
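/*
 * Assert (1) or de-assert (0) the soft resets of the DDR controller and
 * PHY for one channel through cru->softrst_con[4]; the upper half-word
 * of the write carries the write-enable mask for the affected bits.
 */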
98 static void rkclk_ddr_reset(struct rk3399_cru *cru, u32 channel, u32 ctl,
99                             u32 phy)
100 {
101         channel &= 0x1;
102         ctl &= 0x1;
103         phy &= 0x1;
104         writel(CRU_SFTRST_DDR_CTRL(channel, ctl) |
105                                    CRU_SFTRST_DDR_PHY(channel, phy),
106                                    &cru->softrst_con[4]);
107 }
108
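/*
 * Reset sequence for one channel: assert both the controller and PHY
 * resets, release the PHY reset, then release the controller reset,
 * with a 10us delay after each step.
 */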
109 static void phy_pctrl_reset(struct rk3399_cru *cru,  u32 channel)
110 {
111         rkclk_ddr_reset(cru, channel, 1, 1);
112         udelay(10);
113
114         rkclk_ddr_reset(cru, channel, 1, 0);
115         udelay(10);
116
117         rkclk_ddr_reset(cru, channel, 0, 0);
118         udelay(10);
119 }
120
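/*
 * Enable or disable PHY DLL bypass based on the DDR frequency (MHz):
 * at 125 MHz or below the software master-mode bits are set so the DLLs
 * are bypassed; above that they are cleared.
 */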
121 static void phy_dll_bypass_set(struct rk3399_ddr_publ_regs *ddr_publ_regs,
122                                u32 freq)
123 {
124         u32 *denali_phy = ddr_publ_regs->denali_phy;
125
126                 /* From the IP spec, only frequencies of 125 MHz or lower can enter DLL bypass mode */
127         if (freq <= 125) {
128                 /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
129                 setbits_le32(&denali_phy[86], (0x3 << 2) << 8);
130                 setbits_le32(&denali_phy[214], (0x3 << 2) << 8);
131                 setbits_le32(&denali_phy[342], (0x3 << 2) << 8);
132                 setbits_le32(&denali_phy[470], (0x3 << 2) << 8);
133
134                 /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
135                 setbits_le32(&denali_phy[547], (0x3 << 2) << 16);
136                 setbits_le32(&denali_phy[675], (0x3 << 2) << 16);
137                 setbits_le32(&denali_phy[803], (0x3 << 2) << 16);
138         } else {
139                 /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
140                 clrbits_le32(&denali_phy[86], (0x3 << 2) << 8);
141                 clrbits_le32(&denali_phy[214], (0x3 << 2) << 8);
142                 clrbits_le32(&denali_phy[342], (0x3 << 2) << 8);
143                 clrbits_le32(&denali_phy[470], (0x3 << 2) << 8);
144
145                 /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
146                 clrbits_le32(&denali_phy[547], (0x3 << 2) << 16);
147                 clrbits_le32(&denali_phy[675], (0x3 << 2) << 16);
148                 clrbits_le32(&denali_phy[803], (0x3 << 2) << 16);
149         }
150 }
151
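/*
 * Program the DRAM geometry for this channel (column/bank/row address
 * widths, chip-select map and half-bus-width reduction) into both the
 * controller (CTL) and PI register sets, using the channel's cap_info.
 */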
152 static void set_memory_map(const struct chan_info *chan, u32 channel,
153                            const struct rk3399_sdram_params *params)
154 {
155         const struct rk3399_sdram_channel *sdram_ch = &params->ch[channel];
156         u32 *denali_ctl = chan->pctl->denali_ctl;
157         u32 *denali_pi = chan->pi->denali_pi;
158         u32 cs_map;
159         u32 reduc;
160         u32 row;
161
162         /* Get row number from ddrconfig setting */
163         if (sdram_ch->cap_info.ddrconfig < 2 ||
164             sdram_ch->cap_info.ddrconfig == 4)
165                 row = 16;
166         else if (sdram_ch->cap_info.ddrconfig == 3)
167                 row = 14;
168         else
169                 row = 15;
170
171         cs_map = (sdram_ch->cap_info.rank > 1) ? 3 : 1;
172         reduc = (sdram_ch->cap_info.bw == 2) ? 0 : 1;
173
174         /* Set the dram configuration to ctrl */
175         clrsetbits_le32(&denali_ctl[191], 0xF, (12 - sdram_ch->cap_info.col));
176         clrsetbits_le32(&denali_ctl[190], (0x3 << 16) | (0x7 << 24),
177                         ((3 - sdram_ch->cap_info.bk) << 16) |
178                         ((16 - row) << 24));
179
180         clrsetbits_le32(&denali_ctl[196], 0x3 | (1 << 16),
181                         cs_map | (reduc << 16));
182
183         /* PI_199 PI_COL_DIFF:RW:0:4 */
184         clrsetbits_le32(&denali_pi[199], 0xF, (12 - sdram_ch->cap_info.col));
185
186         /* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
187         clrsetbits_le32(&denali_pi[155], (0x3 << 16) | (0x7 << 24),
188                         ((3 - sdram_ch->cap_info.bk) << 16) |
189                         ((16 - row) << 24));
190         /* PI_41 PI_CS_MAP:RW:24:4 */
191         clrsetbits_le32(&denali_pi[41], 0xf << 24, cs_map << 24);
192         if (sdram_ch->cap_info.rank == 1 && params->base.dramtype == DDR3)
193                 writel(0x2EC7FFFF, &denali_pi[34]);
194 }
195
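/*
 * Configure the PHY pad I/O cells: VREF mode/value for DQ and CA, the
 * pad mode select for the DRAM type, optional boost/slew settings for
 * LPDDR4 builds, and a pad speed range derived from the DDR frequency.
 */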
196 static int phy_io_config(const struct chan_info *chan,
197                          const struct rk3399_sdram_params *params)
198 {
199         u32 *denali_phy = chan->publ->denali_phy;
200         u32 vref_mode_dq, vref_value_dq, vref_mode_ac, vref_value_ac;
201         u32 mode_sel;
202         u32 reg_value;
203         u32 drv_value, odt_value;
204         u32 speed;
205
206         /* vref setting */
207         if (params->base.dramtype == LPDDR4) {
208                 /* LPDDR4 */
209                 vref_mode_dq = 0x6;
210                 vref_value_dq = 0x1f;
211                 vref_mode_ac = 0x6;
212                 vref_value_ac = 0x1f;
213                 mode_sel = 0x6;
214         } else if (params->base.dramtype == LPDDR3) {
215                 if (params->base.odt == 1) {
216                         vref_mode_dq = 0x5;  /* LPDDR3 ODT */
217                         drv_value = (readl(&denali_phy[6]) >> 12) & 0xf;
218                         odt_value = (readl(&denali_phy[6]) >> 4) & 0xf;
219                         if (drv_value == PHY_DRV_ODT_48) {
220                                 switch (odt_value) {
221                                 case PHY_DRV_ODT_240:
222                                         vref_value_dq = 0x16;
223                                         break;
224                                 case PHY_DRV_ODT_120:
225                                         vref_value_dq = 0x26;
226                                         break;
227                                 case PHY_DRV_ODT_60:
228                                         vref_value_dq = 0x36;
229                                         break;
230                                 default:
231                                         debug("Invalid ODT value.\n");
232                                         return -EINVAL;
233                                 }
234                         } else if (drv_value == PHY_DRV_ODT_40) {
235                                 switch (odt_value) {
236                                 case PHY_DRV_ODT_240:
237                                         vref_value_dq = 0x19;
238                                         break;
239                                 case PHY_DRV_ODT_120:
240                                         vref_value_dq = 0x23;
241                                         break;
242                                 case PHY_DRV_ODT_60:
243                                         vref_value_dq = 0x31;
244                                         break;
245                                 default:
246                                         debug("Invalid ODT value.\n");
247                                         return -EINVAL;
248                                 }
249                         } else if (drv_value == PHY_DRV_ODT_34_3) {
250                                 switch (odt_value) {
251                                 case PHY_DRV_ODT_240:
252                                         vref_value_dq = 0x17;
253                                         break;
254                                 case PHY_DRV_ODT_120:
255                                         vref_value_dq = 0x20;
256                                         break;
257                                 case PHY_DRV_ODT_60:
258                                         vref_value_dq = 0x2e;
259                                         break;
260                                 default:
261                                         debug("Invalid ODT value.\n");
262                                         return -EINVAL;
263                                 }
264                         } else {
265                                 debug("Invalid DRV value.\n");
266                                 return -EINVAL;
267                         }
268                 } else {
269                         vref_mode_dq = 0x2;  /* LPDDR3 */
270                         vref_value_dq = 0x1f;
271                 }
272                 vref_mode_ac = 0x2;
273                 vref_value_ac = 0x1f;
274                 mode_sel = 0x0;
275         } else if (params->base.dramtype == DDR3) {
276                 /* DDR3L */
277                 vref_mode_dq = 0x1;
278                 vref_value_dq = 0x1f;
279                 vref_mode_ac = 0x1;
280                 vref_value_ac = 0x1f;
281                 mode_sel = 0x1;
282         } else {
283                 debug("Unknown DRAM type.\n");
284                 return -EINVAL;
285         }
286
287         reg_value = (vref_mode_dq << 9) | (0x1 << 8) | vref_value_dq;
288
289         /* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
290         clrsetbits_le32(&denali_phy[913], 0xfff << 8, reg_value << 8);
291         /* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
292         clrsetbits_le32(&denali_phy[914], 0xfff, reg_value);
293         /* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
294         clrsetbits_le32(&denali_phy[914], 0xfff << 16, reg_value << 16);
295         /* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
296         clrsetbits_le32(&denali_phy[915], 0xfff, reg_value);
297
298         reg_value = (vref_mode_ac << 9) | (0x1 << 8) | vref_value_ac;
299
300         /* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
301         clrsetbits_le32(&denali_phy[915], 0xfff << 16, reg_value << 16);
302
303         /* PHY_924 PHY_PAD_FDBK_DRIVE */
304         clrsetbits_le32(&denali_phy[924], 0x7 << 15, mode_sel << 15);
305         /* PHY_926 PHY_PAD_DATA_DRIVE */
306         clrsetbits_le32(&denali_phy[926], 0x7 << 6, mode_sel << 6);
307         /* PHY_927 PHY_PAD_DQS_DRIVE */
308         clrsetbits_le32(&denali_phy[927], 0x7 << 6, mode_sel << 6);
309         /* PHY_928 PHY_PAD_ADDR_DRIVE */
310         clrsetbits_le32(&denali_phy[928], 0x7 << 14, mode_sel << 14);
311         /* PHY_929 PHY_PAD_CLK_DRIVE */
312         clrsetbits_le32(&denali_phy[929], 0x7 << 14, mode_sel << 14);
313         /* PHY_935 PHY_PAD_CKE_DRIVE */
314         clrsetbits_le32(&denali_phy[935], 0x7 << 14, mode_sel << 14);
315         /* PHY_937 PHY_PAD_RST_DRIVE */
316         clrsetbits_le32(&denali_phy[937], 0x7 << 14, mode_sel << 14);
317         /* PHY_939 PHY_PAD_CS_DRIVE */
318         clrsetbits_le32(&denali_phy[939], 0x7 << 14, mode_sel << 14);
319
320         if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
321                 /* BOOSTP_EN & BOOSTN_EN */
322                 reg_value = ((PHY_BOOSTP_EN << 4) | PHY_BOOSTN_EN);
323                 /* PHY_925 PHY_PAD_FDBK_DRIVE2 */
324                 clrsetbits_le32(&denali_phy[925], 0xff << 8, reg_value << 8);
325                 /* PHY_926 PHY_PAD_DATA_DRIVE */
326                 clrsetbits_le32(&denali_phy[926], 0xff << 12, reg_value << 12);
327                 /* PHY_927 PHY_PAD_DQS_DRIVE */
328                 clrsetbits_le32(&denali_phy[927], 0xff << 14, reg_value << 14);
329                 /* PHY_928 PHY_PAD_ADDR_DRIVE */
330                 clrsetbits_le32(&denali_phy[928], 0xff << 20, reg_value << 20);
331                 /* PHY_929 PHY_PAD_CLK_DRIVE */
332                 clrsetbits_le32(&denali_phy[929], 0xff << 22, reg_value << 22);
333                 /* PHY_935 PHY_PAD_CKE_DRIVE */
334                 clrsetbits_le32(&denali_phy[935], 0xff << 20, reg_value << 20);
335                 /* PHY_937 PHY_PAD_RST_DRIVE */
336                 clrsetbits_le32(&denali_phy[937], 0xff << 20, reg_value << 20);
337                 /* PHY_939 PHY_PAD_CS_DRIVE */
338                 clrsetbits_le32(&denali_phy[939], 0xff << 20, reg_value << 20);
339
340                 /* SLEWP_EN & SLEWN_EN */
341                 reg_value = ((PHY_SLEWP_EN << 3) | PHY_SLEWN_EN);
342                 /* PHY_924 PHY_PAD_FDBK_DRIVE */
343                 clrsetbits_le32(&denali_phy[924], 0x3f << 8, reg_value << 8);
344                 /* PHY_926 PHY_PAD_DATA_DRIVE */
345                 clrsetbits_le32(&denali_phy[926], 0x3f, reg_value);
346                 /* PHY_927 PHY_PAD_DQS_DRIVE */
347                 clrsetbits_le32(&denali_phy[927], 0x3f, reg_value);
348                 /* PHY_928 PHY_PAD_ADDR_DRIVE */
349                 clrsetbits_le32(&denali_phy[928], 0x3f << 8, reg_value << 8);
350                 /* PHY_929 PHY_PAD_CLK_DRIVE */
351                 clrsetbits_le32(&denali_phy[929], 0x3f << 8, reg_value << 8);
352                 /* PHY_935 PHY_PAD_CKE_DRIVE */
353                 clrsetbits_le32(&denali_phy[935], 0x3f << 8, reg_value << 8);
354                 /* PHY_937 PHY_PAD_RST_DRIVE */
355                 clrsetbits_le32(&denali_phy[937], 0x3f << 8, reg_value << 8);
356                 /* PHY_939 PHY_PAD_CS_DRIVE */
357                 clrsetbits_le32(&denali_phy[939], 0x3f << 8, reg_value << 8);
358         }
359
360         /* speed setting */
361         if (params->base.ddr_freq < 400)
362                 speed = 0x0;
363         else if (params->base.ddr_freq < 800)
364                 speed = 0x1;
365         else if (params->base.ddr_freq < 1200)
366                 speed = 0x2;
367         else
368                 speed = 0x3;
369
370         /* PHY_924 PHY_PAD_FDBK_DRIVE */
371         clrsetbits_le32(&denali_phy[924], 0x3 << 21, speed << 21);
372         /* PHY_926 PHY_PAD_DATA_DRIVE */
373         clrsetbits_le32(&denali_phy[926], 0x3 << 9, speed << 9);
374         /* PHY_927 PHY_PAD_DQS_DRIVE */
375         clrsetbits_le32(&denali_phy[927], 0x3 << 9, speed << 9);
376         /* PHY_928 PHY_PAD_ADDR_DRIVE */
377         clrsetbits_le32(&denali_phy[928], 0x3 << 17, speed << 17);
378         /* PHY_929 PHY_PAD_CLK_DRIVE */
379         clrsetbits_le32(&denali_phy[929], 0x3 << 17, speed << 17);
380         /* PHY_935 PHY_PAD_CKE_DRIVE */
381         clrsetbits_le32(&denali_phy[935], 0x3 << 17, speed << 17);
382         /* PHY_937 PHY_PAD_RST_DRIVE */
383         clrsetbits_le32(&denali_phy[937], 0x3 << 17, speed << 17);
384         /* PHY_939 PHY_PAD_CS_DRIVE */
385         clrsetbits_le32(&denali_phy[939], 0x3 << 17, speed << 17);
386
387         return 0;
388 }
389
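/*
 * Pick drive-strength and ODT (termination) values for the DQ/DQS and
 * CA pad groups according to the DRAM type, program them together with
 * the read/write/idle termination enables, and then let phy_io_config()
 * finish the remaining pad setup.
 */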
390 static void set_ds_odt(const struct chan_info *chan,
391                        const struct rk3399_sdram_params *params)
392 {
393         u32 *denali_phy = chan->publ->denali_phy;
394
395         u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
396         u32 tsel_idle_select_p, tsel_rd_select_p;
397         u32 tsel_idle_select_n, tsel_rd_select_n;
398         u32 tsel_wr_select_dq_p, tsel_wr_select_ca_p;
399         u32 tsel_wr_select_dq_n, tsel_wr_select_ca_n;
400         u32 reg_value;
401
402         if (params->base.dramtype == LPDDR4) {
403                 tsel_rd_select_p = PHY_DRV_ODT_HI_Z;
404                 tsel_rd_select_n = PHY_DRV_ODT_240;
405
406                 tsel_idle_select_p = PHY_DRV_ODT_HI_Z;
407                 tsel_idle_select_n = PHY_DRV_ODT_240;
408
409                 tsel_wr_select_dq_p = PHY_DRV_ODT_40;
410                 tsel_wr_select_dq_n = PHY_DRV_ODT_40;
411
412                 tsel_wr_select_ca_p = PHY_DRV_ODT_40;
413                 tsel_wr_select_ca_n = PHY_DRV_ODT_40;
414         } else if (params->base.dramtype == LPDDR3) {
415                 tsel_rd_select_p = PHY_DRV_ODT_240;
416                 tsel_rd_select_n = PHY_DRV_ODT_HI_Z;
417
418                 tsel_idle_select_p = PHY_DRV_ODT_240;
419                 tsel_idle_select_n = PHY_DRV_ODT_HI_Z;
420
421                 tsel_wr_select_dq_p = PHY_DRV_ODT_34_3;
422                 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
423
424                 tsel_wr_select_ca_p = PHY_DRV_ODT_48;
425                 tsel_wr_select_ca_n = PHY_DRV_ODT_48;
426         } else {
427                 tsel_rd_select_p = PHY_DRV_ODT_240;
428                 tsel_rd_select_n = PHY_DRV_ODT_240;
429
430                 tsel_idle_select_p = PHY_DRV_ODT_240;
431                 tsel_idle_select_n = PHY_DRV_ODT_240;
432
433                 tsel_wr_select_dq_p = PHY_DRV_ODT_34_3;
434                 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
435
436                 tsel_wr_select_ca_p = PHY_DRV_ODT_34_3;
437                 tsel_wr_select_ca_n = PHY_DRV_ODT_34_3;
438         }
439
440         if (params->base.odt == 1)
441                 tsel_rd_en = 1;
442         else
443                 tsel_rd_en = 0;
444
445         tsel_wr_en = 0;
446         tsel_idle_en = 0;
447
448         /*
449          * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
450          * sets termination values for read/idle cycles and drive strength
451          * for write cycles for DQ/DM
452          */
453         reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
454                     (tsel_wr_select_dq_n << 8) | (tsel_wr_select_dq_p << 12) |
455                     (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
456         clrsetbits_le32(&denali_phy[6], 0xffffff, reg_value);
457         clrsetbits_le32(&denali_phy[134], 0xffffff, reg_value);
458         clrsetbits_le32(&denali_phy[262], 0xffffff, reg_value);
459         clrsetbits_le32(&denali_phy[390], 0xffffff, reg_value);
460
461         /*
462          * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
463          * sets termination values for read/idle cycles and drive strength
464          * for write cycles for DQS
465          */
466         clrsetbits_le32(&denali_phy[7], 0xffffff, reg_value);
467         clrsetbits_le32(&denali_phy[135], 0xffffff, reg_value);
468         clrsetbits_le32(&denali_phy[263], 0xffffff, reg_value);
469         clrsetbits_le32(&denali_phy[391], 0xffffff, reg_value);
470
471         /* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
472         reg_value = tsel_wr_select_ca_n | (tsel_wr_select_ca_p << 0x4);
473         clrsetbits_le32(&denali_phy[544], 0xff, reg_value);
474         clrsetbits_le32(&denali_phy[672], 0xff, reg_value);
475         clrsetbits_le32(&denali_phy[800], 0xff, reg_value);
476
477         /* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
478         clrsetbits_le32(&denali_phy[928], 0xff, reg_value);
479
480         /* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
481         clrsetbits_le32(&denali_phy[937], 0xff, reg_value);
482
483         /* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
484         clrsetbits_le32(&denali_phy[935], 0xff, reg_value);
485
486         /* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
487         clrsetbits_le32(&denali_phy[939], 0xff, reg_value);
488
489         /* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
490         clrsetbits_le32(&denali_phy[929], 0xff, reg_value);
491
492         /* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
493         clrsetbits_le32(&denali_phy[924], 0xff,
494                         tsel_wr_select_dq_n | (tsel_wr_select_dq_p << 4));
495         clrsetbits_le32(&denali_phy[925], 0xff,
496                         tsel_rd_select_n | (tsel_rd_select_p << 4));
497
498         /* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
499         reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
500                 << 16;
501         clrsetbits_le32(&denali_phy[5], 0x7 << 16, reg_value);
502         clrsetbits_le32(&denali_phy[133], 0x7 << 16, reg_value);
503         clrsetbits_le32(&denali_phy[261], 0x7 << 16, reg_value);
504         clrsetbits_le32(&denali_phy[389], 0x7 << 16, reg_value);
505
506         /* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
507         reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
508                 << 24;
509         clrsetbits_le32(&denali_phy[6], 0x7 << 24, reg_value);
510         clrsetbits_le32(&denali_phy[134], 0x7 << 24, reg_value);
511         clrsetbits_le32(&denali_phy[262], 0x7 << 24, reg_value);
512         clrsetbits_le32(&denali_phy[390], 0x7 << 24, reg_value);
513
514         /* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
515         reg_value = tsel_wr_en << 8;
516         clrsetbits_le32(&denali_phy[518], 0x1 << 8, reg_value);
517         clrsetbits_le32(&denali_phy[646], 0x1 << 8, reg_value);
518         clrsetbits_le32(&denali_phy[774], 0x1 << 8, reg_value);
519
520         /* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
521         reg_value = tsel_wr_en << 17;
522         clrsetbits_le32(&denali_phy[933], 0x1 << 17, reg_value);
523         /*
524          * pad_rst/cke/cs/clk_term tsel 1bits
525          * DENALI_PHY_938/936/940/934 offset_17
526          */
527         clrsetbits_le32(&denali_phy[938], 0x1 << 17, reg_value);
528         clrsetbits_le32(&denali_phy[936], 0x1 << 17, reg_value);
529         clrsetbits_le32(&denali_phy[940], 0x1 << 17, reg_value);
530         clrsetbits_le32(&denali_phy[934], 0x1 << 17, reg_value);
531
532         /* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
533         clrsetbits_le32(&denali_phy[930], 0x1 << 17, reg_value);
534
535         phy_io_config(chan, params);
536 }
537
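/*
 * Start controller initialization for one channel: poke the GRF
 * ddrc0_con register, move the PHY_957 PHY_DLL_RST_EN field to its run
 * value, poll CTL_203 bit 3 for init done (about 1ms timeout), then
 * write the fixed 0x820 setting into a block of per-byte-lane PHY
 * registers and restore the saved PWRUP_SREFRESH_EXIT bit.
 */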
538 static void pctl_start(struct dram_info *dram, u8 channel)
539 {
540         const struct chan_info *chan = &dram->chan[channel];
541         u32 *denali_ctl = chan->pctl->denali_ctl;
542         u32 *denali_phy = chan->publ->denali_phy;
543         u32 *ddrc0_con = get_ddrc0_con(dram, channel);
544         u32 count = 0;
545         u32 byte, tmp;
546
547         writel(0x01000000, ddrc0_con);
548
549         clrsetbits_le32(&denali_phy[957], 0x3 << 24, 0x2 << 24);
550
551         while (!(readl(&denali_ctl[203]) & (1 << 3))) {
552                 if (count > 1000) {
553                         printf("%s: Failed to init pctl for channel %d\n",
554                                __func__, channel);
555                         while (1)
556                                 ;
557                 }
558
559                 udelay(1);
560                 count++;
561         }
562
563         writel(0x01000100, ddrc0_con);
564
565         for (byte = 0; byte < 4; byte++) {
566                 tmp = 0x820;
567                 writel((tmp << 16) | tmp, &denali_phy[53 + (128 * byte)]);
568                 writel((tmp << 16) | tmp, &denali_phy[54 + (128 * byte)]);
569                 writel((tmp << 16) | tmp, &denali_phy[55 + (128 * byte)]);
570                 writel((tmp << 16) | tmp, &denali_phy[56 + (128 * byte)]);
571                 writel((tmp << 16) | tmp, &denali_phy[57 + (128 * byte)]);
572
573                 clrsetbits_le32(&denali_phy[58 + (128 * byte)], 0xffff, tmp);
574         }
575
576         clrsetbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT,
577                         dram->pwrup_srefresh_exit[channel]);
578 }
579
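/*
 * Load the CTL and PI register sets from sdram_params, apply the DRAM
 * geometry, start the PI and controller, wait for the PHY DLLs to lock
 * (LPDDR4 runs init in PLL bypass mode and skips the wait), then load
 * the PHY register set and apply the drive-strength/ODT and
 * write-timing adjustments.
 */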
580 static int pctl_cfg(struct dram_info *dram, const struct chan_info *chan,
581                     u32 channel, const struct rk3399_sdram_params *params)
582 {
583         u32 *denali_ctl = chan->pctl->denali_ctl;
584         u32 *denali_pi = chan->pi->denali_pi;
585         u32 *denali_phy = chan->publ->denali_phy;
586         const u32 *params_ctl = params->pctl_regs.denali_ctl;
587         const u32 *params_phy = params->phy_regs.denali_phy;
588         u32 tmp, tmp1, tmp2;
589
590         /*
591          * work around controller bug:
592          * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
593          */
594         copy_to_reg(&denali_ctl[1], &params_ctl[1],
595                     sizeof(struct rk3399_ddr_pctl_regs) - 4);
596         writel(params_ctl[0], &denali_ctl[0]);
597
598         /*
599          * If both channels are initialized at the same time, they issue
600          * ZQ Cal Start at the same time and share the same RZQ, which
601          * is not allowed.
602          *
603          * Increasing tINIT3 for channel 1 keeps the two channels from
604          * starting ZQ calibration simultaneously.
605          */
606         if (params->base.dramtype == LPDDR4 && channel == 1) {
607                 tmp = ((params->base.ddr_freq * MHz + 999) / 1000);
608                 tmp1 = readl(&denali_ctl[14]);
609                 writel(tmp + tmp1, &denali_ctl[14]);
610         }
611
612         copy_to_reg(denali_pi, &params->pi_regs.denali_pi[0],
613                     sizeof(struct rk3399_ddr_pi_regs));
614
615         /* the rank count needs to be set for init */
616         set_memory_map(chan, channel, params);
617
618         writel(params->phy_regs.denali_phy[910], &denali_phy[910]);
619         writel(params->phy_regs.denali_phy[911], &denali_phy[911]);
620         writel(params->phy_regs.denali_phy[912], &denali_phy[912]);
621
622         if (IS_ENABLED(CONFIG_RAM_RK3399_LPDDR4)) {
623                 writel(params->phy_regs.denali_phy[898], &denali_phy[898]);
624                 writel(params->phy_regs.denali_phy[919], &denali_phy[919]);
625         }
626
627         dram->pwrup_srefresh_exit[channel] = readl(&denali_ctl[68]) &
628                                              PWRUP_SREFRESH_EXIT;
629         clrbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT);
630
631         /* PHY_DLL_RST_EN */
632         clrsetbits_le32(&denali_phy[957], 0x3 << 24, 1 << 24);
633
634         setbits_le32(&denali_pi[0], START);
635         setbits_le32(&denali_ctl[0], START);
636
637         /*
638          * LPDDR4 uses PLL bypass mode for init, so there is no need
639          * to wait for the PLL to lock.
640          */
641         if (params->base.dramtype != LPDDR4) {
642                 /* Waiting for phy DLL lock */
643                 while (1) {
644                         tmp = readl(&denali_phy[920]);
645                         tmp1 = readl(&denali_phy[921]);
646                         tmp2 = readl(&denali_phy[922]);
647                         if ((((tmp >> 16) & 0x1) == 0x1) &&
648                             (((tmp1 >> 16) & 0x1) == 0x1) &&
649                             (((tmp1 >> 0) & 0x1) == 0x1) &&
650                             (((tmp2 >> 0) & 0x1) == 0x1))
651                                 break;
652                 }
653         }
654
655         copy_to_reg(&denali_phy[896], &params_phy[896], (958 - 895) * 4);
656         copy_to_reg(&denali_phy[0], &params_phy[0], (90 - 0 + 1) * 4);
657         copy_to_reg(&denali_phy[128], &params_phy[128], (218 - 128 + 1) * 4);
658         copy_to_reg(&denali_phy[256], &params_phy[256], (346 - 256 + 1) * 4);
659         copy_to_reg(&denali_phy[384], &params_phy[384], (474 - 384 + 1) * 4);
660         copy_to_reg(&denali_phy[512], &params_phy[512], (549 - 512 + 1) * 4);
661         copy_to_reg(&denali_phy[640], &params_phy[640], (677 - 640 + 1) * 4);
662         copy_to_reg(&denali_phy[768], &params_phy[768], (805 - 768 + 1) * 4);
663         set_ds_odt(chan, params);
664
665         /*
666          * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8
667          * add half a cycle to dqs_tsel_wr_end[7:4]
668          */
669         tmp = (readl(&denali_phy[84]) >> 8) & 0xff;
670         clrsetbits_le32(&denali_phy[84], 0xff << 8, (tmp + 0x10) << 8);
671         tmp = (readl(&denali_phy[212]) >> 8) & 0xff;
672         clrsetbits_le32(&denali_phy[212], 0xff << 8, (tmp + 0x10) << 8);
673         tmp = (readl(&denali_phy[340]) >> 8) & 0xff;
674         clrsetbits_le32(&denali_phy[340], 0xff << 8, (tmp + 0x10) << 8);
675         tmp = (readl(&denali_phy[468]) >> 8) & 0xff;
676         clrsetbits_le32(&denali_phy[468], 0xff << 8, (tmp + 0x10) << 8);
677
678         /*
679          * phy_dq_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_16
680          * add half a cycle to dq_tsel_wr_end[7:4]
681          */
682         tmp = (readl(&denali_phy[83]) >> 16) & 0xff;
683         clrsetbits_le32(&denali_phy[83], 0xff << 16, (tmp + 0x10) << 16);
684         tmp = (readl(&denali_phy[211]) >> 16) & 0xff;
685         clrsetbits_le32(&denali_phy[211], 0xff << 16, (tmp + 0x10) << 16);
686         tmp = (readl(&denali_phy[339]) >> 16) & 0xff;
687         clrsetbits_le32(&denali_phy[339], 0xff << 16, (tmp + 0x10) << 16);
688         tmp = (readl(&denali_phy[467]) >> 16) & 0xff;
689         clrsetbits_le32(&denali_phy[467], 0xff << 16, (tmp + 0x10) << 16);
690
691         return 0;
692 }
693
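/*
 * If per-CS training is enabled (PHY_84 bit 16), point the per-CS
 * training index of every byte lane at the given rank.
 */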
694 static void select_per_cs_training_index(const struct chan_info *chan,
695                                          u32 rank)
696 {
697         u32 *denali_phy = chan->publ->denali_phy;
698
699         /* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
700         if ((readl(&denali_phy[84]) >> 16) & 1) {
701                 /*
702                  * PHY_8/136/264/392
703                  * phy_per_cs_training_index_X 1bit offset_24
704                  */
705                 clrsetbits_le32(&denali_phy[8], 0x1 << 24, rank << 24);
706                 clrsetbits_le32(&denali_phy[136], 0x1 << 24, rank << 24);
707                 clrsetbits_le32(&denali_phy[264], 0x1 << 24, rank << 24);
708                 clrsetbits_le32(&denali_phy[392], 0x1 << 24, rank << 24);
709         }
710 }
711
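/*
 * Force a fixed write-leveling delay (0x200) into every byte lane via
 * the multicast programming path, then request a controller update
 * (ctrlupd_req) so the new values take effect.
 */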
712 static void override_write_leveling_value(const struct chan_info *chan)
713 {
714         u32 *denali_ctl = chan->pctl->denali_ctl;
715         u32 *denali_phy = chan->publ->denali_phy;
716         u32 byte;
717
718         /* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
719         setbits_le32(&denali_phy[896], 1);
720
721         /*
722          * PHY_8/136/264/392
723          * phy_per_cs_training_multicast_en_X 1bit offset_16
724          */
725         clrsetbits_le32(&denali_phy[8], 0x1 << 16, 1 << 16);
726         clrsetbits_le32(&denali_phy[136], 0x1 << 16, 1 << 16);
727         clrsetbits_le32(&denali_phy[264], 0x1 << 16, 1 << 16);
728         clrsetbits_le32(&denali_phy[392], 0x1 << 16, 1 << 16);
729
730         for (byte = 0; byte < 4; byte++)
731                 clrsetbits_le32(&denali_phy[63 + (128 * byte)], 0xffff << 16,
732                                 0x200 << 16);
733
734         /* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
735         clrbits_le32(&denali_phy[896], 1);
736
737         /* CTL_200 ctrlupd_req 1bit offset_8 */
738         clrsetbits_le32(&denali_ctl[200], 0x1 << 8, 0x1 << 8);
739 }
740
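/*
 * Command/address (CA) training: run CA leveling for every chip select
 * in rank_mask, polling PI_INT_STATUS and the CA-leveling observation
 * registers for completion or error. Returns 0 on success, -EIO on a
 * training or observation error.
 */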
741 static int data_training_ca(const struct chan_info *chan, u32 channel,
742                             const struct rk3399_sdram_params *params)
743 {
744         u32 *denali_pi = chan->pi->denali_pi;
745         u32 *denali_phy = chan->publ->denali_phy;
746         u32 i, tmp;
747         u32 obs_0, obs_1, obs_2, obs_err = 0;
748         u32 rank = params->ch[channel].cap_info.rank;
749         u32 rank_mask;
750
751         /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
752         writel(0x00003f7c, (&denali_pi[175]));
753
754         if (params->base.dramtype == LPDDR4)
755                 rank_mask = (rank == 1) ? 0x5 : 0xf;
756         else
757                 rank_mask = (rank == 1) ? 0x1 : 0x3;
758
759         for (i = 0; i < 4; i++) {
760                 if (!(rank_mask & (1 << i)))
761                         continue;
762
763                 select_per_cs_training_index(chan, i);
764
765                 /* PI_100 PI_CALVL_EN:RW:8:2 */
766                 clrsetbits_le32(&denali_pi[100], 0x3 << 8, 0x2 << 8);
767
768                 /* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
769                 clrsetbits_le32(&denali_pi[92],
770                                 (0x1 << 16) | (0x3 << 24),
771                                 (0x1 << 16) | (i << 24));
772
773                 /* Waiting for training complete */
774                 while (1) {
775                         /* PI_174 PI_INT_STATUS:RD:8:18 */
776                         tmp = readl(&denali_pi[174]) >> 8;
777                         /*
778                          * check status obs
779                          * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
780                          */
781                         obs_0 = readl(&denali_phy[532]);
782                         obs_1 = readl(&denali_phy[660]);
783                         obs_2 = readl(&denali_phy[788]);
784                         if (((obs_0 >> 30) & 0x3) ||
785                             ((obs_1 >> 30) & 0x3) ||
786                             ((obs_2 >> 30) & 0x3))
787                                 obs_err = 1;
788                         if ((((tmp >> 11) & 0x1) == 0x1) &&
789                             (((tmp >> 13) & 0x1) == 0x1) &&
790                             (((tmp >> 5) & 0x1) == 0x0) &&
791                             obs_err == 0)
792                                 break;
793                         else if ((((tmp >> 5) & 0x1) == 0x1) ||
794                                  (obs_err == 1))
795                                 return -EIO;
796                 }
797
798                 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
799                 writel(0x00003f7c, (&denali_pi[175]));
800         }
801
802         clrbits_le32(&denali_pi[100], 0x3 << 8);
803
804         return 0;
805 }
806
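/*
 * Write leveling: train each rank while watching PI_INT_STATUS and the
 * per-lane write-leveling observation registers, then override the
 * result with a fixed value and clear PI_WRLVL_EN. Returns 0 on
 * success, -EIO on error.
 */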
807 static int data_training_wl(const struct chan_info *chan, u32 channel,
808                             const struct rk3399_sdram_params *params)
809 {
810         u32 *denali_pi = chan->pi->denali_pi;
811         u32 *denali_phy = chan->publ->denali_phy;
812         u32 i, tmp;
813         u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
814         u32 rank = params->ch[channel].cap_info.rank;
815
816         /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
817         writel(0x00003f7c, (&denali_pi[175]));
818
819         for (i = 0; i < rank; i++) {
820                 select_per_cs_training_index(chan, i);
821
822                 /* PI_60 PI_WRLVL_EN:RW:8:2 */
823                 clrsetbits_le32(&denali_pi[60], 0x3 << 8, 0x2 << 8);
824
825                 /* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
826                 clrsetbits_le32(&denali_pi[59],
827                                 (0x1 << 8) | (0x3 << 16),
828                                 (0x1 << 8) | (i << 16));
829
830                 /* Waiting for training complete */
831                 while (1) {
832                         /* PI_174 PI_INT_STATUS:RD:8:18 */
833                         tmp = readl(&denali_pi[174]) >> 8;
834
835                         /*
836                          * check the status obs; if it reports an error,
837                          * leveling may not complete. PHY_40/168/296/424
838                          * phy_wrlvl_status_obs_X:0:13
839                          */
840                         obs_0 = readl(&denali_phy[40]);
841                         obs_1 = readl(&denali_phy[168]);
842                         obs_2 = readl(&denali_phy[296]);
843                         obs_3 = readl(&denali_phy[424]);
844                         if (((obs_0 >> 12) & 0x1) ||
845                             ((obs_1 >> 12) & 0x1) ||
846                             ((obs_2 >> 12) & 0x1) ||
847                             ((obs_3 >> 12) & 0x1))
848                                 obs_err = 1;
849                         if ((((tmp >> 10) & 0x1) == 0x1) &&
850                             (((tmp >> 13) & 0x1) == 0x1) &&
851                             (((tmp >> 4) & 0x1) == 0x0) &&
852                             obs_err == 0)
853                                 break;
854                         else if ((((tmp >> 4) & 0x1) == 0x1) ||
855                                  (obs_err == 1))
856                                 return -EIO;
857                 }
858
859                 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
860                 writel(0x00003f7c, (&denali_pi[175]));
861         }
862
863         override_write_leveling_value(chan);
864         clrbits_le32(&denali_pi[60], 0x3 << 8);
865
866         return 0;
867 }
868
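/*
 * Read gate training: train each rank while watching PI_INT_STATUS and
 * the gate-training observation registers. Returns 0 on success, -EIO
 * on error.
 */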
869 static int data_training_rg(const struct chan_info *chan, u32 channel,
870                             const struct rk3399_sdram_params *params)
871 {
872         u32 *denali_pi = chan->pi->denali_pi;
873         u32 *denali_phy = chan->publ->denali_phy;
874         u32 i, tmp;
875         u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
876         u32 rank = params->ch[channel].cap_info.rank;
877
878         /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
879         writel(0x00003f7c, (&denali_pi[175]));
880
881         for (i = 0; i < rank; i++) {
882                 select_per_cs_training_index(chan, i);
883
884                 /* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
885                 clrsetbits_le32(&denali_pi[80], 0x3 << 24, 0x2 << 24);
886
887                 /*
888                  * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
889                  * PI_RDLVL_CS:RW:24:2
890                  */
891                 clrsetbits_le32(&denali_pi[74],
892                                 (0x1 << 16) | (0x3 << 24),
893                                 (0x1 << 16) | (i << 24));
894
895                 /* Waiting for training complete */
896                 while (1) {
897                         /* PI_174 PI_INT_STATUS:RD:8:18 */
898                         tmp = readl(&denali_pi[174]) >> 8;
899
900                         /*
901                          * check status obs
902                          * PHY_43/171/299/427
903                          *     PHY_GTLVL_STATUS_OBS_x:16:8
904                          */
905                         obs_0 = readl(&denali_phy[43]);
906                         obs_1 = readl(&denali_phy[171]);
907                         obs_2 = readl(&denali_phy[299]);
908                         obs_3 = readl(&denali_phy[427]);
909                         if (((obs_0 >> (16 + 6)) & 0x3) ||
910                             ((obs_1 >> (16 + 6)) & 0x3) ||
911                             ((obs_2 >> (16 + 6)) & 0x3) ||
912                             ((obs_3 >> (16 + 6)) & 0x3))
913                                 obs_err = 1;
914                         if ((((tmp >> 9) & 0x1) == 0x1) &&
915                             (((tmp >> 13) & 0x1) == 0x1) &&
916                             (((tmp >> 3) & 0x1) == 0x0) &&
917                             obs_err == 0)
918                                 break;
919                         else if ((((tmp >> 3) & 0x1) == 0x1) ||
920                                  (obs_err == 1))
921                                 return -EIO;
922                 }
923
924                 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
925                 writel(0x00003f7c, (&denali_pi[175]));
926         }
927
928         clrbits_le32(&denali_pi[80], 0x3 << 24);
929
930         return 0;
931 }
932
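/*
 * Read leveling: train each rank; completion and errors are reported
 * through PI_INT_STATUS only. Returns 0 on success, -EIO on error.
 */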
933 static int data_training_rl(const struct chan_info *chan, u32 channel,
934                             const struct rk3399_sdram_params *params)
935 {
936         u32 *denali_pi = chan->pi->denali_pi;
937         u32 i, tmp;
938         u32 rank = params->ch[channel].cap_info.rank;
939
940         /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
941         writel(0x00003f7c, (&denali_pi[175]));
942
943         for (i = 0; i < rank; i++) {
944                 select_per_cs_training_index(chan, i);
945
946                 /* PI_80 PI_RDLVL_EN:RW:16:2 */
947                 clrsetbits_le32(&denali_pi[80], 0x3 << 16, 0x2 << 16);
948
949                 /* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
950                 clrsetbits_le32(&denali_pi[74],
951                                 (0x1 << 8) | (0x3 << 24),
952                                 (0x1 << 8) | (i << 24));
953
954                 /* Waiting for training complete */
955                 while (1) {
956                         /* PI_174 PI_INT_STATUS:RD:8:18 */
957                         tmp = readl(&denali_pi[174]) >> 8;
958
959                         /*
960                          * make sure the status obs does not report an
961                          * error bit: PHY_46/174/302/430
962                          *     phy_rdlvl_status_obs_X:16:8
963                          */
964                         if ((((tmp >> 8) & 0x1) == 0x1) &&
965                             (((tmp >> 13) & 0x1) == 0x1) &&
966                             (((tmp >> 2) & 0x1) == 0x0))
967                                 break;
968                         else if (((tmp >> 2) & 0x1) == 0x1)
969                                 return -EIO;
970                 }
971
972                 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
973                 writel(0x00003f7c, (&denali_pi[175]));
974         }
975
976         clrbits_le32(&denali_pi[80], 0x3 << 16);
977
978         return 0;
979 }
980
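/*
 * Write DQ leveling: train every chip select in rank_mask (a 4-bit mask
 * for LPDDR4, 2-bit otherwise) with PI_WDQLVL_VREF_EN disabled during
 * the procedure. Returns 0 on success, -EIO on error.
 */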
981 static int data_training_wdql(const struct chan_info *chan, u32 channel,
982                               const struct rk3399_sdram_params *params)
983 {
984         u32 *denali_pi = chan->pi->denali_pi;
985         u32 i, tmp;
986         u32 rank = params->ch[channel].cap_info.rank;
987         u32 rank_mask;
988
989         /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
990         writel(0x00003f7c, (&denali_pi[175]));
991
992         if (params->base.dramtype == LPDDR4)
993                 rank_mask = (rank == 1) ? 0x5 : 0xf;
994         else
995                 rank_mask = (rank == 1) ? 0x1 : 0x3;
996
997         for (i = 0; i < 4; i++) {
998                 if (!(rank_mask & (1 << i)))
999                         continue;
1000
1001                 select_per_cs_training_index(chan, i);
1002
1003                 /*
1004                  * disable PI_WDQLVL_VREF_EN before wdq leveling?
1005                  * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
1006                  */
1007                 clrbits_le32(&denali_pi[181], 0x1 << 8);
1008
1009                 /* PI_124 PI_WDQLVL_EN:RW:16:2 */
1010                 clrsetbits_le32(&denali_pi[124], 0x3 << 16, 0x2 << 16);
1011
1012                 /* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
1013                 clrsetbits_le32(&denali_pi[121],
1014                                 (0x1 << 8) | (0x3 << 16),
1015                                 (0x1 << 8) | (i << 16));
1016
1017                 /* Waiting for training complete */
1018                 while (1) {
1019                         /* PI_174 PI_INT_STATUS:RD:8:18 */
1020                         tmp = readl(&denali_pi[174]) >> 8;
1021                         if ((((tmp >> 12) & 0x1) == 0x1) &&
1022                             (((tmp >> 13) & 0x1) == 0x1) &&
1023                             (((tmp >> 6) & 0x1) == 0x0))
1024                                 break;
1025                         else if (((tmp >> 6) & 0x1) == 0x1)
1026                                 return -EIO;
1027                 }
1028
1029                 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1030                 writel(0x00003f7c, (&denali_pi[175]));
1031         }
1032
1033         clrbits_le32(&denali_pi[124], 0x3 << 16);
1034
1035         return 0;
1036 }
1037
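/*
 * Dispatcher for the individual training steps: PI_FULL_TRAINING is
 * expanded into the set of steps the DRAM type supports, and the DQS
 * pad RPULL bit (PHY_927 bit 22) is set for the duration of training.
 */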
1038 static int data_training(const struct chan_info *chan, u32 channel,
1039                          const struct rk3399_sdram_params *params,
1040                          u32 training_flag)
1041 {
1042         u32 *denali_phy = chan->publ->denali_phy;
1043         int ret;
1044
1045         /* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22 */
1046         setbits_le32(&denali_phy[927], (1 << 22));
1047
1048         if (training_flag == PI_FULL_TRAINING) {
1049                 if (params->base.dramtype == LPDDR4) {
1050                         training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
1051                                         PI_READ_GATE_TRAINING |
1052                                         PI_READ_LEVELING | PI_WDQ_LEVELING;
1053                 } else if (params->base.dramtype == LPDDR3) {
1054                         training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
1055                                         PI_READ_GATE_TRAINING;
1056                 } else if (params->base.dramtype == DDR3) {
1057                         training_flag = PI_WRITE_LEVELING |
1058                                         PI_READ_GATE_TRAINING |
1059                                         PI_READ_LEVELING;
1060                 }
1061         }
1062
1063         /* CA training (LPDDR4, LPDDR3 support) */
1064         if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
1065                 ret = data_training_ca(chan, channel, params);
1066                 if (ret < 0) {
1067                         debug("%s: data training ca failed\n", __func__);
1068                         return ret;
1069                 }
1070         }
1071
1072         /* write leveling (LPDDR4, LPDDR3, DDR3 support) */
1073         if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
1074                 ret = data_training_wl(chan, channel, params);
1075                 if (ret < 0) {
1076                         debug("%s: data training wl failed\n", __func__);
1077                         return ret;
1078                 }
1079         }
1080
1081         /* read gate training (LPDDR4, LPDDR3, DDR3 support) */
1082         if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
1083                 ret = data_training_rg(chan, channel, params);
1084                 if (ret < 0) {
1085                         debug("%s: data training rg failed\n", __func__);
1086                         return ret;
1087                 }
1088         }
1089
1090         /* read leveling (LPDDR4, LPDDR3, DDR3 support) */
1091         if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
1092                 ret = data_training_rl(chan, channel, params);
1093                 if (ret < 0) {
1094                         debug("%s: data training rl failed\n", __func__);
1095                         return ret;
1096                 }
1097         }
1098
1099         /* wdq leveling (LPDDR4 support) */
1100         if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
1101                 ret = data_training_wdql(chan, channel, params);
1102                 if (ret < 0) {
1103                         debug("%s: data training wdql failed\n", __func__);
1104                         return ret;
1105                 }
1106         }
1107
1108         /* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22 */
1109         clrbits_le32(&denali_phy[927], (1 << 22));
1110
1111         return 0;
1112 }
1113
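/*
 * Program the ddrconf and ddrsize registers of the memory scheduler
 * (MSCH) for one channel. Chip-select capacity is computed in MiB as
 * 2^(cs0_row + col + bk + bw - 20); e.g. cs0_row = 15, col = 10,
 * bk = 3, bw = 2 gives 2^10 = 1024 MiB. ddrsize takes each capacity in
 * units of 32MB.
 */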
1114 static void set_ddrconfig(const struct chan_info *chan,
1115                           const struct rk3399_sdram_params *params,
1116                           unsigned char channel, u32 ddrconfig)
1117 {
1118         /* only the ddrconf and ddrsize registers need to be set here */
1119         struct rk3399_msch_regs *ddr_msch_regs = chan->msch;
1120         unsigned int cs0_cap = 0;
1121         unsigned int cs1_cap = 0;
1122
1123         cs0_cap = (1 << (params->ch[channel].cap_info.cs0_row
1124                         + params->ch[channel].cap_info.col
1125                         + params->ch[channel].cap_info.bk
1126                         + params->ch[channel].cap_info.bw - 20));
1127         if (params->ch[channel].cap_info.rank > 1)
1128                 cs1_cap = cs0_cap >> (params->ch[channel].cap_info.cs0_row
1129                                 - params->ch[channel].cap_info.cs1_row);
1130         if (params->ch[channel].cap_info.row_3_4) {
1131                 cs0_cap = cs0_cap * 3 / 4;
1132                 cs1_cap = cs1_cap * 3 / 4;
1133         }
1134
1135         writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
1136         writel(((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8),
1137                &ddr_msch_regs->ddrsize);
1138 }
1139
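/*
 * Final configuration: encode the detected geometry of each populated
 * channel into pmugrf os_reg2/os_reg3, program the MSCH/NOC timing
 * registers, disable the rank-1 memory clock on single-rank channels,
 * set the address stride in pmusgrf, and set up the reboot-hold and
 * global reset behaviour.
 */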
1140 static void dram_all_config(struct dram_info *dram,
1141                             const struct rk3399_sdram_params *params)
1142 {
1143         u32 sys_reg2 = 0;
1144         u32 sys_reg3 = 0;
1145         unsigned int channel, idx;
1146
1147         sys_reg2 |= SYS_REG_ENC_DDRTYPE(params->base.dramtype);
1148         sys_reg2 |= SYS_REG_ENC_NUM_CH(params->base.num_channels);
1149
1150         for (channel = 0, idx = 0;
1151              (idx < params->base.num_channels) && (channel < 2);
1152              channel++) {
1153                 const struct rk3399_sdram_channel *info = &params->ch[channel];
1154                 struct rk3399_msch_regs *ddr_msch_regs;
1155                 const struct rk3399_msch_timings *noc_timing;
1156
1157                 if (params->ch[channel].cap_info.col == 0)
1158                         continue;
1159                 idx++;
1160                 sys_reg2 |= SYS_REG_ENC_ROW_3_4(info->cap_info.row_3_4, channel);
1161                 sys_reg2 |= SYS_REG_ENC_CHINFO(channel);
1162                 sys_reg2 |= SYS_REG_ENC_RANK(info->cap_info.rank, channel);
1163                 sys_reg2 |= SYS_REG_ENC_COL(info->cap_info.col, channel);
1164                 sys_reg2 |= SYS_REG_ENC_BK(info->cap_info.bk, channel);
1165                 sys_reg2 |= SYS_REG_ENC_BW(info->cap_info.bw, channel);
1166                 sys_reg2 |= SYS_REG_ENC_DBW(info->cap_info.dbw, channel);
1167                 SYS_REG_ENC_CS0_ROW(info->cap_info.cs0_row, sys_reg2, sys_reg3, channel);
1168                 if (info->cap_info.cs1_row)
1169                         SYS_REG_ENC_CS1_ROW(info->cap_info.cs1_row, sys_reg2,
1170                                             sys_reg3, channel);
1171                 sys_reg3 |= SYS_REG_ENC_CS1_COL(info->cap_info.col, channel);
1172                 sys_reg3 |= SYS_REG_ENC_VERSION(DDR_SYS_REG_VERSION);
1173
1174                 ddr_msch_regs = dram->chan[channel].msch;
1175                 noc_timing = &params->ch[channel].noc_timings;
1176                 writel(noc_timing->ddrtiminga0,
1177                        &ddr_msch_regs->ddrtiminga0);
1178                 writel(noc_timing->ddrtimingb0,
1179                        &ddr_msch_regs->ddrtimingb0);
1180                 writel(noc_timing->ddrtimingc0.d32,
1181                        &ddr_msch_regs->ddrtimingc0);
1182                 writel(noc_timing->devtodev0,
1183                        &ddr_msch_regs->devtodev0);
1184                 writel(noc_timing->ddrmode.d32,
1185                        &ddr_msch_regs->ddrmode);
1186
1187                 /* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
1188                 if (params->ch[channel].cap_info.rank == 1)
1189                         setbits_le32(&dram->chan[channel].pctl->denali_ctl[276],
1190                                      1 << 17);
1191         }
1192
1193         writel(sys_reg2, &dram->pmugrf->os_reg2);
1194         writel(sys_reg3, &dram->pmugrf->os_reg3);
1195         rk_clrsetreg(&dram->pmusgrf->soc_con4, 0x1f << 10,
1196                      params->base.stride << 10);
1197
1198         /* set the reboot hold registers */
1199         writel(PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
1200                 PRESET_GPIO1_HOLD(1),
1201                 &dram->pmucru->pmucru_rstnhold_con[1]);
1202         clrsetbits_le32(&dram->cru->glb_rst_con, 0x3, 0x3);
1203 }
1204
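/*
 * Ask the CIC to switch the DRAM to the index-1 frequency setpoint,
 * polling cic_status0 (roughly 100ms per handshake step) for the
 * change/done flags, then select index 1 in PHY_896 and re-run full
 * data training on every channel at the new frequency.
 */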
1205 static int switch_to_phy_index1(struct dram_info *dram,
1206                                 const struct rk3399_sdram_params *params)
1207 {
1208         u32 channel;
1209         u32 *denali_phy;
1210         u32 ch_count = params->base.num_channels;
1211         int ret;
1212         int i = 0;
1213
1214         writel(RK_CLRSETBITS(0x03 << 4 | 1 << 2 | 1,
1215                              1 << 4 | 1 << 2 | 1),
1216                         &dram->cic->cic_ctrl0);
1217         while (!(readl(&dram->cic->cic_status0) & (1 << 2))) {
1218                 mdelay(10);
1219                 i++;
1220                 if (i > 10) {
1221                         debug("timeout waiting for index1 frequency change\n");
1222                         return -ETIME;
1223                 }
1224         }
1225
1226         i = 0;
1227         writel(RK_CLRSETBITS(1 << 1, 1 << 1), &dram->cic->cic_ctrl0);
1228         while (!(readl(&dram->cic->cic_status0) & (1 << 0))) {
1229                 mdelay(10);
1230                 i++;
1231                 if (i > 10) {
1232                         debug("timeout waiting for index1 frequency done\n");
1233                         return -ETIME;
1234                 }
1235         }
1236
1237         for (channel = 0; channel < ch_count; channel++) {
1238                 denali_phy = dram->chan[channel].publ->denali_phy;
1239                 clrsetbits_le32(&denali_phy[896], (0x3 << 8) | 1, 1 << 8);
1240                 ret = data_training(&dram->chan[channel], channel,
1241                                     params, PI_FULL_TRAINING);
1242                 if (ret < 0) {
1243                         debug("index1 training failed\n");
1244                         return ret;
1245                 }
1246         }
1247
1248         return 0;
1249 }
1250
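/*
 * Derive the address-stride setting from the per-channel capacities (in
 * MiB). A single-channel configuration on channel a returns 0x17; for
 * two equally sized channels the stride is looked up from the total
 * capacity; otherwise the stride from sdram_params is kept.
 */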
1251 static unsigned char calculate_stride(struct rk3399_sdram_params *params)
1252 {
1253         unsigned int stride = params->base.stride;
1254         unsigned int channel, chinfo = 0;
1255         unsigned int ch_cap[2] = {0, 0};
1256         u64 cap;
1257
1258         for (channel = 0; channel < 2; channel++) {
1259                 unsigned int cs0_cap = 0;
1260                 unsigned int cs1_cap = 0;
1261                 struct sdram_cap_info *cap_info = &params->ch[channel].cap_info;
1262
1263                 if (cap_info->col == 0)
1264                         continue;
1265
1266                 cs0_cap = (1 << (cap_info->cs0_row + cap_info->col +
1267                                  cap_info->bk + cap_info->bw - 20));
1268                 if (cap_info->rank > 1)
1269                         cs1_cap = cs0_cap >> (cap_info->cs0_row
1270                                               - cap_info->cs1_row);
1271                 if (cap_info->row_3_4) {
1272                         cs0_cap = cs0_cap * 3 / 4;
1273                         cs1_cap = cs1_cap * 3 / 4;
1274                 }
1275                 ch_cap[channel] = cs0_cap + cs1_cap;
1276                 chinfo |= 1 << channel;
1277         }
1278
1279         /* stride calculation for 1 channel */
1280         if (params->base.num_channels == 1 && chinfo & 1)
1281                 return 0x17;    /* channel a */
1282
1283         /* stride calculation for 2 channels, default gstride type is 256B */
1284         if (ch_cap[0] == ch_cap[1]) {
1285                 cap = ch_cap[0] + ch_cap[1];
1286                 switch (cap) {
1287                 /* 512MB */
1288                 case 512:
1289                         stride = 0;
1290                         break;
1291                 /* 1GB */
1292                 case 1024:
1293                         stride = 0x5;
1294                         break;
1295                 /*
1296                  * 768MB + 768MB is handled the same as a 2GB total;
1297                  * usable space: 0-768MB and 1GB-1792MB
1298                  */
1299                 case 1536:
1300                 /* 2GB */
1301                 case 2048:
1302                         stride = 0x9;
1303                         break;
1304                 /* 1536MB + 1536MB */
1305                 case 3072:
1306                         stride = 0x11;
1307                         break;
1308                 /* 4GB */
1309                 case 4096:
1310                         stride = 0xD;
1311                         break;
1312                 default:
1313                         printf("%s: Unable to calculate stride for ", __func__);
1314                         print_size((cap * (1 << 20)), " capacity\n");
1315                         break;
1316                 }
1317         }
1318
1319         sdram_print_stride(stride);
1320
1321         return stride;
1322 }
1323
1324 static void clear_channel_params(struct rk3399_sdram_params *params, u8 channel)
1325 {
1326         params->ch[channel].cap_info.rank = 0;
1327         params->ch[channel].cap_info.col = 0;
1328         params->ch[channel].cap_info.bk = 0;
1329         params->ch[channel].cap_info.bw = 32;
1330         params->ch[channel].cap_info.dbw = 32;
1331         params->ch[channel].cap_info.row_3_4 = 0;
1332         params->ch[channel].cap_info.cs0_row = 0;
1333         params->ch[channel].cap_info.cs1_row = 0;
1334         params->ch[channel].cap_info.ddrconfig = 0;
1335 }
1336
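/*
 * Bring up both channels: reset the PHY and controller, configure PHY DLL
 * bypass for the target frequency, program the controller/PI registers and
 * then trigger the controller start sequence.
 */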
1337 static int pctl_init(struct dram_info *dram, struct rk3399_sdram_params *params)
1338 {
1339         int channel;
1340         int ret;
1341
1342         for (channel = 0; channel < 2; channel++) {
1343                 const struct chan_info *chan = &dram->chan[channel];
1344                 struct rk3399_cru *cru = dram->cru;
1345                 struct rk3399_ddr_publ_regs *publ = chan->publ;
1346
1347                 phy_pctrl_reset(cru, channel);
1348                 phy_dll_bypass_set(publ, params->base.ddr_freq);
1349
1350                 ret = pctl_cfg(dram, chan, channel, params);
1351                 if (ret < 0) {
1352                         printf("%s: pctl config failed\n", __func__);
1353                         return ret;
1354                 }
1355
1356                 /* start to trigger initialization */
1357                 pctl_start(dram, channel);
1358         }
1359
1360         return 0;
1361 }
1362
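/*
 * Top-level init: reject unsupported frequencies, then probe each channel by
 * assuming two ranks and re-initializing with one rank if training fails.
 * A channel that never trains ends up with rank 0 and is treated as absent.
 * The surviving channels are then trained and configured, the address stride
 * is derived from the detected capacities, and the PHY is switched to
 * frequency index 1.
 */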
1363 static int sdram_init(struct dram_info *dram,
1364                       struct rk3399_sdram_params *params)
1365 {
1366         unsigned char dramtype = params->base.dramtype;
1367         unsigned int ddr_freq = params->base.ddr_freq;
1368         u32 training_flag = PI_READ_GATE_TRAINING;
1369         int channel, ch, rank;
1370         int ret;
1371
1372         debug("Starting SDRAM initialization...\n");
1373
1374         if ((dramtype == DDR3 && ddr_freq > 933) ||
1375             (dramtype == LPDDR3 && ddr_freq > 933) ||
1376             (dramtype == LPDDR4 && ddr_freq > 800)) {
1377                 debug("SDRAM frequency is too high!\n");
1378                 return -E2BIG;
1379         }
1380
1381         for (ch = 0; ch < 2; ch++) {
1382                 params->ch[ch].cap_info.rank = 2;
1383                 for (rank = 2; rank != 0; rank--) {
1384                         ret = pctl_init(dram, params);
1385                         if (ret < 0) {
1386                                 printf("%s: pctl init failed\n", __func__);
1387                                 return ret;
1388                         }
1389
1390                         /* LPDDR2/LPDDR3 need to wait for DAI to complete, max 10us */
1391                         if (dramtype == LPDDR3)
1392                                 udelay(10);
1393
1394                         params->ch[ch].cap_info.rank = rank;
1395
1396                         /*
1397                          * LPDDR3 CA training must be triggered before
1398                          * other training.
1399                          * DDR3 does not have CA training.
1400                          */
1401                         if (params->base.dramtype == LPDDR3)
1402                                 training_flag |= PI_CA_TRAINING;
1403
1404                         if (!(data_training(&dram->chan[ch], ch,
1405                                             params, training_flag)))
1406                                 break;
1407                 }
1408                 /* Record the rank detected for this channel */
1409                 params->ch[ch].cap_info.rank = rank;
1410         }
1411
1412         params->base.num_channels = 0;
1413         for (channel = 0; channel < 2; channel++) {
1414                 const struct chan_info *chan = &dram->chan[channel];
1415                 struct sdram_cap_info *cap_info = &params->ch[channel].cap_info;
1416                 u8 training_flag = PI_FULL_TRAINING;
1417
1418                 if (cap_info->rank == 0) {
1419                         clear_channel_params(params, channel);
1420                         continue;
1421                 } else {
1422                         params->base.num_channels++;
1423                 }
1424
1425                 debug("Channel ");
1426                 debug(channel ? "1: " : "0: ");
1427
1428                 /* LPDDR3 should have write and read gate training */
1429                 if (params->base.dramtype == LPDDR3)
1430                         training_flag = PI_WRITE_LEVELING |
1431                                         PI_READ_GATE_TRAINING;
1432
1433                 if (params->base.dramtype != LPDDR4) {
1434                         ret = data_training(&dram->chan[channel], channel,
1435                                             params, training_flag);
1436                         if (ret < 0) {
1437                                 debug("%s: data training failed for channel %d\n",
1438                                       __func__, channel);
1439                                 continue;
1440                         }
1441                 }
1442
1443                 sdram_print_ddr_info(cap_info, &params->base);
1444
1445                 set_ddrconfig(chan, params, channel, cap_info->ddrconfig);
1446         }
1447
1448         if (params->base.num_channels == 0) {
1449                 printf("%s: ", __func__);
1450                 sdram_print_dram_type(params->base.dramtype);
1451                 printf(" - %dMHz failed!\n", params->base.ddr_freq);
1452                 return -EINVAL;
1453         }
1454
1455         params->base.stride = calculate_stride(params);
1456         dram_all_config(dram, params);
1457         switch_to_phy_index1(dram, params);
1458
1459         debug("Finished SDRAM initialization\n");
1460         return 0;
1461 }
1462
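/*
 * The full SDRAM parameter set is passed in the device tree as a flat
 * "rockchip,sdram-params" u32 array whose layout must match
 * struct rk3399_sdram_params exactly; it is copied in verbatim here.
 */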
1463 static int rk3399_dmc_ofdata_to_platdata(struct udevice *dev)
1464 {
1465 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1466         struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1467         int ret;
1468
1469         ret = dev_read_u32_array(dev, "rockchip,sdram-params",
1470                                  (u32 *)&plat->sdram_params,
1471                                  sizeof(plat->sdram_params) / sizeof(u32));
1472         if (ret) {
1473                 printf("%s: Cannot read rockchip,sdram-params %d\n",
1474                        __func__, ret);
1475                 return ret;
1476         }
1477         ret = regmap_init_mem(dev_ofnode(dev), &plat->map);
1478         if (ret)
1479                 printf("%s: regmap failed %d\n", __func__, ret);
1480
1481 #endif
1482         return 0;
1483 }
1484
1485 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1486 static int conv_of_platdata(struct udevice *dev)
1487 {
1488         struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1489         struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
1490         int ret;
1491
1492         ret = regmap_init_mem_platdata(dev, dtplat->reg,
1493                                        ARRAY_SIZE(dtplat->reg) / 2,
1494                                        &plat->map);
1495         if (ret)
1496                 return ret;
1497
1498         return 0;
1499 }
1500 #endif
1501
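/*
 * The "reg" property supplies eight register ranges, consumed below in
 * order: pctl, pi, publ and msch for channel 0, then the same four for
 * channel 1.
 */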
1502 static int rk3399_dmc_init(struct udevice *dev)
1503 {
1504         struct dram_info *priv = dev_get_priv(dev);
1505         struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1506         int ret;
1507 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1508         struct rk3399_sdram_params *params = &plat->sdram_params;
1509 #else
1510         struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
1511         struct rk3399_sdram_params *params =
1512                                         (void *)dtplat->rockchip_sdram_params;
1513
1514         ret = conv_of_platdata(dev);
1515         if (ret)
1516                 return ret;
1517 #endif
1518
1519         priv->cic = syscon_get_first_range(ROCKCHIP_SYSCON_CIC);
1520         priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
1521         priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1522         priv->pmusgrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUSGRF);
1523         priv->pmucru = rockchip_get_pmucru();
1524         priv->cru = rockchip_get_cru();
1525         priv->chan[0].pctl = regmap_get_range(plat->map, 0);
1526         priv->chan[0].pi = regmap_get_range(plat->map, 1);
1527         priv->chan[0].publ = regmap_get_range(plat->map, 2);
1528         priv->chan[0].msch = regmap_get_range(plat->map, 3);
1529         priv->chan[1].pctl = regmap_get_range(plat->map, 4);
1530         priv->chan[1].pi = regmap_get_range(plat->map, 5);
1531         priv->chan[1].publ = regmap_get_range(plat->map, 6);
1532         priv->chan[1].msch = regmap_get_range(plat->map, 7);
1533
1534         debug("con reg %p %p %p %p %p %p %p %p\n",
1535               priv->chan[0].pctl, priv->chan[0].pi,
1536               priv->chan[0].publ, priv->chan[0].msch,
1537               priv->chan[1].pctl, priv->chan[1].pi,
1538               priv->chan[1].publ, priv->chan[1].msch);
1539         debug("cru %p, cic %p, grf %p, sgrf %p, pmucru %p\n", priv->cru,
1540               priv->cic, priv->pmugrf, priv->pmusgrf, priv->pmucru);
1541
1542 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1543         ret = clk_get_by_index_platdata(dev, 0, dtplat->clocks, &priv->ddr_clk);
1544 #else
1545         ret = clk_get_by_index(dev, 0, &priv->ddr_clk);
1546 #endif
1547         if (ret) {
1548                 printf("%s clk get failed %d\n", __func__, ret);
1549                 return ret;
1550         }
1551
1552         ret = clk_set_rate(&priv->ddr_clk, params->base.ddr_freq * MHz);
1553         if (ret < 0) {
1554                 printf("%s clk set failed %d\n", __func__, ret);
1555                 return ret;
1556         }
1557
1558         ret = sdram_init(priv, params);
1559         if (ret < 0) {
1560                 printf("%s DRAM init failed %d\n", __func__, ret);
1561                 return ret;
1562         }
1563
1564         return 0;
1565 }
1566 #endif
1567
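/*
 * In TPL/SPL this driver performs the full DRAM initialization; in U-Boot
 * proper it only reads back the size that the earlier stage encoded in
 * PMUGRF os_reg2.
 */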
1568 static int rk3399_dmc_probe(struct udevice *dev)
1569 {
1570 #if defined(CONFIG_TPL_BUILD) || \
1571         (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1572         if (rk3399_dmc_init(dev))
1573                 return 0;
1574 #else
1575         struct dram_info *priv = dev_get_priv(dev);
1576
1577         priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1578         debug("%s: pmugrf = %p\n", __func__, priv->pmugrf);
1579         priv->info.base = CONFIG_SYS_SDRAM_BASE;
1580         priv->info.size =
1581                 rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg2);
1582 #endif
1583         return 0;
1584 }
1585
1586 static int rk3399_dmc_get_info(struct udevice *dev, struct ram_info *info)
1587 {
1588         struct dram_info *priv = dev_get_priv(dev);
1589
1590         *info = priv->info;
1591
1592         return 0;
1593 }
1594
1595 static struct ram_ops rk3399_dmc_ops = {
1596         .get_info = rk3399_dmc_get_info,
1597 };
1598
1599 static const struct udevice_id rk3399_dmc_ids[] = {
1600         { .compatible = "rockchip,rk3399-dmc" },
1601         { }
1602 };
1603
1604 U_BOOT_DRIVER(dmc_rk3399) = {
1605         .name = "rockchip_rk3399_dmc",
1606         .id = UCLASS_RAM,
1607         .of_match = rk3399_dmc_ids,
1608         .ops = &rk3399_dmc_ops,
1609 #if defined(CONFIG_TPL_BUILD) || \
1610         (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1611         .ofdata_to_platdata = rk3399_dmc_ofdata_to_platdata,
1612 #endif
1613         .probe = rk3399_dmc_probe,
1614         .priv_auto_alloc_size = sizeof(struct dram_info),
1615 #if defined(CONFIG_TPL_BUILD) || \
1616         (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
1617         .platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
1618 #endif
1619 };