/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier:	GPL-2.0+ BSD-3-Clause
 */
13 #include <asm/arch/ddr.h>
14 #include <linux/iopoll.h>
15 #include "stm32mp1_ddr.h"
16 #include "stm32mp1_ddr_regs.h"
/* DDR interface control register in the RCC, and its reset-control bits */
#define RCC_DDRITFCR		0xD8

#define RCC_DDRITFCR_DDRCAPBRST	(BIT(14))	/* DDRC APB reset */
#define RCC_DDRITFCR_DDRCAXIRST	(BIT(15))	/* DDRC AXI reset */
#define RCC_DDRITFCR_DDRCORERST	(BIT(16))	/* DDRC core reset */
#define RCC_DDRITFCR_DPHYAPBRST	(BIT(17))	/* DDRPHYC APB reset */
#define RCC_DDRITFCR_DPHYRST	(BIT(18))	/* DDRPHYC reset */
#define RCC_DDRITFCR_DPHYCTLRST	(BIT(19))	/* DDRPHYC control reset */
29 u16 offset; /* offset for base address */
30 u8 par_offset; /* offset for parameter array */
33 #define INVALID_OFFSET 0xFF
/* build a reg_desc entry for a DDR controller register x, parameters in struct y */
#define DDRCTL_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrctl, x),\
	 offsetof(struct y, x)}

/* build a reg_desc entry for a DDR PHY register x, parameters in struct y */
#define DDRPHY_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrphy, x),\
	 offsetof(struct y, x)}
45 #define DDRCTL_REG_REG(x) DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
46 static const struct reg_desc ddr_reg[] = {
48 DDRCTL_REG_REG(mrctrl0),
49 DDRCTL_REG_REG(mrctrl1),
50 DDRCTL_REG_REG(derateen),
51 DDRCTL_REG_REG(derateint),
52 DDRCTL_REG_REG(pwrctl),
53 DDRCTL_REG_REG(pwrtmg),
54 DDRCTL_REG_REG(hwlpctl),
55 DDRCTL_REG_REG(rfshctl0),
56 DDRCTL_REG_REG(rfshctl3),
57 DDRCTL_REG_REG(crcparctl0),
58 DDRCTL_REG_REG(zqctl0),
59 DDRCTL_REG_REG(dfitmg0),
60 DDRCTL_REG_REG(dfitmg1),
61 DDRCTL_REG_REG(dfilpcfg0),
62 DDRCTL_REG_REG(dfiupd0),
63 DDRCTL_REG_REG(dfiupd1),
64 DDRCTL_REG_REG(dfiupd2),
65 DDRCTL_REG_REG(dfiphymstr),
66 DDRCTL_REG_REG(odtmap),
69 DDRCTL_REG_REG(dbgcmd),
70 DDRCTL_REG_REG(poisoncfg),
71 DDRCTL_REG_REG(pccfg),
74 #define DDRCTL_REG_TIMING(x) DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
75 static const struct reg_desc ddr_timing[] = {
76 DDRCTL_REG_TIMING(rfshtmg),
77 DDRCTL_REG_TIMING(dramtmg0),
78 DDRCTL_REG_TIMING(dramtmg1),
79 DDRCTL_REG_TIMING(dramtmg2),
80 DDRCTL_REG_TIMING(dramtmg3),
81 DDRCTL_REG_TIMING(dramtmg4),
82 DDRCTL_REG_TIMING(dramtmg5),
83 DDRCTL_REG_TIMING(dramtmg6),
84 DDRCTL_REG_TIMING(dramtmg7),
85 DDRCTL_REG_TIMING(dramtmg8),
86 DDRCTL_REG_TIMING(dramtmg14),
87 DDRCTL_REG_TIMING(odtcfg),
90 #define DDRCTL_REG_MAP(x) DDRCTL_REG(x, stm32mp1_ddrctrl_map)
91 static const struct reg_desc ddr_map[] = {
92 DDRCTL_REG_MAP(addrmap1),
93 DDRCTL_REG_MAP(addrmap2),
94 DDRCTL_REG_MAP(addrmap3),
95 DDRCTL_REG_MAP(addrmap4),
96 DDRCTL_REG_MAP(addrmap5),
97 DDRCTL_REG_MAP(addrmap6),
98 DDRCTL_REG_MAP(addrmap9),
99 DDRCTL_REG_MAP(addrmap10),
100 DDRCTL_REG_MAP(addrmap11),
103 #define DDRCTL_REG_PERF(x) DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
104 static const struct reg_desc ddr_perf[] = {
105 DDRCTL_REG_PERF(sched),
106 DDRCTL_REG_PERF(sched1),
107 DDRCTL_REG_PERF(perfhpr1),
108 DDRCTL_REG_PERF(perflpr1),
109 DDRCTL_REG_PERF(perfwr1),
110 DDRCTL_REG_PERF(pcfgr_0),
111 DDRCTL_REG_PERF(pcfgw_0),
112 DDRCTL_REG_PERF(pcfgqos0_0),
113 DDRCTL_REG_PERF(pcfgqos1_0),
114 DDRCTL_REG_PERF(pcfgwqos0_0),
115 DDRCTL_REG_PERF(pcfgwqos1_0),
116 DDRCTL_REG_PERF(pcfgr_1),
117 DDRCTL_REG_PERF(pcfgw_1),
118 DDRCTL_REG_PERF(pcfgqos0_1),
119 DDRCTL_REG_PERF(pcfgqos1_1),
120 DDRCTL_REG_PERF(pcfgwqos0_1),
121 DDRCTL_REG_PERF(pcfgwqos1_1),
124 #define DDRPHY_REG_REG(x) DDRPHY_REG(x, stm32mp1_ddrphy_reg)
125 static const struct reg_desc ddrphy_reg[] = {
126 DDRPHY_REG_REG(pgcr),
127 DDRPHY_REG_REG(aciocr),
128 DDRPHY_REG_REG(dxccr),
129 DDRPHY_REG_REG(dsgcr),
131 DDRPHY_REG_REG(odtcr),
132 DDRPHY_REG_REG(zq0cr1),
133 DDRPHY_REG_REG(dx0gcr),
134 DDRPHY_REG_REG(dx1gcr),
135 DDRPHY_REG_REG(dx2gcr),
136 DDRPHY_REG_REG(dx3gcr),
139 #define DDRPHY_REG_TIMING(x) DDRPHY_REG(x, stm32mp1_ddrphy_timing)
140 static const struct reg_desc ddrphy_timing[] = {
141 DDRPHY_REG_TIMING(ptr0),
142 DDRPHY_REG_TIMING(ptr1),
143 DDRPHY_REG_TIMING(ptr2),
144 DDRPHY_REG_TIMING(dtpr0),
145 DDRPHY_REG_TIMING(dtpr1),
146 DDRPHY_REG_TIMING(dtpr2),
147 DDRPHY_REG_TIMING(mr0),
148 DDRPHY_REG_TIMING(mr1),
149 DDRPHY_REG_TIMING(mr2),
150 DDRPHY_REG_TIMING(mr3),
153 #define DDRPHY_REG_CAL(x) DDRPHY_REG(x, stm32mp1_ddrphy_cal)
154 static const struct reg_desc ddrphy_cal[] = {
155 DDRPHY_REG_CAL(dx0dllcr),
156 DDRPHY_REG_CAL(dx0dqtr),
157 DDRPHY_REG_CAL(dx0dqstr),
158 DDRPHY_REG_CAL(dx1dllcr),
159 DDRPHY_REG_CAL(dx1dqtr),
160 DDRPHY_REG_CAL(dx1dqstr),
161 DDRPHY_REG_CAL(dx2dllcr),
162 DDRPHY_REG_CAL(dx2dqtr),
163 DDRPHY_REG_CAL(dx2dqstr),
164 DDRPHY_REG_CAL(dx3dllcr),
165 DDRPHY_REG_CAL(dx3dqtr),
166 DDRPHY_REG_CAL(dx3dqstr),
186 struct ddr_reg_info {
188 const struct reg_desc *desc;
193 #define DDRPHY_REG_CAL(x) DDRPHY_REG(x, stm32mp1_ddrphy_cal)
195 const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
197 "static", ddr_reg, ARRAY_SIZE(ddr_reg), DDR_BASE},
199 "timing", ddr_timing, ARRAY_SIZE(ddr_timing), DDR_BASE},
201 "perf", ddr_perf, ARRAY_SIZE(ddr_perf), DDR_BASE},
203 "map", ddr_map, ARRAY_SIZE(ddr_map), DDR_BASE},
205 "static", ddrphy_reg, ARRAY_SIZE(ddrphy_reg), DDRPHY_BASE},
207 "timing", ddrphy_timing, ARRAY_SIZE(ddrphy_timing), DDRPHY_BASE},
209 "cal", ddrphy_cal, ARRAY_SIZE(ddrphy_cal), DDRPHY_BASE},
212 const char *base_name[] = {
214 [DDRPHY_BASE] = "phy",
217 static u32 get_base_addr(const struct ddr_info *priv, enum base_type base)
219 if (base == DDRPHY_BASE)
220 return (u32)priv->phy;
222 return (u32)priv->ctl;
225 static void set_reg(const struct ddr_info *priv,
230 unsigned int *ptr, value;
231 enum base_type base = ddr_registers[type].base;
232 u32 base_addr = get_base_addr(priv, base);
233 const struct reg_desc *desc = ddr_registers[type].desc;
235 debug("init %s\n", ddr_registers[type].name);
236 for (i = 0; i < ddr_registers[type].size; i++) {
237 ptr = (unsigned int *)(base_addr + desc[i].offset);
238 if (desc[i].par_offset == INVALID_OFFSET) {
239 pr_err("invalid parameter offset for %s", desc[i].name);
241 value = *((u32 *)((u32)param +
242 desc[i].par_offset));
244 debug("[0x%x] %s= 0x%08x\n",
245 (u32)ptr, desc[i].name, value);
250 static void ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
255 ret = readl_poll_timeout(&phy->pgsr, pgsr,
256 pgsr & (DDRPHYC_PGSR_IDONE |
258 DDRPHYC_PGSR_DTIERR |
259 DDRPHYC_PGSR_DFTERR |
261 DDRPHYC_PGSR_RVEIRR),
263 debug("\n[0x%08x] pgsr = 0x%08x ret=%d\n",
264 (u32)&phy->pgsr, pgsr, ret);
267 void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, u32 pir)
269 pir |= DDRPHYC_PIR_INIT;
270 writel(pir, &phy->pir);
271 debug("[0x%08x] pir = 0x%08x -> 0x%08x\n",
272 (u32)&phy->pir, pir, readl(&phy->pir));
274 /* need to wait 10 configuration clock before start polling */
277 /* Wait DRAM initialization and Gate Training Evaluation complete */
278 ddrphy_idone_wait(phy);
281 /* start quasi dynamic register update */
282 static void start_sw_done(struct stm32mp1_ddrctl *ctl)
284 clrbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
287 /* wait quasi dynamic register update */
288 static void wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
293 setbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
295 ret = readl_poll_timeout(&ctl->swstat, swstat,
296 swstat & DDRCTRL_SWSTAT_SW_DONE_ACK,
299 panic("Timeout initialising DRAM : DDR->swstat = %x\n",
302 debug("[0x%08x] swstat = 0x%08x\n", (u32)&ctl->swstat, swstat);
305 /* wait quasi dynamic register update */
306 static void wait_operating_mode(struct ddr_info *priv, int mode)
308 u32 stat, val, mask, val2 = 0, mask2 = 0;
311 mask = DDRCTRL_STAT_OPERATING_MODE_MASK;
313 /* self-refresh due to software => check also STAT.selfref_type */
314 if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
315 mask |= DDRCTRL_STAT_SELFREF_TYPE_MASK;
316 stat |= DDRCTRL_STAT_SELFREF_TYPE_SR;
317 } else if (mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) {
318 /* normal mode: handle also automatic self refresh */
319 mask2 = DDRCTRL_STAT_OPERATING_MODE_MASK |
320 DDRCTRL_STAT_SELFREF_TYPE_MASK;
321 val2 = DDRCTRL_STAT_OPERATING_MODE_SR |
322 DDRCTRL_STAT_SELFREF_TYPE_ASR;
325 ret = readl_poll_timeout(&priv->ctl->stat, stat,
326 ((stat & mask) == val) ||
327 (mask2 && ((stat & mask2) == val2)),
331 panic("Timeout DRAM : DDR->stat = %x\n", stat);
333 debug("[0x%08x] stat = 0x%08x\n", (u32)&priv->ctl->stat, stat);
336 void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
339 /* quasi-dynamic register update*/
340 setbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
341 clrbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
342 clrbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
343 wait_sw_done_ack(ctl);
346 void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
347 u32 rfshctl3, u32 pwrctl)
350 if (!(rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH))
351 clrbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
352 if (pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN)
353 setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
354 setbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
355 wait_sw_done_ack(ctl);
358 /* board-specific DDR power initializations. */
359 __weak int board_ddr_power_init(void)
365 void stm32mp1_ddr_init(struct ddr_info *priv,
366 const struct stm32mp1_ddr_config *config)
371 ret = board_ddr_power_init();
374 panic("ddr power init failed\n");
376 debug("name = %s\n", config->info.name);
377 debug("speed = %d MHz\n", config->info.speed);
378 debug("size = 0x%x\n", config->info.size);
380 * 1. Program the DWC_ddr_umctl2 registers
381 * 1.1 RESETS: presetn, core_ddrc_rstn, aresetn
383 /* Assert All DDR part */
384 setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
385 setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
386 setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
387 setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
388 setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
389 setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
391 /* 1.2. start CLOCK */
392 if (stm32mp1_ddr_clk_enable(priv, config->info.speed))
393 panic("invalid DRAM clock : %d MHz\n",
396 /* 1.3. deassert reset */
397 /* de-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST */
398 clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
399 clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
400 /* De-assert presetn once the clocks are active
401 * and stable via DDRCAPBRST bit
403 clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
405 /* 1.4. wait 4 cycles for synchronization */
411 /* 1.5. initialize registers ddr_umctl2 */
412 /* Stop uMCTL2 before PHY is ready */
413 clrbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
414 debug("[0x%08x] dfimisc = 0x%08x\n",
415 (u32)&priv->ctl->dfimisc, readl(&priv->ctl->dfimisc));
417 set_reg(priv, REG_REG, &config->c_reg);
418 set_reg(priv, REG_TIMING, &config->c_timing);
419 set_reg(priv, REG_MAP, &config->c_map);
421 /* skip CTRL init, SDRAM init is done by PHY PUBL */
422 clrsetbits_le32(&priv->ctl->init0,
423 DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
424 DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
426 set_reg(priv, REG_PERF, &config->c_perf);
428 /* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
429 clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
430 clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
431 clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
433 /* 3. start PHY init by accessing relevant PUBL registers
434 * (DXGCR, DCR, PTR*, MR*, DTPR*)
436 set_reg(priv, REGPHY_REG, &config->p_reg);
437 set_reg(priv, REGPHY_TIMING, &config->p_timing);
438 set_reg(priv, REGPHY_CAL, &config->p_cal);
440 /* 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
441 * Perform DDR PHY DRAM initialization and Gate Training Evaluation
443 ddrphy_idone_wait(priv->phy);
445 /* 5. Indicate to PUBL that controller performs SDRAM initialization
446 * by setting PIR.INIT and PIR CTLDINIT and pool PGSR.IDONE
447 * DRAM init is done by PHY, init0.skip_dram.init = 1
449 pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
450 DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;
452 if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
453 pir |= DDRPHYC_PIR_DRAMRST; /* only for DDR3 */
455 stm32mp1_ddrphy_init(priv->phy, pir);
457 /* 6. SET DFIMISC.dfi_init_complete_en to 1 */
458 /* Enable quasi-dynamic register programming*/
459 start_sw_done(priv->ctl);
460 setbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
461 wait_sw_done_ack(priv->ctl);
463 /* 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
464 * by monitoring STAT.operating_mode signal
466 /* wait uMCTL2 ready */
468 wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);
470 debug("DDR DQS training : ");
471 /* 8. Disable Auto refresh and power down by setting
472 * - RFSHCTL3.dis_au_refresh = 1
473 * - PWRCTL.powerdown_en = 0
474 * - DFIMISC.dfiinit_complete_en = 0
476 stm32mp1_refresh_disable(priv->ctl);
478 /* 9. Program PUBL PGCR to enable refresh during training and rank to train
479 * not done => keep the programed value in PGCR
482 /* 10. configure PUBL PIR register to specify which training step to run */
483 /* warning : RVTRN is not supported by this PUBL */
484 stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);
486 /* 11. monitor PUB PGSR.IDONE to poll cpmpletion of training sequence */
487 ddrphy_idone_wait(priv->phy);
489 /* 12. set back registers in step 8 to the orginal values if desidered */
490 stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
491 config->c_reg.pwrctl);
493 /* enable uMCTL2 AXI port 0 and 1 */
494 setbits_le32(&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
495 setbits_le32(&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);