/*
 * DDR3 mem setup file for board based on EXYNOS5
 *
 * Copyright (C) 2012 Samsung Electronics
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */
11 #include <asm/arch/clock.h>
12 #include <asm/arch/cpu.h>
13 #include <asm/arch/dmc.h>
14 #include <asm/arch/power.h>
15 #include "common_setup.h"
16 #include "exynos5_setup.h"
17 #include "clock_init.h"
21 #ifdef CONFIG_EXYNOS5250
22 static void reset_phy_ctrl(void)
24 struct exynos5_clock *clk =
25 (struct exynos5_clock *)samsung_get_base_clock();
27 writel(DDR3PHY_CTRL_PHY_RESET_OFF, &clk->lpddr3phy_ctrl);
28 writel(DDR3PHY_CTRL_PHY_RESET, &clk->lpddr3phy_ctrl);
/*
 * Initialise the DDR3 memory controller (DMC) and both DRAM PHYs on
 * Exynos5250 using the board-specific values in @mem.
 *
 * @mem:		memory timings and register values for this board
 * @mem_iv_size:	channel interleaving size (NOTE(review): the code
 *			below writes mem->iv_size, not this parameter —
 *			confirm against callers)
 * Returns 0 on success or a SETUP_ERR_* code on failure.
 *
 * NOTE(review): this view of the file is missing several lines
 * (parameter/statement continuations, braces, the timeout-loop bodies
 * and some local declarations such as the countdown variable `i`).
 * The code fragments below are kept exactly as found; only comments
 * were added.
 */
int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
	struct exynos5_phy_control *phy0_ctrl, *phy1_ctrl;
	struct exynos5_dmc *dmc;

	/* Map the PHY0, PHY1 and DMC register blocks */
	phy0_ctrl = (struct exynos5_phy_control *)samsung_get_base_dmc_phy();
	phy1_ctrl = (struct exynos5_phy_control *)(samsung_get_base_dmc_phy()
	dmc = (struct exynos5_dmc *)samsung_get_base_dmc_ctrl();

	/* Set Impedance Output Driver */
	val = (mem->impedance << CA_CK_DRVR_DS_OFFSET) |
		(mem->impedance << CA_CKE_DRVR_DS_OFFSET) |
		(mem->impedance << CA_CS_DRVR_DS_OFFSET) |
		(mem->impedance << CA_ADR_DRVR_DS_OFFSET);
	writel(val, &phy0_ctrl->phy_con39);
	writel(val, &phy1_ctrl->phy_con39);

	/* Set Read Latency and Burst Length for PHY0 and PHY1 */
	val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
		(mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
	writel(val, &phy0_ctrl->phy_con42);
	writel(val, &phy1_ctrl->phy_con42);

	/* ZQ I/O impedance calibration on both PHYs; abort on failure */
	if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
			  &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

	/* Board-specific DQS pull-down settings, one value per PHY */
	writel(mem->phy0_pulld_dqs, &phy0_ctrl->phy_con14);
	writel(mem->phy1_pulld_dqs, &phy1_ctrl->phy_con14);

	/*
	 * Start DFI initialisation: concontrol with rd_fetch and
	 * dfi_init_start set (destination continuation line — presumably
	 * &dmc->concontrol — is not visible in this view).
	 */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
		| (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT),

	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/* Per-PHY DQS offset, DQ offset and tFS values from board timings */
	writel(mem->phy0_dqs, &phy0_ctrl->phy_con4);
	writel(mem->phy1_dqs, &phy1_ctrl->phy_con4);

	writel(mem->phy0_dq, &phy0_ctrl->phy_con6);
	writel(mem->phy1_dq, &phy1_ctrl->phy_con6);

	writel(mem->phy0_tFS, &phy0_ctrl->phy_con10);
	writel(mem->phy1_tFS, &phy1_ctrl->phy_con10);

	/* DLL control: start point, increment, dll-on and ref fields */
	val = (mem->ctrl_start_point << PHY_CON12_CTRL_START_POINT_SHIFT) |
		(mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
		(mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
		(mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
	writel(val, &phy0_ctrl->phy_con12);
	writel(val, &phy1_ctrl->phy_con12);

	/* Start DLL locking */
	writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
	       &phy0_ctrl->phy_con12);
	writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
	       &phy1_ctrl->phy_con12);

	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/*
	 * Rewrite concontrol without dfi_init_start (destination
	 * continuation not visible here).
	 */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),

	/* Memory Channel Inteleaving Size */
	writel(mem->iv_size, &dmc->ivcontrol);

	/* Chip configuration and base/mask for both channels */
	writel(mem->memconfig, &dmc->memconfig0);
	writel(mem->memconfig, &dmc->memconfig1);
	writel(mem->membaseconfig0, &dmc->membaseconfig0);
	writel(mem->membaseconfig1, &dmc->membaseconfig1);

	/* Precharge Configuration (destination line not visible) */
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,

	/* Power Down mode Configuration (destination line not visible) */
	writel(mem->dpwrdn_cyc << PWRDNCONFIG_DPWRDN_CYC_SHIFT |
		mem->dsref_cyc << PWRDNCONFIG_DSREF_CYC_SHIFT,

	/*
	 * TimingRow, TimingData, TimingPower and Timingaref
	 * values as per Memory AC parameters
	 */
	writel(mem->timing_ref, &dmc->timingref);
	writel(mem->timing_row, &dmc->timingrow);
	writel(mem->timing_data, &dmc->timingdata);
	writel(mem->timing_power, &dmc->timingpower);

	/* Send PALL command */
	dmc_config_prech(mem, &dmc->directcmd);

	/* Send NOP, MRS and ZQINIT commands */
	dmc_config_mrs(mem, &dmc->directcmd);

	if (mem->gate_leveling_enable) {
		/* Put PHY_CON0 back to its reset value before leveling */
		val = PHY_CON0_RESET_VAL;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/* Enable initial deskew */
		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		/* Enable byte read leveling */
		val = PHY_CON0_RESET_VAL;
		val |= BYTE_RDLVL_EN;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/*
		 * For leveling, drive the DLL with ctrl_force instead of
		 * ctrl_dll_on (compare with the earlier phy_con12 setup).
		 */
		val = (mem->ctrl_start_point <<
		       PHY_CON12_CTRL_START_POINT_SHIFT) |
		      (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
		      (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
		      (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
		      (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
		writel(val, &phy0_ctrl->phy_con12);
		writel(val, &phy1_ctrl->phy_con12);

		/* Add gate read leveling on top of deskew */
		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		val |= RDLVL_GATE_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		val = PHY_CON0_RESET_VAL;
		val |= BYTE_RDLVL_EN;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/* Clear the gate duration adjust field */
		val = PHY_CON1_RESET_VAL;
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy0_ctrl->phy_con1);
		writel(val, &phy1_ctrl->phy_con1);

		/* Kick off gate leveling in the DMC */
		writel(CTRL_RDLVL_GATE_ENABLE, &dmc->rdlvl_config);

		/*
		 * Poll until read leveling completes on both channels.
		 * `i` is a countdown declared on a line not visible here;
		 * the loop body (delay + decrement) is also not visible.
		 */
		while ((readl(&dmc->phystatus) &
			(RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1)) !=
		       (RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1) && i > 0) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * complete (rest of the original comment and the
			 * loop body are not visible in this view).
			 */
		/* Countdown exhausted: gate leveling timed out */
		return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &dmc->rdlvl_config);

		/* Release the DQS pull-downs applied earlier */
		writel(0, &phy0_ctrl->phy_con14);
		writel(0, &phy1_ctrl->phy_con14);

		/* Back to normal operation: ctrl_dll_on re-added */
		val = (mem->ctrl_start_point <<
		       PHY_CON12_CTRL_START_POINT_SHIFT) |
		      (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
		      (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
		      (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
		      (mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
		      (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
		writel(val, &phy0_ctrl->phy_con12);
		writel(val, &phy1_ctrl->phy_con12);

		update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/* Send PALL command */
	dmc_config_prech(mem, &dmc->directcmd);

	writel(mem->memcontrol, &dmc->memcontrol);

	/* Set DMC Concontrol and enable auto-refresh counter */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
		| (mem->aref_en << CONCONTROL_AREF_EN_SHIFT), &dmc->concontrol);
223 #ifdef CONFIG_EXYNOS5420
/*
 * Initialise the DDR3 DRAM controllers (DREX0/DREX1), both PHYs and
 * the TZASC address-map blocks on Exynos5420 using the values in @mem.
 *
 * @mem:		memory timings and register values for this board
 * @mem_iv_size:	channel interleaving size (NOTE(review): not
 *			visibly used below — confirm against callers)
 * Returns a SETUP_ERR_* code on failure (the success return and the
 * function's closing lines fall outside this view).
 *
 * NOTE(review): this view of the file is missing several lines
 * (statement continuations, braces, `do {` lines, the timeout-loop
 * bodies and declarations such as `i` and `chip`). The code fragments
 * below are kept exactly as found; only comments were added.
 */
int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
	struct exynos5420_clock *clk =
		(struct exynos5420_clock *)samsung_get_base_clock();
	struct exynos5420_power *power =
		(struct exynos5420_power *)samsung_get_base_power();
	struct exynos5420_phy_control *phy0_ctrl, *phy1_ctrl;
	struct exynos5420_dmc *drex0, *drex1;
	struct exynos5420_tzasc *tzasc0, *tzasc1;
	uint32_t val, n_lock_r, n_lock_w_phy0, n_lock_w_phy1;

	/* Map PHY0/PHY1, DREX0/DREX1 and TZASC0/TZASC1 register blocks */
	phy0_ctrl = (struct exynos5420_phy_control *)samsung_get_base_dmc_phy();
	phy1_ctrl = (struct exynos5420_phy_control *)(samsung_get_base_dmc_phy()
	drex0 = (struct exynos5420_dmc *)samsung_get_base_dmc_ctrl();
	drex1 = (struct exynos5420_dmc *)(samsung_get_base_dmc_ctrl()
	tzasc0 = (struct exynos5420_tzasc *)samsung_get_base_dmc_tzasc();
	tzasc1 = (struct exynos5420_tzasc *)(samsung_get_base_dmc_tzasc()

	/* Enable PAUSE for DREX */
	setbits_le32(&clk->pause, ENABLE_BIT);

	/* Enable BYPASS mode */
	setbits_le32(&clk->bpll_con1, BYPASS_EN);

	/* Route FOUTBPLL to CDREX and wait for the mux to switch */
	writel(MUX_BPLL_SEL_FOUTBPLL, &clk->src_cdrex);
		val = readl(&clk->mux_stat_cdrex);
		val &= BPLL_SEL_MASK;
	} while (val != FOUTBPLL);

	clrbits_le32(&clk->bpll_con1, BYPASS_EN);

	/* Specify the DDR memory type as DDR3 */
	val = readl(&phy0_ctrl->phy_con0);
	val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	writel(val, &phy0_ctrl->phy_con0);

	val = readl(&phy1_ctrl->phy_con0);
	val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	writel(val, &phy1_ctrl->phy_con0);

	/* Set Read Latency and Burst Length for PHY0 and PHY1 */
	val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
		(mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
	writel(val, &phy0_ctrl->phy_con42);
	writel(val, &phy1_ctrl->phy_con42);

	/* Program the DDR3 write-data-enable timing in both PHYs */
	val = readl(&phy0_ctrl->phy_con26);
	val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
	val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
	writel(val, &phy0_ctrl->phy_con26);

	val = readl(&phy1_ctrl->phy_con26);
	val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
	val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
	writel(val, &phy1_ctrl->phy_con26);

	/*
	 * Set Driver strength for CK, CKE, CS & CA to 0x7
	 * Set Driver strength for Data Slice 0~3 to 0x7
	 */
	val = (0x7 << CA_CK_DRVR_DS_OFFSET) | (0x7 << CA_CKE_DRVR_DS_OFFSET) |
		(0x7 << CA_CS_DRVR_DS_OFFSET) | (0x7 << CA_ADR_DRVR_DS_OFFSET);
	val |= (0x7 << DA_3_DS_OFFSET) | (0x7 << DA_2_DS_OFFSET) |
		(0x7 << DA_1_DS_OFFSET) | (0x7 << DA_0_DS_OFFSET);
	writel(val, &phy0_ctrl->phy_con39);
	writel(val, &phy1_ctrl->phy_con39);

	/* ZQ I/O impedance calibration; abort on failure */
	if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
			  &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

	/* Stop the ZQ clock divider once calibration is done */
	clrbits_le32(&phy0_ctrl->phy_con16, ZQ_CLK_DIV_EN);
	clrbits_le32(&phy1_ctrl->phy_con16, ZQ_CLK_DIV_EN);

	/* OR the board's DQS pull-down settings into phy_con14 */
	val = readl(&phy0_ctrl->phy_con14);
	val |= mem->phy0_pulld_dqs;
	writel(val, &phy0_ctrl->phy_con14);
	val = readl(&phy1_ctrl->phy_con14);
	val |= mem->phy1_pulld_dqs;
	writel(val, &phy1_ctrl->phy_con14);

	/* Enable memory and PHY termination on both DREX channels */
	val = MEM_TERM_EN | PHY_TERM_EN;
	writel(val, &drex0->phycontrol0);
	writel(val, &drex1->phycontrol0);

	/*
	 * Start DFI initialisation on both channels: concontrol with
	 * dfi_init_start and rd_fetch (the destination continuations —
	 * presumably &drex0/1->concontrol — are not visible here).
	 */
	writel(mem->concontrol |
		(mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
		(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
	writel(mem->concontrol |
		(mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
		(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),

	/* Busy-wait for DFI init complete on each channel in turn */
		val = readl(&drex0->phystatus);
	} while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
		val = readl(&drex1->phystatus);
	} while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);

	clrbits_le32(&drex0->concontrol, DFI_INIT_START);
	clrbits_le32(&drex1->concontrol, DFI_INIT_START);

	update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
	update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);

	/*
	 * Chip-select base/mask programming; per the surviving comment
	 * the two chip ranges are:
	 * 0x2000_0000 ~ 0x5FFF_FFFF
	 * 0x6000_0000 ~ 0x9FFF_FFFF
	 */
	val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0) |
		DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
	writel(val, &tzasc0->membaseconfig0);
	writel(val, &tzasc1->membaseconfig0);

	val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1) |
		DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
	writel(val, &tzasc0->membaseconfig1);
	writel(val, &tzasc1->membaseconfig1);

	/*
	 * Memory Channel Inteleaving Size
	 * Ares Channel interleaving = 128 bytes
	 */
	writel(mem->memconfig, &tzasc0->memconfig0);
	writel(mem->memconfig, &tzasc1->memconfig0);
	writel(mem->memconfig, &tzasc0->memconfig1);
	writel(mem->memconfig, &tzasc1->memconfig1);

	/* Precharge Configuration */
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &drex0->prechconfig0);
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &drex1->prechconfig0);

	/*
	 * TimingRow, TimingData, TimingPower and Timingaref
	 * values as per Memory AC parameters
	 */
	writel(mem->timing_ref, &drex0->timingref);
	writel(mem->timing_ref, &drex1->timingref);
	writel(mem->timing_row, &drex0->timingrow0);
	writel(mem->timing_row, &drex1->timingrow0);
	writel(mem->timing_data, &drex0->timingdata0);
	writel(mem->timing_data, &drex1->timingdata0);
	writel(mem->timing_power, &drex0->timingpower0);
	writel(mem->timing_power, &drex1->timingpower0);

	/*
	 * Send NOP, MRS and ZQINIT commands
	 * Sending MRS command will reset the DRAM. We should not be
	 * reseting the DRAM after resume, this will lead to memory
	 * corruption as DRAM content is lost after DRAM reset
	 * (NOTE(review): the resume-path guard itself is not visible
	 * in this view).
	 */
	dmc_config_mrs(mem, &drex0->directcmd);
	dmc_config_mrs(mem, &drex1->directcmd);

	/*
	 * During Suspend-Resume & S/W-Reset, as soon as PMU releases
	 * pad retention, CKE goes high. This causes memory contents
	 * not to be retained during DRAM initialization. Therfore,
	 * there is a new control register(0x100431e8[28]) which lets us
	 * release pad retention and retain the memory content until the
	 * initialization is complete.
	 */
	writel(PAD_RETENTION_DRAM_COREBLK_VAL,
	       &power->pad_retention_dram_coreblk_option);
	/* Wait until the PMU reports pad retention released (status == 1) */
		val = readl(&power->pad_retention_dram_status);
	} while (val != 0x1);

	/*
	 * CKE PAD retention disables DRAM self-refresh mode.
	 * Send auto refresh command for DRAM refresh.
	 * (128 refresh commands per chip; the directcmd destinations
	 * are on continuation lines not visible here.)
	 */
	for (i = 0; i < 128; i++) {
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(DIRECT_CMD_REFA |
				(chip << DIRECT_CMD_CHIP_SHIFT),
			writel(DIRECT_CMD_REFA |
				(chip << DIRECT_CMD_CHIP_SHIFT),

	if (mem->gate_leveling_enable) {
		/* Reset PHY_CON0, then enable command mode */
		writel(PHY_CON0_RESET_VAL, &phy0_ctrl->phy_con0);
		writel(PHY_CON0_RESET_VAL, &phy1_ctrl->phy_con0);

		setbits_le32(&phy0_ctrl->phy_con0, P0_CMD_EN);
		setbits_le32(&phy1_ctrl->phy_con0, P0_CMD_EN);

		/* Enable initial deskew */
		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		/* Set the read-leveling pass adjust value in phy_con1 */
		val = readl(&phy0_ctrl->phy_con1);
		val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
		writel(val, &phy0_ctrl->phy_con1);

		val = readl(&phy1_ctrl->phy_con1);
		val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
		writel(val, &phy1_ctrl->phy_con1);

		/*
		 * Capture the coarse DLL lock value from phy_con13,
		 * then freeze the DLL: clear ctrl_dll_on in phy_con12
		 * and force the captured lock value instead.
		 */
		n_lock_r = readl(&phy0_ctrl->phy_con13);
		n_lock_w_phy0 = (n_lock_r & CTRL_LOCK_COARSE_MASK) >> 2;
		n_lock_r = readl(&phy0_ctrl->phy_con12);
		n_lock_r &= ~CTRL_DLL_ON;
		n_lock_r |= n_lock_w_phy0;
		writel(n_lock_r, &phy0_ctrl->phy_con12);

		n_lock_r = readl(&phy1_ctrl->phy_con13);
		n_lock_w_phy1 = (n_lock_r & CTRL_LOCK_COARSE_MASK) >> 2;
		n_lock_r = readl(&phy1_ctrl->phy_con12);
		n_lock_r &= ~CTRL_DLL_ON;
		n_lock_r |= n_lock_w_phy1;
		writel(n_lock_r, &phy1_ctrl->phy_con12);

		/*
		 * Direct command: bank field 3 with low bits 0x4 —
		 * NOTE(review): presumably an MR3 write enabling MPR for
		 * leveling; confirm against the Exynos DREX manual.
		 * Destinations are on continuation lines not visible.
		 */
		val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),

		/* Enable gate leveling and shared-gate mode in the PHYs */
		setbits_le32(&phy0_ctrl->phy_con2, RDLVL_GATE_EN);
		setbits_le32(&phy1_ctrl->phy_con2, RDLVL_GATE_EN);

		setbits_le32(&phy0_ctrl->phy_con0, CTRL_SHGATE);
		setbits_le32(&phy1_ctrl->phy_con0, CTRL_SHGATE);

		/* Clear the gate duration adjust field */
		val = readl(&phy0_ctrl->phy_con1);
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy0_ctrl->phy_con1);

		val = readl(&phy1_ctrl->phy_con1);
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy1_ctrl->phy_con1);

		/*
		 * Run gate leveling on DREX0, then DREX1, polling for
		 * completion. `i` is a countdown whose declaration and
		 * loop body (delay + decrement) are not visible here.
		 */
		writel(CTRL_RDLVL_GATE_ENABLE, &drex0->rdlvl_config);
		while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO) !=
			RDLVL_COMPLETE_CHO) && (i > 0)) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * complete (rest of comment/body not visible).
			 */
		/* Countdown exhausted: gate leveling timed out */
		return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &drex0->rdlvl_config);

		writel(CTRL_RDLVL_GATE_ENABLE, &drex1->rdlvl_config);
		while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO) !=
			RDLVL_COMPLETE_CHO) && (i > 0)) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * complete (rest of comment/body not visible).
			 */
		return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &drex1->rdlvl_config);

		/* Release the DQS pull-downs applied earlier */
		writel(0, &phy0_ctrl->phy_con14);
		writel(0, &phy1_ctrl->phy_con14);

		/*
		 * Direct command with bank field 3 and low bits clear —
		 * NOTE(review): presumably the matching MR3 write that
		 * turns MPR back off; destinations not visible.
		 */
		val = (0x3 << DIRECT_CMD_BANK_SHIFT);
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),

	if (mem->read_leveling_enable) {
		/* Set Read DQ Calibration */
		val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),

		/* Enable DDR3 read leveling in both PHYs */
		val = readl(&phy0_ctrl->phy_con1);
		val |= READ_LEVELLING_DDR3;
		writel(val, &phy0_ctrl->phy_con1);
		val = readl(&phy1_ctrl->phy_con1);
		val |= READ_LEVELLING_DDR3;
		writel(val, &phy1_ctrl->phy_con1);

		val = readl(&phy0_ctrl->phy_con2);
		val |= (RDLVL_EN | RDLVL_INCR_ADJ);
		writel(val, &phy0_ctrl->phy_con2);
		val = readl(&phy1_ctrl->phy_con2);
		val |= (RDLVL_EN | RDLVL_INCR_ADJ);
		writel(val, &phy1_ctrl->phy_con2);

		/*
		 * Run data read leveling on DREX0 then DREX1, polling
		 * with the same (not fully visible) countdown pattern.
		 */
		setbits_le32(&drex0->rdlvl_config,
			     CTRL_RDLVL_DATA_ENABLE);
		while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO)
			!= RDLVL_COMPLETE_CHO) && (i > 0)) {
			/*
			 * TODO(waihong): Comment on how long this take
			 * (rest of comment/body not visible).
			 */
		return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;

		clrbits_le32(&drex0->rdlvl_config,
			     CTRL_RDLVL_DATA_ENABLE);
		setbits_le32(&drex1->rdlvl_config,
			     CTRL_RDLVL_DATA_ENABLE);
		while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO)
			!= RDLVL_COMPLETE_CHO) && (i > 0)) {
			/*
			 * TODO(waihong): Comment on how long this take
			 * (rest of comment/body not visible).
			 */
		return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;

		clrbits_le32(&drex1->rdlvl_config,
			     CTRL_RDLVL_DATA_ENABLE);

		/* End of read DQ calibration (destinations not visible) */
		val = (0x3 << DIRECT_CMD_BANK_SHIFT);
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),

		update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
		update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);

	/* Common Settings for Leveling */
	val = PHY_CON12_RESET_VAL;
	writel((val + n_lock_w_phy0), &phy0_ctrl->phy_con12);
	writel((val + n_lock_w_phy1), &phy1_ctrl->phy_con12);

	setbits_le32(&phy0_ctrl->phy_con2, DLL_DESKEW_EN);
	setbits_le32(&phy1_ctrl->phy_con2, DLL_DESKEW_EN);

	/* Send PALL command */
	dmc_config_prech(mem, &drex0->directcmd);
	dmc_config_prech(mem, &drex1->directcmd);

	writel(mem->memcontrol, &drex0->memcontrol);
	writel(mem->memcontrol, &drex1->memcontrol);

	/*
	 * Set DMC Concontrol: Enable auto-refresh counter, provide
	 * read data fetch cycles and enable DREX auto set powerdown
	 * for input buffer of I/O in none read memory state.
	 * (Destination continuations not visible here.)
	 */
	writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
		(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
		DMC_CONCONTROL_IO_PD_CON(0x2),
	writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
		(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
		DMC_CONCONTROL_IO_PD_CON(0x2),

	/*
	 * Enable Clock Gating Control for DMC
	 * this saves around 25 mw dmc power as compared to the power
	 * consumption without these bits enabled
	 */
	setbits_le32(&drex0->cgcontrol, DMC_INTERNAL_CG);
	setbits_le32(&drex1->cgcontrol, DMC_INTERNAL_CG);