common: Drop linux/delay.h from common header
[oweals/u-boot.git] arch/arm/mach-sunxi/dram_sun9i.c
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * sun9i dram controller initialisation
4  *
5  * (C) Copyright 2007-2015
6  * Allwinner Technology Co., Ltd. <www.allwinnertech.com>
7  * Jerry Wang <wangflord@allwinnertech.com>
8  *
9  * (C) Copyright 2016 Theobroma Systems Design und Consulting GmbH
10  *                    Philipp Tomsich <philipp.tomsich@theobroma-systems.com>
11  */
12
13 #include <common.h>
14 #include <dm.h>
15 #include <errno.h>
16 #include <init.h>
17 #include <log.h>
18 #include <ram.h>
19 #include <asm/io.h>
20 #include <asm/arch/clock.h>
21 #include <asm/arch/dram.h>
22 #include <asm/arch/sys_proto.h>
23
24 #define DRAM_CLK (CONFIG_DRAM_CLK * 1000000)
25
26 /*
27  * The following amounts to an extensive rewrite of the code received from
28  * Allwinner as part of the open-source bootloader release (refer to
29  * https://github.com/allwinner-zh/bootloader.git) and augments the upstream
30  * sources (which act as the primary reference point for the inner workings
31  * of the 'underdocumented' DRAM controller in the A80) using the following
32  * documentation for other memory controllers based on the (Synopsys)
33  * DesignWare IP (DDR memory protocol controller and DDR PHY):
34  *   * TI Keystone II Architecture: DDR3 Memory Controller, User's Guide
35  *     Document 'SPRUHN7C', Oct 2013 (revised March 2015)
36  *   * Xilinx Zynq UltraScale+ MPSoC Register Reference
37  *     document ug1087 (v1.0)
38  * Note that the Zynq documentation provides a very close match for the DDR
39  * memory protocol controller (and is a very good guide to the rounding
40  * rules for various timings), whereas the TI Keystone II document should be
41  * referred to for DDR PHY specifics only.
42  *
43  * The DRAM controller in the A80 runs at half the frequency of the DDR PHY
44  * (i.e. the rules for MEMC_FREQ_RATIO=2 from the Zynq documentation apply).
45  *
46  * Known limitations
47  * =================
48  * In the current state, the following features are not fully supported and
49  * a number of simplifying assumptions have been made:
50  *   1) Only DDR3 support is implemented, as our test platform (the A80-Q7
51  *      module) is designed to accommodate DDR3/DDR3L.
52  *   2) Only 2T-mode has been implemented and tested.
53  *   3) The controller supports two different clocking strategies (PLL6 can
54  *      either be 2*CK or CK/2)... we only support the 2*CK clock at this
55  *      time and haven't verified whether the alternative clocking strategy
56  *      works.  If you are interested in porting this over/testing this,
57  *      please refer to cases where bit 0 of 'dram_tpr8' is tested in the
58  *      original code from Allwinner.
59  *   4) Support for 2 ranks per controller is not implemented (as we don't
60  *      have the hardware to test it).
61  *
62  * Future directions
63  * =================
64  * The driver should be driven from a device-tree based configuration that
65  * can dynamically provide the necessary timing parameters (i.e. target
66  * frequency and speed-bin information)---the data structures used in the
67  * calculation of the timing parameters are already designed to capture
68  * similar information as the device tree would provide.
69  *
70  * To enable a device-tree based configuration of the sun9i platform, we
71  * will need to enable CONFIG_TPL and bootstrap in 3 stages: initially
72  * into SRAM A1 (40KB) and next into SRAM A2 (160KB)---which would be the
73  * stage to initialise the platform via the device-tree---before having
74  * the full U-Boot run from DDR.
75  */
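/*
 * Illustrative clock-tree example (not from the original sources; it assumes
 * a hypothetical CONFIG_DRAM_CLK of 672):
 *   PLL6                = 2 * CK   = 1344 MHz  (clock_set_pll6() below)
 *   DDR PHY / DRAM CK   = 672 MHz
 *   memory controller   = PLL6 / 4 = 336 MHz   (mctl_com->rmcr is set to 2)
 * i.e. the controller runs at half the PHY rate, which is why the nCK
 * timings are halved (MCTL_DIV2) before being written to the DRAMTMG
 * registers in mctl_channel_init().
 */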
76
77 /*
78  * A number of DDR3 timings are given as "the greater of a fixed number of
79  * clock cycles (CK) or nanoseconds".  We express these using a structure
80  * that holds a cycle count and a duration in picoseconds (so we can model
81  * sub-ns timings, such as 7.5ns, without losing precision or resorting to
82  * rounding up early).
83  */
84 struct dram_sun9i_timing {
85         u32 ck;
86         u32 ps;
87 };
88
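/*
 * Illustrative sketch only: this helper is not part of the original driver
 * and is not used below.  It merely spells out how such a constraint
 * resolves to a cycle count at a given clock, mirroring the
 * MAX(t.ck, PS2CYCLES_ROUNDUP(t.ps)) pattern open-coded in
 * mctl_channel_init().
 */
static inline u32 dram_sun9i_timing_to_ck(const struct dram_sun9i_timing *t,
                                          u32 clk_mhz)
{
        /* convert the picosecond constraint to cycles, rounding up */
        u32 ps_as_ck = (t->ps * clk_mhz + 999999) / 1000000;

        return (t->ck > ps_as_ck) ? t->ck : ps_as_ck;
}
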
89 /* CL/CWL pair valid for a given clock-period (tCK) range of the speed bin */
90 struct dram_sun9i_cl_cwl_timing {
91         u32 CL;
92         u32 CWL;
93         u32 tCKmin;  /* in ps */
94         u32 tCKmax;  /* in ps */
95 };
96
97 struct dram_sun9i_para {
98         u32 dram_type;
99
100         u8 bus_width;
101         u8 chan;
102         u8 rank;
103         u8 rows;
104         u16 page_size;
105
106         /* Timing information for each speed-bin */
107         struct dram_sun9i_cl_cwl_timing *cl_cwl_table;
108         u32 cl_cwl_numentries;
109
110         /*
111          * For the timings, we try to keep the order and grouping used in
112          * JEDEC Standard No. 79-3F
113          */
114
115         /* timings */
116         u32 tREFI; /* in ns */
117         u32 tRFC;  /* in ns */
118
119         u32 tRAS;  /* in ps */
120
121         /* command and address timing */
122         u32 tDLLK; /* in nCK */
123         struct dram_sun9i_timing tRTP;
124         struct dram_sun9i_timing tWTR;
125         u32 tWR;   /* in ns */
126         u32 tMRD;  /* in nCK */
127         struct dram_sun9i_timing tMOD;
128         u32 tRCD;  /* in ps */
129         u32 tRP;   /* in ps */
130         u32 tRC;   /* in ps */
131         u32 tCCD;  /* in nCK */
132         struct dram_sun9i_timing tRRD;
133         u32 tFAW;  /* in ps */
134
135         /* calibration timing */
136         /* struct dram_sun9i_timing tZQinit; */
137         struct dram_sun9i_timing tZQoper;
138         struct dram_sun9i_timing tZQCS;
139
140         /* reset timing */
141         /* struct dram_sun9i_timing tXPR; */
142
143         /* self-refresh timings */
144         struct dram_sun9i_timing tXS;
145         u32 tXSDLL; /* in nCK */
146         /* struct dram_sun9i_timing tCKESR; */
147         struct dram_sun9i_timing tCKSRE;
148         struct dram_sun9i_timing tCKSRX;
149
150         /* power-down timings */
151         struct dram_sun9i_timing tXP;
152         struct dram_sun9i_timing tXPDLL;
153         struct dram_sun9i_timing tCKE;
154
155         /* write leveling timings */
156         u32 tWLMRD;    /* min, in nCK */
157         /* u32 tWLDQSEN;  min, in nCK */
158         u32 tWLO;      /* max, in ps */
159         /* u32 tWLOE;     max, in ps */
160
161         /* u32 tCKDPX;    in nCK */
162         /* u32 tCKCSX;    in nCK */
163 };
164
165 static void mctl_sys_init(void);
166
167 #define SCHED_RDWR_IDLE_GAP(n)            ((n & 0xff) << 24)
168 #define SCHED_GO2CRITICAL_HYSTERESIS(n)   ((n & 0xff) << 16)
169 #define SCHED_LPR_NUM_ENTRIES(n)          ((n & 0xff) <<  8)
170 #define SCHED_PAGECLOSE                   (1 << 2)
171 #define SCHED_PREFER_WRITE                (1 << 1)
172 #define SCHED_FORCE_LOW_PRI_N             (1 << 0)
173
174 #define SCHED_CONFIG            (SCHED_RDWR_IDLE_GAP(0xf) | \
175                                  SCHED_GO2CRITICAL_HYSTERESIS(0x80) | \
176                                  SCHED_LPR_NUM_ENTRIES(0x20) | \
177                                  SCHED_FORCE_LOW_PRI_N)
178 #define PERFHPR0_CONFIG                   0x0000001f
179 #define PERFHPR1_CONFIG                   0x1f00001f
180 #define PERFLPR0_CONFIG                   0x000000ff
181 #define PERFLPR1_CONFIG                   0x0f0000ff
182 #define PERFWR0_CONFIG                    0x000000ff
183 #define PERFWR1_CONFIG                    0x0f0001ff
184
185 static void mctl_ctl_sched_init(unsigned long  base)
186 {
187         struct sunxi_mctl_ctl_reg *mctl_ctl =
188                 (struct sunxi_mctl_ctl_reg *)base;
189
190         /* Needs to be done before the global clk enable... */
191         writel(SCHED_CONFIG, &mctl_ctl->sched);
192         writel(PERFHPR0_CONFIG, &mctl_ctl->perfhpr0);
193         writel(PERFHPR1_CONFIG, &mctl_ctl->perfhpr1);
194         writel(PERFLPR0_CONFIG, &mctl_ctl->perflpr0);
195         writel(PERFLPR1_CONFIG, &mctl_ctl->perflpr1);
196         writel(PERFWR0_CONFIG, &mctl_ctl->perfwr0);
197         writel(PERFWR1_CONFIG, &mctl_ctl->perfwr1);
198 }
199
200 static void mctl_sys_init(void)
201 {
202         struct sunxi_ccm_reg * const ccm =
203                 (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
204         struct sunxi_mctl_com_reg * const mctl_com =
205                 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
206
207         debug("Setting PLL6 to %d\n", DRAM_CLK * 2);
208         clock_set_pll6(DRAM_CLK * 2);
209
210         /* Original dram init code which may come in handy later
211         ********************************************************
212         clock_set_pll6(use_2channelPLL ? (DRAM_CLK * 2) :
213                                          (DRAM_CLK / 2), false);
214
215         if ((para->dram_clk <= 400)|((para->dram_tpr8 & 0x1)==0)) {
216                  * PLL6 should be 2*CK *
217                  * ccm_setup_pll6_ddr_clk(PLL6_DDR_CLK); *
218                 ccm_setup_pll6_ddr_clk((1000000 * (para->dram_clk) * 2), 0);
219         } else {
220                  * PLL6 should be CK/2 *
221                 ccm_setup_pll6_ddr_clk((1000000 * (para->dram_clk) / 2), 1);
222         }
223
224         if (para->dram_tpr13 & (0xf<<18)) {
225                  *
226                  * bit21:bit18=0001:pll swing 0.4
227                  * bit21:bit18=0010:pll swing 0.3
228                  * bit21:bit18=0100:pll swing 0.2
229                  * bit21:bit18=1000:pll swing 0.1
230                  *
231                 dram_dbg("DRAM fre extend open !\n");
232                 reg_val=mctl_read_w(CCM_PLL6_DDR_REG);
233                 reg_val&=(0x1<<16);
234                 reg_val=reg_val>>16;
235
236                 if(para->dram_tpr13 & (0x1<<18))
237                 {
238                         mctl_write_w(CCM_PLL_BASE + 0x114,
239                                 (0x3333U|(0x3<<17)|(reg_val<<19)|(0x120U<<20)|
240                                 (0x2U<<29)|(0x1U<<31)));
241                 }
242                 else if(para->dram_tpr13 & (0x1<<19))
243                 {
244                         mctl_write_w(CCM_PLL_BASE + 0x114,
245                                 (0x6666U|(0x3U<<17)|(reg_val<<19)|(0xD8U<<20)|
246                                 (0x2U<<29)|(0x1U<<31)));
247                 }
248                 else if(para->dram_tpr13 & (0x1<<20))
249                 {
250                         mctl_write_w(CCM_PLL_BASE + 0x114,
251                                 (0x9999U|(0x3U<<17)|(reg_val<<19)|(0x90U<<20)|
252                                 (0x2U<<29)|(0x1U<<31)));
253                 }
254                 else if(para->dram_tpr13 & (0x1<<21))
255                 {
256                         mctl_write_w(CCM_PLL_BASE + 0x114,
257                                 (0xccccU|(0x3U<<17)|(reg_val<<19)|(0x48U<<20)|
258                                 (0x2U<<29)|(0x1U<<31)));
259                 }
260
261                 //frequency extend open
262                 reg_val = mctl_read_w(CCM_PLL6_DDR_REG);
263                 reg_val |= ((0x1<<24)|(0x1<<30));
264                 mctl_write_w(CCM_PLL6_DDR_REG, reg_val);
265
266
267                 while(mctl_read_w(CCM_PLL6_DDR_REG) & (0x1<<30));
268         }
269
270         aw_delay(0x20000);      //make some delay
271         ********************************************************
272         */
273
274         /* assert mctl reset */
275         clrbits_le32(&ccm->ahb_reset0_cfg, 1 << AHB_RESET_OFFSET_MCTL);
276         /* stop mctl clock */
277         clrbits_le32(&ccm->ahb_gate0, 1 << AHB_GATE_OFFSET_MCTL);
278
279         sdelay(2000);
280
281         /* deassert mctl reset */
282         setbits_le32(&ccm->ahb_reset0_cfg, 1 << AHB_RESET_OFFSET_MCTL);
283         /* enable mctl clock */
284         setbits_le32(&ccm->ahb_gate0, 1 << AHB_GATE_OFFSET_MCTL);
285
286         /* set up the transaction scheduling before enabling the global clk */
287         mctl_ctl_sched_init(SUNXI_DRAM_CTL0_BASE);
288         mctl_ctl_sched_init(SUNXI_DRAM_CTL1_BASE);
289         sdelay(1000);
290
291         debug("2\n");
292
293         /* (3 << 12): PLL_DDR */
294         writel((3 << 12) | (1 << 16), &ccm->dram_clk_cfg);
295         do {
296                 debug("Waiting for DRAM_CLK_CFG\n");
297                 sdelay(10000);
298         } while (readl(&ccm->dram_clk_cfg) & (1 << 16));
299         setbits_le32(&ccm->dram_clk_cfg, (1 << 31));
300
301         /* TODO: we only support the common case ... i.e. 2*CK */
302         setbits_le32(&mctl_com->ccr, (1 << 14) | (1 << 30));
303         writel(2, &mctl_com->rmcr); /* controller clock is PLL6/4 */
304
305         sdelay(2000);
306
307         /* Original dram init code which may come in handy later
308         ********************************************************
309         if ((para->dram_clk <= 400) | ((para->dram_tpr8 & 0x1) == 0)) {
310                  * PLL6 should be 2*CK *
311                  * gating 2 channel pll *
312                 reg_val = mctl_read_w(MC_CCR);
313                 reg_val |= ((0x1 << 14) | (0x1U << 30));
314                 mctl_write_w(MC_CCR, reg_val);
315                 mctl_write_w(MC_RMCR, 0x2); * controller clock use pll6/4 *
316         } else {
317                  * enable 2 channel pll *
318                 reg_val = mctl_read_w(MC_CCR);
319                 reg_val &= ~((0x1 << 14) | (0x1U << 30));
320                 mctl_write_w(MC_CCR, reg_val);
321                 mctl_write_w(MC_RMCR, 0x0); * controller clock use pll6 *
322         }
323
324         reg_val = mctl_read_w(MC_CCR);
325         reg_val &= ~((0x1<<15)|(0x1U<<31));
326         mctl_write_w(MC_CCR, reg_val);
327         aw_delay(20);
328         //aw_delay(0x10);
329         ********************************************************
330         */
331
332         clrbits_le32(&mctl_com->ccr, MCTL_CCR_CH0_CLK_EN | MCTL_CCR_CH1_CLK_EN);
333         sdelay(1000);
334
335         setbits_le32(&mctl_com->ccr, MCTL_CCR_CH0_CLK_EN);
336         /* TODO if (para->chan == 2) */
337         setbits_le32(&mctl_com->ccr, MCTL_CCR_CH1_CLK_EN);
338 }
339
340 static void mctl_com_init(struct dram_sun9i_para *para)
341 {
342         struct sunxi_mctl_com_reg * const mctl_com =
343                 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
344
345         /* TODO: hard-wired for DDR3 now */
346         writel(((para->chan == 2) ? MCTL_CR_CHANNEL_DUAL :
347                                     MCTL_CR_CHANNEL_SINGLE)
348                | MCTL_CR_DRAMTYPE_DDR3 | MCTL_CR_BANK(1)
349                | MCTL_CR_ROW(para->rows)
350                | ((para->bus_width == 32) ? MCTL_CR_BUSW32 : MCTL_CR_BUSW16)
351                | MCTL_CR_PAGE_SIZE(para->page_size) | MCTL_CR_RANK(para->rank),
352                &mctl_com->cr);
353
354         debug("CR: %d\n", readl(&mctl_com->cr));
355 }
356
357 static u32 mctl_channel_init(u32 ch_index, struct dram_sun9i_para *para)
358 {
359         struct sunxi_mctl_ctl_reg *mctl_ctl;
360         struct sunxi_mctl_phy_reg *mctl_phy;
361
362         u32 CL = 0;
363         u32 CWL = 0;
364         u16 mr[4] = { 0, };
365
366 #define PS2CYCLES_FLOOR(n)    ((n * CONFIG_DRAM_CLK) / 1000000)
367 #define PS2CYCLES_ROUNDUP(n)  ((n * CONFIG_DRAM_CLK + 999999) / 1000000)
368 #define NS2CYCLES_FLOOR(n)    ((n * CONFIG_DRAM_CLK) / 1000)
369 #define NS2CYCLES_ROUNDUP(n)  ((n * CONFIG_DRAM_CLK + 999) / 1000)
370 #define MAX(a, b)             ((a) > (b) ? (a) : (b))
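        /*
         * Example (illustrative only, assuming a hypothetical CONFIG_DRAM_CLK
         * of 672): tRCD = 13750 ps gives 13750 * 672 / 10^6 = 9.24 cycles, so
         * PS2CYCLES_ROUNDUP() yields 10 nCK, whereas PS2CYCLES_FLOOR() would
         * give 9.  Minimum timings are rounded up so they are never violated.
         */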
371
372         /*
373          * Convert the values to cycle counts (nCK) from what is provided
374          * by the definition of each speed bin.
375          */
376         /* const u32 tREFI = NS2CYCLES_FLOOR(para->tREFI); */
377         const u32 tREFI = NS2CYCLES_FLOOR(para->tREFI);
378         const u32 tRFC  = NS2CYCLES_ROUNDUP(para->tRFC);
379         const u32 tRCD  = PS2CYCLES_ROUNDUP(para->tRCD);
380         const u32 tRP   = PS2CYCLES_ROUNDUP(para->tRP);
381         const u32 tRC   = PS2CYCLES_ROUNDUP(para->tRC);
382         const u32 tRAS  = PS2CYCLES_ROUNDUP(para->tRAS);
383
384         /* command and address timing */
385         const u32 tDLLK = para->tDLLK;
386         const u32 tRTP  = MAX(para->tRTP.ck, PS2CYCLES_ROUNDUP(para->tRTP.ps));
387         const u32 tWTR  = MAX(para->tWTR.ck, PS2CYCLES_ROUNDUP(para->tWTR.ps));
388         const u32 tWR   = NS2CYCLES_FLOOR(para->tWR);
389         const u32 tMRD  = para->tMRD;
390         const u32 tMOD  = MAX(para->tMOD.ck, PS2CYCLES_ROUNDUP(para->tMOD.ps));
391         const u32 tCCD  = para->tCCD;
392         const u32 tRRD  = MAX(para->tRRD.ck, PS2CYCLES_ROUNDUP(para->tRRD.ps));
393         const u32 tFAW  = PS2CYCLES_ROUNDUP(para->tFAW);
394
395         /* calibration timings */
396         /* const u32 tZQinit = MAX(para->tZQinit.ck,
397                                 PS2CYCLES_ROUNDUP(para->tZQinit.ps)); */
398         const u32 tZQoper = MAX(para->tZQoper.ck,
399                                 PS2CYCLES_ROUNDUP(para->tZQoper.ps));
400         const u32 tZQCS   = MAX(para->tZQCS.ck,
401                                 PS2CYCLES_ROUNDUP(para->tZQCS.ps));
402
403         /* reset timing */
404         /* const u32 tXPR  = MAX(para->tXPR.ck,
405                                 PS2CYCLES_ROUNDUP(para->tXPR.ps)); */
406
407         /* power-down timings */
408         const u32 tXP    = MAX(para->tXP.ck, PS2CYCLES_ROUNDUP(para->tXP.ps));
409         const u32 tXPDLL = MAX(para->tXPDLL.ck,
410                                PS2CYCLES_ROUNDUP(para->tXPDLL.ps));
411         const u32 tCKE   = MAX(para->tCKE.ck, PS2CYCLES_ROUNDUP(para->tCKE.ps));
412
413         /*
414          * self-refresh timings (keep below power-down timings, as tCKESR
415          * needs to be calculated based on the nCK value of tCKE)
416          */
417         const u32 tXS    = MAX(para->tXS.ck, PS2CYCLES_ROUNDUP(para->tXS.ps));
418         const u32 tXSDLL = para->tXSDLL;
419         const u32 tCKSRE = MAX(para->tCKSRE.ck,
420                                PS2CYCLES_ROUNDUP(para->tCKSRE.ps));
421         const u32 tCKESR = tCKE + 1;
422         const u32 tCKSRX = MAX(para->tCKSRX.ck,
423                                PS2CYCLES_ROUNDUP(para->tCKSRX.ps));
424
425         /* write leveling timings */
426         const u32 tWLMRD = para->tWLMRD;
427         /* const u32 tWLDQSEN = para->tWLDQSEN; */
428         const u32 tWLO = PS2CYCLES_FLOOR(para->tWLO);
429         /* const u32 tWLOE = PS2CYCLES_FLOOR(para->tWLOE); */
430
431         const u32 tRASmax = tREFI * 9;
432         int i;
433
434         for (i = 0; i < para->cl_cwl_numentries; ++i) {
435                 const u32 tCK = 1000000 / CONFIG_DRAM_CLK;
436
437                 if ((para->cl_cwl_table[i].tCKmin <= tCK) &&
438                     (tCK < para->cl_cwl_table[i].tCKmax)) {
439                         CL = para->cl_cwl_table[i].CL;
440                         CWL = para->cl_cwl_table[i].CWL;
441
442                         debug("found CL/CWL: CL = %d, CWL = %d\n", CL, CWL);
443                         break;
444                 }
445         }
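        /*
         * Example (illustrative only): with a hypothetical CONFIG_DRAM_CLK of
         * 672, tCK = 10^6 / 672 = 1488 ps, which falls into the [1250, 1500)
         * ps entry of the table passed in by sunxi_dram_init() and therefore
         * selects CL = 11 and CWL = 8.
         */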
446
447         if ((CL == 0) && (CWL == 0)) {
448                 printf("failed to find valid CL/CWL for operating point %d MHz\n",
449                        CONFIG_DRAM_CLK);
450                 return 0;
451         }
452
453         if (ch_index == 0) {
454                 mctl_ctl = (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
455                 mctl_phy = (struct sunxi_mctl_phy_reg *)SUNXI_DRAM_PHY0_BASE;
456         } else {
457                 mctl_ctl = (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL1_BASE;
458                 mctl_phy = (struct sunxi_mctl_phy_reg *)SUNXI_DRAM_PHY1_BASE;
459         }
460
461         if (para->dram_type == DRAM_TYPE_DDR3) {
462                 mr[0] = DDR3_MR0_PPD_FAST_EXIT | DDR3_MR0_WR(tWR) |
463                         DDR3_MR0_CL(CL);
464                 mr[1] = DDR3_MR1_RTT120OHM;
465                 mr[2] = DDR3_MR2_TWL(CWL);
466                 mr[3] = 0;
467
468                 /*
469                  * DDR3 initialisation requires holding CKE LOW for
470                  * at least 500us prior to starting the initialisation
471                  * sequence (and at least 10ns must elapse after driving
472                  * CKE HIGH before the initialisation sequence may start).
473                  *
474                  * Refer to the Micron document "TN-41-07: DDR3 Power-Up,
475                  * Initialization, and Reset DDR3 Initialization
476                  * Routine" for details.
477                  */
478                 writel(MCTL_INIT0_POST_CKE_x1024(1) |
479                        MCTL_INIT0_PRE_CKE_x1024(
480                             (500 * CONFIG_DRAM_CLK + 1023) / 1024), /* 500us */
481                        &mctl_ctl->init[0]);
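                /*
                 * The PRE_CKE value above is in units of 1024 clock cycles,
                 * rounded up: e.g. at a hypothetical 672 MHz DRAM clock,
                 * 500us = 336000 cycles, giving (336000 + 1023) / 1024 = 329.
                 */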
482                 writel(MCTL_INIT1_DRAM_RSTN_x1024(1),
483                        &mctl_ctl->init[1]);
484                 /* INIT2 is not used for DDR3 */
485                 writel(MCTL_INIT3_MR(mr[0]) | MCTL_INIT3_EMR(mr[1]),
486                        &mctl_ctl->init[3]);
487                 writel(MCTL_INIT4_EMR2(mr[2]) | MCTL_INIT4_EMR3(mr[3]),
488                        &mctl_ctl->init[4]);
489                 writel(MCTL_INIT5_DEV_ZQINIT_x32(512 / 32), /* 512 cycles */
490                        &mctl_ctl->init[5]);
491         } else {
492                 /* !!! UNTESTED !!! */
493                 /*
494                  * LPDDR2 and/or LPDDR3 require a 200us minimum delay
495                  * after driving CKE HIGH in the initialisation sequence.
496                  */
497                 writel(MCTL_INIT0_POST_CKE_x1024(
498                                 (200 * CONFIG_DRAM_CLK + 1023) / 1024),
499                        &mctl_ctl->init[0]);
500                 writel(MCTL_INIT1_DRAM_RSTN_x1024(1),
501                        &mctl_ctl->init[1]);
502                 writel(MCTL_INIT2_IDLE_AFTER_RESET_x32(
503                                 (CONFIG_DRAM_CLK + 31) / 32) /* 1us */
504                        | MCTL_INIT2_MIN_STABLE_CLOCK_x1(5),  /* 5 cycles */
505                        &mctl_ctl->init[2]);
506                 writel(MCTL_INIT3_MR(mr[1]) | MCTL_INIT3_EMR(mr[2]),
507                        &mctl_ctl->init[3]);
508                 writel(MCTL_INIT4_EMR2(mr[3]),
509                        &mctl_ctl->init[4]);
510                 writel(MCTL_INIT5_DEV_ZQINIT_x32(
511                                 (CONFIG_DRAM_CLK + 31) / 32) /* 1us */
512                        | MCTL_INIT5_MAX_AUTO_INIT_x1024(
513                                 (10 * CONFIG_DRAM_CLK + 1023) / 1024),
514                        &mctl_ctl->init[5]);
515         }
516
517         /* (DDR3) We always use a burst-length of 8. */
518 #define MCTL_BL               8
519         /* wr2pre: WL + BL/2 + tWR */
520 #define WR2PRE           (MCTL_BL/2 + CWL + tWTR)
521         /* wr2rd = CWL + BL/2 + tWTR */
522 #define WR2RD            (MCTL_BL/2 + CWL + tWTR)
523         /*
524          * rd2wr = RL + BL/2 + 2 - WL (for DDR3)
525          * rd2wr = RL + BL/2 + RU(tDQSCKmax/tCK) + 1 - WL (for LPDDR2/LPDDR3)
526          */
527 #define RD2WR            (CL + MCTL_BL/2 + 2 - CWL)
528 #define MCTL_PHY_TRTW        0
529 #define MCTL_PHY_TRTODT      0
530
531 #define MCTL_DIV2(n)         ((n + 1)/2)
532 #define MCTL_DIV32(n)        (n/32)
533 #define MCTL_DIV1024(n)      (n/1024)
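        /*
         * The DRAMTMG fields below are programmed in controller clocks, i.e.
         * at half the PHY/DRAM rate (MEMC_FREQ_RATIO=2), so each nCK value is
         * halved with the rounding-up MCTL_DIV2().  Example with the
         * hypothetical CL = 11 / CWL = 8 from above:
         *   RD2WR = 11 + 8/2 + 2 - 8 = 9, programmed as MCTL_DIV2(9) = 5.
         */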
534
535         writel((MCTL_DIV2(WR2PRE) << 24) | (MCTL_DIV2(tFAW) << 16) |
536                (MCTL_DIV1024(tRASmax) << 8) | (MCTL_DIV2(tRAS) << 0),
537                &mctl_ctl->dramtmg[0]);
538         writel((MCTL_DIV2(tXP) << 16) | (MCTL_DIV2(tRTP) << 8) |
539                (MCTL_DIV2(tRC) << 0),
540                &mctl_ctl->dramtmg[1]);
541         writel((MCTL_DIV2(CWL) << 24) | (MCTL_DIV2(CL) << 16) |
542                (MCTL_DIV2(RD2WR) << 8) | (MCTL_DIV2(WR2RD) << 0),
543                &mctl_ctl->dramtmg[2]);
544         /*
545          * Note: tMRW is located at bit 16 (and up) in DRAMTMG3...
546          * this is only relevant for LPDDR2/LPDDR3
547          */
548         writel((MCTL_DIV2(tMRD) << 12) | (MCTL_DIV2(tMOD) << 0),
549                &mctl_ctl->dramtmg[3]);
550         writel((MCTL_DIV2(tRCD) << 24) | (MCTL_DIV2(tCCD) << 16) |
551                (MCTL_DIV2(tRRD) << 8) | (MCTL_DIV2(tRP) << 0),
552                &mctl_ctl->dramtmg[4]);
553         writel((MCTL_DIV2(tCKSRX) << 24) | (MCTL_DIV2(tCKSRE) << 16) |
554                (MCTL_DIV2(tCKESR) << 8) | (MCTL_DIV2(tCKE) << 0),
555                &mctl_ctl->dramtmg[5]);
556
557         /* These timings are relevant for LPDDR2/LPDDR3 only */
558         /* writel((MCTL_TCKDPDE << 24) | (MCTL_TCKDPX << 16) |
559                (MCTL_TCKCSX << 0), &mctl_ctl->dramtmg[6]); */
560
561         /* printf("DRAMTMG7 reset value: 0x%x\n",
562                 readl(&mctl_ctl->dramtmg[7])); */
563         /* DRAMTMG7 reset value: 0x202 */
564         /* DRAMTMG7 should contain t_ckpde and t_ckpdx: check reset values!!! */
565         /* printf("DRAMTMG8 reset value: 0x%x\n",
566                 readl(&mctl_ctl->dramtmg[8])); */
567         /* DRAMTMG8 reset value: 0x44 */
568
569         writel((MCTL_DIV32(tXSDLL) << 0), &mctl_ctl->dramtmg[8]);
570
571         writel((MCTL_DIV32(tREFI) << 16) | (MCTL_DIV2(tRFC) << 0),
572                &mctl_ctl->rfshtmg);
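        /*
         * Example of the computation above (hypothetical 672 MHz DRAM clock):
         * tREFI = 7800 ns converts to 5241 nCK, of which MCTL_DIV32() keeps
         * 163; tRFC = 260 ns converts to 175 nCK, written as
         * MCTL_DIV2(175) = 88 controller clocks.
         */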
573
574         if (para->dram_type == DRAM_TYPE_DDR3) {
575                 writel((2 << 24) | ((MCTL_DIV2(CL) - 2) << 16) |
576                        (1 << 8) | ((MCTL_DIV2(CWL) - 2) << 0),
577                         &mctl_ctl->dfitmg[0]);
578         } else {
579                 /* TODO */
580         }
581
582         /* TODO: handle the case of the write latency domain going to 0 ... */
583
584         /*
585          * Disable dfi_init_complete_en (the triggering of the SDRAM
586          * initialisation when the PHY initialisation completes).
587          */
588         clrbits_le32(&mctl_ctl->dfimisc, MCTL_DFIMISC_DFI_INIT_COMPLETE_EN);
589         /* Disable the automatic generation of DLL calibration requests */
590         setbits_le32(&mctl_ctl->dfiupd[0], MCTL_DFIUPD0_DIS_AUTO_CTRLUPD);
591
592         /* A80-Q7: 2T, 1 rank, DDR3, full-32bit-DQ */
593         /* TODO: make 2T and BUSWIDTH configurable  */
594         writel(MCTL_MSTR_DEVICETYPE(para->dram_type) |
595                MCTL_MSTR_BURSTLENGTH(para->dram_type) |
596                MCTL_MSTR_ACTIVERANKS(para->rank) |
597                MCTL_MSTR_2TMODE | MCTL_MSTR_BUSWIDTH32,
598                &mctl_ctl->mstr);
599
600         if (para->dram_type == DRAM_TYPE_DDR3) {
601                 writel(MCTL_ZQCTRL0_TZQCL(MCTL_DIV2(tZQoper)) |
602                        (MCTL_DIV2(tZQCS)), &mctl_ctl->zqctrl[0]);
603                 /*
604                  * TODO: is the following really necessary as the bottom
605                  * half should already be 0x100 and the upper half should
606                  * be ignored for a DDR3 device???
607                  */
608                 writel(MCTL_ZQCTRL1_TZQSI_x1024(0x100),
609                        &mctl_ctl->zqctrl[1]);
610         } else {
611                 writel(MCTL_ZQCTRL0_TZQCL(0x200) | MCTL_ZQCTRL0_TZQCS(0x40),
612                        &mctl_ctl->zqctrl[0]);
613                 writel(MCTL_ZQCTRL1_TZQRESET(0x28) |
614                        MCTL_ZQCTRL1_TZQSI_x1024(0x100),
615                        &mctl_ctl->zqctrl[1]);
616         }
617
618         /* Assert dfi_init_complete signal */
619         setbits_le32(&mctl_ctl->dfimisc, MCTL_DFIMISC_DFI_INIT_COMPLETE_EN);
620         /* Disable auto-refresh */
621         setbits_le32(&mctl_ctl->rfshctl3, MCTL_RFSHCTL3_DIS_AUTO_REFRESH);
622
623         /* PHY initialisation */
624
625         /* TODO: make 2T and 8-bank mode configurable  */
626         writel(MCTL_PHY_DCR_BYTEMASK | MCTL_PHY_DCR_2TMODE |
627                MCTL_PHY_DCR_DDR8BNK | MCTL_PHY_DRAMMODE_DDR3,
628                &mctl_phy->dcr);
629
630         /* For LPDDR2 or LPDDR3, set DQSGX to 0 before training. */
631         if (para->dram_type != DRAM_TYPE_DDR3)
632                 clrbits_le32(&mctl_phy->dsgcr, (3 << 6));
633
634         writel(mr[0], &mctl_phy->mr0);
635         writel(mr[1], &mctl_phy->mr1);
636         writel(mr[2], &mctl_phy->mr2);
637         writel(mr[3], &mctl_phy->mr3);
638
639         /*
640          * The DFI PHY is running at full rate. We thus use the actual
641          * timings in clock cycles here.
642          */
643         writel((tRC << 26) | (tRRD << 22) | (tRAS << 16) |
644                (tRCD << 12) | (tRP << 8) | (tWTR << 4) | (tRTP << 0),
645                 &mctl_phy->dtpr[0]);
646         writel((tMRD << 0) | ((tMOD - 12) << 2) | (tFAW << 5) |
647                (tRFC << 11) | (tWLMRD << 20) | (tWLO << 26),
648                &mctl_phy->dtpr[1]);
649         writel((tXS << 0) | (MAX(tXP, tXPDLL) << 10) |
650                (tCKE << 15) | (tDLLK << 19) |
651                (MCTL_PHY_TRTODT << 29) | (MCTL_PHY_TRTW << 30) |
652                (((tCCD - 4) & 0x1) << 31),
653                &mctl_phy->dtpr[2]);
654
655         /* tDQSCK and tDQSCKmax are used for LPDDR2/LPDDR3 only */
656         /* writel((tDQSCK << 0) | (tDQSCKMAX << 3), &mctl_phy->dtpr[3]); */
657
658         /*
659          * We use the same values used by Allwinner's Boot0 for the PTR
660          * (PHY timing register) configuration that is tied to the PHY
661          * implementation.
662          */
663         writel(0x42C21590, &mctl_phy->ptr[0]);
664         writel(0xD05612C0, &mctl_phy->ptr[1]);
665         if (para->dram_type == DRAM_TYPE_DDR3) {
666                 const unsigned int tdinit0 = 500 * CONFIG_DRAM_CLK; /* 500us */
667                 const unsigned int tdinit1 = (360 * CONFIG_DRAM_CLK + 999) /
668                         1000; /* 360ns */
669                 const unsigned int tdinit2 = 200 * CONFIG_DRAM_CLK; /* 200us */
670                 const unsigned int tdinit3 = CONFIG_DRAM_CLK; /* 1us */
671
672                 writel((tdinit1 << 20) | tdinit0, &mctl_phy->ptr[3]);
673                 writel((tdinit3 << 18) | tdinit2, &mctl_phy->ptr[4]);
674         } else {
675                 /* LPDDR2 or LPDDR3 */
676                 const unsigned int tdinit0 = (100 * CONFIG_DRAM_CLK + 999) /
677                         1000; /* 100ns */
678                 const unsigned int tdinit1 = 200 * CONFIG_DRAM_CLK; /* 200us */
679                 const unsigned int tdinit2 = 22 * CONFIG_DRAM_CLK; /* 11us */
680                 const unsigned int tdinit3 = 2 * CONFIG_DRAM_CLK; /* 2us */
681
682                 writel((tdinit1 << 20) | tdinit0, &mctl_phy->ptr[3]);
683                 writel((tdinit3 << 18) | tdinit2, &mctl_phy->ptr[4]);
684         }
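        /*
         * For reference (hypothetical 672 MHz DRAM clock, DDR3 case above):
         * tdinit0 = 500 us = 336000 cycles and tdinit1 = 360 ns = 242 cycles,
         * so PTR[3] is written as (242 << 20) | 336000.
         */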
685
686         /* TEST ME */
687         writel(0x00203131, &mctl_phy->acmdlr);
688
689         /* TODO: can we enable this for 2 ranks, even when we don't yet know whether a second rank is present? */
690         writel(MCTL_DTCR_DEFAULT | MCTL_DTCR_RANKEN(para->rank),
691                &mctl_phy->dtcr);
692
693         /* TODO: half width */
694         debug("DX2GCR0 reset: 0x%x\n", readl(&mctl_phy->dx[2].gcr[0]));
695         writel(0x7C000285, &mctl_phy->dx[2].gcr[0]);
696         writel(0x7C000285, &mctl_phy->dx[3].gcr[0]);
697
698         clrsetbits_le32(&mctl_phy->zq[0].pr, 0xff,
699                         (CONFIG_DRAM_ZQ >>  0) & 0xff);  /* CK/CA */
700         clrsetbits_le32(&mctl_phy->zq[1].pr, 0xff,
701                         (CONFIG_DRAM_ZQ >>  8) & 0xff);  /* DX0/DX1 */
702         clrsetbits_le32(&mctl_phy->zq[2].pr, 0xff,
703                         (CONFIG_DRAM_ZQ >> 16) & 0xff);  /* DX2/DX3 */
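        /*
         * CONFIG_DRAM_ZQ thus carries one programming byte per ZQ segment:
         * bits [7:0] for CK/CA, [15:8] for DX0/DX1 and [23:16] for DX2/DX3.
         * E.g. a hypothetical CONFIG_DRAM_ZQ of 0x3b3b3b would write 0x3b
         * into each of the three zq[n].pr fields.
         */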
704
705         /* TODO: make configurable & implement non-ODT path */
706         if (1) {
707                 int lane;
708                 for (lane = 0; lane < 4; ++lane) {
709                         clrbits_le32(&mctl_phy->dx[lane].gcr[2], 0xffff);
710                         clrbits_le32(&mctl_phy->dx[lane].gcr[3],
711                                      (0x3<<12) | (0x3<<4));
712                 }
713         } else {
714                 /* TODO: check */
715                 int lane;
716                 for (lane = 0; lane < 4; ++lane) {
717                         clrsetbits_le32(&mctl_phy->dx[lane].gcr[2], 0xffff,
718                                         0xaaaa);
719                         if (para->dram_type == DRAM_TYPE_DDR3)
720                                 setbits_le32(&mctl_phy->dx[lane].gcr[3],
721                                              (0x3<<12) | (0x3<<4));
722                         else
723                                 setbits_le32(&mctl_phy->dx[lane].gcr[3],
724                                              0x00000012);
725                 }
726         }
727
728         writel(0x04058D02, &mctl_phy->zq[0].cr); /* CK/CA */
729         writel(0x04058D02, &mctl_phy->zq[1].cr); /* DX0/DX1 */
730         writel(0x04058D02, &mctl_phy->zq[2].cr); /* DX2/DX3 */
731
732         /* Disable auto-refresh prior to data training */
733         setbits_le32(&mctl_ctl->rfshctl3, MCTL_RFSHCTL3_DIS_AUTO_REFRESH);
734
735         setbits_le32(&mctl_phy->dsgcr, 0xf << 24); /* unclear what this is... */
736         /* TODO: IODDRM (IO DDR-MODE) for DDR3L */
737         clrsetbits_le32(&mctl_phy->pgcr[1],
738                         MCTL_PGCR1_ZCKSEL_MASK,
739                         MCTL_PGCR1_IODDRM_DDR3 | MCTL_PGCR1_INHVT_EN);
740
741         setbits_le32(&mctl_phy->pllcr, 0x3 << 19); /* PLL frequency select */
742         /* TODO: single-channel PLL mode??? missing */
743         setbits_le32(&mctl_phy->pllcr,
744                      MCTL_PLLGCR_PLL_BYPASS | MCTL_PLLGCR_PLL_POWERDOWN);
745         /* setbits_le32(&mctl_phy->pir, MCTL_PIR_PLL_BYPASS); included below */
746
747         /* Disable VT compensation */
748         clrbits_le32(&mctl_phy->pgcr[0], 0x3f);
749
750         /* TODO: "other" PLL mode ... 0x20000 seems to be the PLL Bypass */
751         if (para->dram_type == DRAM_TYPE_DDR3)
752                 clrsetbits_le32(&mctl_phy->pir, MCTL_PIR_MASK, 0x20df3);
753         else
754                 clrsetbits_le32(&mctl_phy->pir, MCTL_PIR_MASK, 0x2c573);
755
756         sdelay(10000); /* XXX necessary? */
757
758         /* Wait for the INIT bit to clear itself... */
759         while ((readl(&mctl_phy->pir) & MCTL_PIR_INIT) != MCTL_PIR_INIT) {
760                 /* not done yet -- keep spinning */
761                 debug("MCTL_PIR_INIT not set\n");
762                 sdelay(1000);
763                 /* TODO: implement timeout */
764         }
765
766         /* TODO: not used --- there's a "2rank debug" section here */
767
768         /* Original dram init code which may come in handy later
769         ********************************************************
770          * LPDDR2 and LPDDR3 *
771         if ((para->dram_type) == 6 || (para->dram_type) == 7) {
772                 reg_val = mctl_read_w(P0_DSGCR + ch_offset);
773                 reg_val &= (~(0x3<<6));         * set DQSGX to 1 *
774                 reg_val |= (0x1<<6);            * dqs gate extend *
775                 mctl_write_w(P0_DSGCR + ch_offset, reg_val);
776                 dram_dbg("DQS Gate Extend Enable!\n", ch_index);
777         }
778
779          * Disable ZCAL after initial--for nand dma debug--20140330 by YSZ *
780         if (para->dram_tpr13 & (0x1<<31)) {
781                 reg_val = mctl_read_w(P0_ZQ0CR + ch_offset);
782                 reg_val |= (0x7<<11);
783                 mctl_write_w(P0_ZQ0CR + ch_offset, reg_val);
784         }
785         ********************************************************
786         */
787
788         /*
789          * TODO: more 2-rank support
790          * (setting the "dqs gate delay to average between 2 rank")
791          */
792
793         /* check if any errors are set */
794         if (readl(&mctl_phy->pgsr[0]) & MCTL_PGSR0_ERRORS) {
795                 debug("Channel %d unavailable!\n", ch_index);
796                 return 0;
797         } else {
798                 /* initial OK */
799                 debug("Channel %d OK!\n", ch_index);
800                 /* return 1; */
801         }
802
803         while ((readl(&mctl_ctl->stat) & 0x1) != 0x1) {
804                 debug("Waiting for INIT to be done (controller to come up into 'normal operating' mode)\n");
805                 sdelay(100000);
806                 /* init not done */
807                 /* TODO: implement time-out */
808         }
809         debug("done\n");
810
811         /* "DDR is controlled by the controller" */
812         clrbits_le32(&mctl_phy->pgcr[3], (1 << 25));
813
814         /* TODO: is the following necessary? */
815         debug("DFIMISC before writing 0: 0x%x\n", readl(&mctl_ctl->dfimisc));
816         writel(0, &mctl_ctl->dfimisc);
817
818         /* Enable auto-refresh */
819         clrbits_le32(&mctl_ctl->rfshctl3, MCTL_RFSHCTL3_DIS_AUTO_REFRESH);
820
821         debug("channel_init complete\n");
822         return 1;
823 }
824
825 signed int DRAMC_get_dram_size(void)
826 {
827         struct sunxi_mctl_com_reg * const mctl_com =
828                 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
829
830         unsigned int reg_val;
831         unsigned int dram_size;
832         unsigned int temp;
833
834         reg_val = readl(&mctl_com->cr);
835
836         temp = (reg_val >> 8) & 0xf;    /* page size code */
837         dram_size = (temp - 6);         /* (1 << dram_size) * 512Bytes */
838
839         temp = (reg_val >> 4) & 0xf;    /* row width code */
840         dram_size += (temp + 1);        /* (1 << dram_size) * 512Bytes */
841
842         temp = (reg_val >> 2) & 0x3;    /* bank number code */
843         dram_size += (temp + 2);        /* (1 << dram_size) * 512Bytes */
844
845         temp = reg_val & 0x3;           /* rank number code */
846         dram_size += temp;              /* (1 << dram_size) * 512Bytes */
847
848         temp = (reg_val >> 19) & 0x1;   /* channel number code */
849         dram_size += temp;              /* (1 << dram_size) * 512Bytes */
850
851         dram_size = dram_size - 11;     /* (1 << dram_size) MBytes */
852
853         return 1 << dram_size;
854 }
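
/*
 * Worked example: with the default geometry that sunxi_dram_init() below
 * starts out with (4 KiB pages, 15 row bits, 8 banks, 1 rank, 2 channels),
 * the total is 4096 B * 2^15 rows * 8 banks * 1 rank * 2 channels = 2 GiB,
 * so the accumulated exponent works out to 11 and the function should
 * return 1 << 11 = 2048 (in MiB).
 */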
855
856 unsigned long sunxi_dram_init(void)
857 {
858         struct sunxi_mctl_com_reg * const mctl_com =
859                 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
860
861         struct dram_sun9i_cl_cwl_timing cl_cwl[] = {
862                 { .CL =  5, .CWL = 5, .tCKmin = 3000, .tCKmax = 3300 },
863                 { .CL =  6, .CWL = 5, .tCKmin = 2500, .tCKmax = 3300 },
864                 { .CL =  8, .CWL = 6, .tCKmin = 1875, .tCKmax = 2500 },
865                 { .CL = 10, .CWL = 7, .tCKmin = 1500, .tCKmax = 1875 },
866                 { .CL = 11, .CWL = 8, .tCKmin = 1250, .tCKmax = 1500 }
867         };
868
869         /* Set initial parameters; these get modified by the autodetect code */
870         struct dram_sun9i_para para = {
871                 .dram_type = DRAM_TYPE_DDR3,
872                 .bus_width = 32,
873                 .chan = 2,
874                 .rank = 1,
875                 /* .rank = 2, */
876                 .page_size = 4096,
877                 /* .rows = 16, */
878                 .rows = 15,
879
880                 /* CL/CWL table for the speed bin */
881                 .cl_cwl_table = cl_cwl,
882                 .cl_cwl_numentries = sizeof(cl_cwl) /
883                         sizeof(struct dram_sun9i_cl_cwl_timing),
884
885                 /* timings */
886                 .tREFI = 7800,  /* 7.8us (up to 85 degC) */
887                 .tRFC  = 260,   /* 260ns for 4GBit devices */
888                                 /* 350ns @ 8GBit */
889
890                 .tRCD  = 13750,
891                 .tRP   = 13750,
892                 .tRC   = 48750,
893                 .tRAS  = 35000,
894
895                 .tDLLK = 512,
896                 .tRTP  = { .ck = 4, .ps = 7500 },
897                 .tWTR  = { .ck = 4, .ps = 7500 },
898                 .tWR   = 15,
899                 .tMRD  = 4,
900                 .tMOD  = { .ck = 12, .ps = 15000 },
901                 .tCCD  = 4,
902                 .tRRD  = { .ck = 4, .ps = 7500 },
903                 .tFAW  = 40,
904
905                 /* calibration timing */
906                 /* .tZQinit = { .ck = 512, .ps = 640000 }, */
907                 .tZQoper = { .ck = 256, .ps = 320000 },
908                 .tZQCS   = { .ck = 64,  .ps = 80000 },
909
910                 /* reset timing */
911                 /* .tXPR  = { .ck = 5, .ps = 10000 }, */
912
913                 /* self-refresh timings */
914                 .tXS  = { .ck = 5, .ps = 10000 },
915                 .tXSDLL = 512,
916                 .tCKSRE = { .ck = 5, .ps = 10000 },
917                 .tCKSRX = { .ck = 5, .ps = 10000 },
918
919                 /* power-down timings */
920                 .tXP = { .ck = 3, .ps = 6000 },
921                 .tXPDLL = { .ck = 10, .ps = 24000 },
922                 .tCKE = { .ck = 3, .ps = 5000 },
923
924                 /* write leveling timings */
925                 .tWLMRD = 40,
926                 /* .tWLDQSEN = 25, */
927                 .tWLO = 7500,
928                 /* .tWLOE = 2000, */
929         };
930
931         /*
932          * Disable A80 internal 240 ohm resistor.
933          *
934  * This code sequence is adapted from Allwinner's Boot0 (see
935          * https://github.com/allwinner-zh/bootloader.git), as there
936          * is no documentation for these two registers in the R_PRCM
937          * block.
938          */
939         setbits_le32(SUNXI_PRCM_BASE + 0x1e0, (0x3 << 8));
940         writel(0, SUNXI_PRCM_BASE + 0x1e8);
941
942         mctl_sys_init();
943
944         if (!mctl_channel_init(0, &para))
945                 return 0;
946
947         /* dual-channel */
948         if (!mctl_channel_init(1, &para)) {
949                 /* disable channel 1 */
950                 clrsetbits_le32(&mctl_com->cr, MCTL_CR_CHANNEL_MASK,
951                                 MCTL_CR_CHANNEL_SINGLE);
952                 /* disable channel 1 global clock */
953                 clrbits_le32(&mctl_com->cr, MCTL_CCR_CH1_CLK_EN);
954         }
955
956         mctl_com_init(&para);
957
958         /* return the proper RAM size */
959         return DRAMC_get_dram_size() << 20;
960 }