/* arch/mips/mach-mscc/include/mach/ddr.h (U-Boot, mach-mscc) */
1 /* SPDX-License-Identifier: (GPL-2.0+ OR MIT) */
2 /*
3  * Copyright (c) 2018 Microsemi Corporation
4  */
5
6 #ifndef __ASM_MACH_DDR_H
7 #define __ASM_MACH_DDR_H
8
9 #include <asm/cacheops.h>
10 #include <asm/io.h>
11 #include <asm/reboot.h>
12 #include <mach/common.h>
13
14 #define MIPS_VCOREIII_MEMORY_DDR3
15 #define MIPS_VCOREIII_DDR_SIZE CONFIG_SYS_SDRAM_SIZE
16
17 #if defined(CONFIG_DDRTYPE_H5TQ1G63BFA) /* Serval1 Refboard */
18
19 /* Hynix H5TQ1G63BFA (1Gbit DDR3, x16) @ 3.20ns */
20 #define VC3_MPAR_bank_addr_cnt    3
21 #define VC3_MPAR_row_addr_cnt     13
22 #define VC3_MPAR_col_addr_cnt     10
23 #define VC3_MPAR_tREFI            2437
24 #define VC3_MPAR_tRAS_min         12
25 #define VC3_MPAR_CL               6
26 #define VC3_MPAR_tWTR             4
27 #define VC3_MPAR_tRC              16
28 #define VC3_MPAR_tFAW             16
29 #define VC3_MPAR_tRP              5
30 #define VC3_MPAR_tRRD             4
31 #define VC3_MPAR_tRCD             5
32 #define VC3_MPAR_tMRD             4
33 #define VC3_MPAR_tRFC             35
34 #define VC3_MPAR_CWL              5
35 #define VC3_MPAR_tXPR             38
36 #define VC3_MPAR_tMOD             12
37 #define VC3_MPAR_tDLLK            512
38 #define VC3_MPAR_tWR              5
39
40 #elif defined(CONFIG_DDRTYPE_MT41J128M16HA)     /* Validation board */
41
42 /* Micron MT41J128M16HA-15E:D (2Gbit DDR3, x16) @ 3.20ns */
43 #define VC3_MPAR_bank_addr_cnt    3
44 #define VC3_MPAR_row_addr_cnt     14
45 #define VC3_MPAR_col_addr_cnt     10
46 #define VC3_MPAR_tREFI            2437
47 #define VC3_MPAR_tRAS_min         12
48 #define VC3_MPAR_CL               5
49 #define VC3_MPAR_tWTR             4
50 #define VC3_MPAR_tRC              16
51 #define VC3_MPAR_tFAW             16
52 #define VC3_MPAR_tRP              5
53 #define VC3_MPAR_tRRD             4
54 #define VC3_MPAR_tRCD             5
55 #define VC3_MPAR_tMRD             4
56 #define VC3_MPAR_tRFC             50
57 #define VC3_MPAR_CWL              5
58 #define VC3_MPAR_tXPR             54
59 #define VC3_MPAR_tMOD             12
60 #define VC3_MPAR_tDLLK            512
61 #define VC3_MPAR_tWR              5
62
63 #elif defined(CONFIG_DDRTYPE_MT41K256M16)       /* JR2 Validation board */
64
65 /* Micron MT41K256M16 (4Gbit, DDR3L-800, 256Mbitx16) @ 3.20ns */
66 #define VC3_MPAR_bank_addr_cnt    3
67 #define VC3_MPAR_row_addr_cnt     15
68 #define VC3_MPAR_col_addr_cnt     10
69 #define VC3_MPAR_tREFI            2437
70 #define VC3_MPAR_tRAS_min         12
71 #define VC3_MPAR_CL               5
72 #define VC3_MPAR_tWTR             4
73 #define VC3_MPAR_tRC              16
74 #define VC3_MPAR_tFAW             16
75 #define VC3_MPAR_tRP              5
76 #define VC3_MPAR_tRRD             4
77 #define VC3_MPAR_tRCD             5
78 #define VC3_MPAR_tMRD             4
79 #define VC3_MPAR_tRFC             82
80 #define VC3_MPAR_CWL              5
81 #define VC3_MPAR_tXPR             85
82 #define VC3_MPAR_tMOD             12
83 #define VC3_MPAR_tDLLK            512
84 #define VC3_MPAR_tWR              5
85
86 #elif defined(CONFIG_DDRTYPE_H5TQ4G63MFR)       /* JR2 Reference board */
87
88 /* Hynix H5TQ4G63MFR-PBC (4Gbit, DDR3-800, 256Mbitx16) - 2kb pages @ 3.20ns */
89 #define VC3_MPAR_bank_addr_cnt    3
90 #define VC3_MPAR_row_addr_cnt     15
91 #define VC3_MPAR_col_addr_cnt     10
92 #define VC3_MPAR_tREFI            2437
93 #define VC3_MPAR_tRAS_min         12
94 #define VC3_MPAR_CL               6
95 #define VC3_MPAR_tWTR             4
96 #define VC3_MPAR_tRC              17
97 #define VC3_MPAR_tFAW             16
98 #define VC3_MPAR_tRP              5
99 #define VC3_MPAR_tRRD             4
100 #define VC3_MPAR_tRCD             5
101 #define VC3_MPAR_tMRD             4
102 #define VC3_MPAR_tRFC             82
103 #define VC3_MPAR_CWL              5
104 #define VC3_MPAR_tXPR             85
105 #define VC3_MPAR_tMOD             12
106 #define VC3_MPAR_tDLLK            512
107 #define VC3_MPAR_tWR              5
108
109 #elif defined(CONFIG_DDRTYPE_MT41K128M16JT)
110
111 /* Micron Micron MT41K128M16JT-125 (2Gbit DDR3L, 128Mbitx16) @ 3.20ns */
112 #define VC3_MPAR_bank_addr_cnt    3
113 #define VC3_MPAR_row_addr_cnt     14
114 #define VC3_MPAR_col_addr_cnt     10
115 #define VC3_MPAR_tREFI            2437
116 #define VC3_MPAR_tRAS_min         12
117 #define VC3_MPAR_CL               6
118 #define VC3_MPAR_tWTR             4
119 #define VC3_MPAR_tRC              16
120 #define VC3_MPAR_tFAW             16
121 #define VC3_MPAR_tRP              5
122 #define VC3_MPAR_tRRD             4
123 #define VC3_MPAR_tRCD             5
124 #define VC3_MPAR_tMRD             4
125 #define VC3_MPAR_tRFC             82
126 #define VC3_MPAR_CWL              5
127 #define VC3_MPAR_tXPR             85
128 #define VC3_MPAR_tMOD             12
129 #define VC3_MPAR_tDLLK            512
130 #define VC3_MPAR_tWR              5
131
132 #elif defined(CONFIG_DDRTYPE_MT47H128M8HQ)      /* Luton10/26 Refboards */
133
134 /* Micron 1Gb MT47H128M8-3 16Meg x 8 x 8 banks, DDR-533@CL4 @ 4.80ns */
135 #define VC3_MPAR_bank_addr_cnt    3
136 #define VC3_MPAR_row_addr_cnt     14
137 #define VC3_MPAR_col_addr_cnt     10
138 #define VC3_MPAR_tREFI            1625
139 #define VC3_MPAR_tRAS_min         9
140 #define VC3_MPAR_CL               4
141 #define VC3_MPAR_tWTR             2
142 #define VC3_MPAR_tRC              12
143 #define VC3_MPAR_tFAW             8
144 #define VC3_MPAR_tRP              4
145 #define VC3_MPAR_tRRD             2
146 #define VC3_MPAR_tRCD             4
147
148 #define VC3_MPAR_tRPA             4
149 #define VC3_MPAR_tRP              4
150
151 #define VC3_MPAR_tMRD             2
152 #define VC3_MPAR_tRFC             27
153
154 #define VC3_MPAR__400_ns_dly      84
155
156 #define VC3_MPAR_tWR              4
157 #undef MIPS_VCOREIII_MEMORY_DDR3
158 #else
159
160 #error Unknown DDR system configuration - please add!
161
162 #endif
163
/* All supported SoCs except Luton run the DDR interface 16 bits wide */
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MIPS_VCOREIII_MEMORY_16BIT 1
#endif

/* SSTL ODT / output drive-strength codes programmed into
 * ICPU_MEMPHY_ZCAL by hal_vcoreiii_init_memctl()
 */
#define MIPS_VCOREIII_MEMORY_SSTL_ODT 7
#define MIPS_VCOREIII_MEMORY_SSTL_DRIVE 7
#define VCOREIII_DDR_DQS_MODE_CALIBRATE

/* Numeric 0/1 mirror of MIPS_VCOREIII_MEMORY_16BIT, usable inside
 * expressions such as MSCC_MEMPARM_MEMCFG
 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_16BIT       1
#else
#define VC3_MPAR_16BIT       0
#endif
178
/* Burst mode selection: DDR3 always bursts 8; DDR2 bursts 4 on a
 * 16-bit interface and 8 on an 8-bit one.
 */
#ifdef MIPS_VCOREIII_MEMORY_DDR3
#define VC3_MPAR_DDR3_MODE    1	/* DDR3 */
#define VC3_MPAR_BURST_LENGTH 8	/* Always 8 (1) for DDR3 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_SIZE   1	/* Always 1 for DDR3/16bit */
#else
#define VC3_MPAR_BURST_SIZE   0
#endif
#else
#define VC3_MPAR_DDR3_MODE    0	/* DDR2 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_LENGTH 4	/* in DDR2 16-bit mode, use burstlen 4 */
#else
#define VC3_MPAR_BURST_LENGTH 8	/* For 8-bit IF we must run burst-8 */
#endif
#define VC3_MPAR_BURST_SIZE   0	/* Always 0 for DDR2 */
#endif
196
/* Read latency equals CAS latency for both memory types */
#define VC3_MPAR_RL VC3_MPAR_CL
#if !defined(MIPS_VCOREIII_MEMORY_DDR3)
/* DDR2 derived parameters */
#define VC3_MPAR_WL (VC3_MPAR_RL - 1)
#define VC3_MPAR_MD VC3_MPAR_tMRD
#define VC3_MPAR_ID VC3_MPAR__400_ns_dly
/* NOTE(review): VC3_MPAR_tXSRD is not defined by the DDR2 table above
 * and VC3_MPAR_SD appears unused in this file - verify before use.
 */
#define VC3_MPAR_SD VC3_MPAR_tXSRD
#define VC3_MPAR_OW (VC3_MPAR_WL - 2)
#define VC3_MPAR_OR (VC3_MPAR_WL - 3)
#define VC3_MPAR_RP (VC3_MPAR_bank_addr_cnt < 3 ? VC3_MPAR_tRP : VC3_MPAR_tRPA)
#define VC3_MPAR_FAW (VC3_MPAR_bank_addr_cnt < 3 ? 1 : VC3_MPAR_tFAW)
#define VC3_MPAR_BL (VC3_MPAR_BURST_LENGTH == 4 ? 2 : 4)
/* MR0 value - fully parenthesized so the macro is safe in any expression */
#define MSCC_MEMPARM_MR0 \
	((VC3_MPAR_BURST_LENGTH == 8 ? 3 : 2) | (VC3_MPAR_CL << 4) | \
	 ((VC3_MPAR_tWR - 1) << 9))
/* DLL-on, Full-OD, AL=0, RTT=off, nDQS-on, RDQS-off, out-en */
#define MSCC_MEMPARM_MR1 0x382
#define MSCC_MEMPARM_MR2 0
#define MSCC_MEMPARM_MR3 0
#else
/* DDR3 derived parameters */
#define VC3_MPAR_WL VC3_MPAR_CWL
#define VC3_MPAR_MD VC3_MPAR_tMOD
#define VC3_MPAR_ID VC3_MPAR_tXPR
#define VC3_MPAR_SD VC3_MPAR_tDLLK
#define VC3_MPAR_OW 2
#define VC3_MPAR_OR 2
#define VC3_MPAR_RP VC3_MPAR_tRP
#define VC3_MPAR_FAW VC3_MPAR_tFAW
#define VC3_MPAR_BL 4
/* MR0 value - fully parenthesized so the macro is safe in any expression */
#define MSCC_MEMPARM_MR0 (((VC3_MPAR_RL - 4) << 4) | ((VC3_MPAR_tWR - 4) << 9))
/* ODT_RTT: "0x0040" for 120ohm, and "0x0004" for 60ohm. */
#define MSCC_MEMPARM_MR1 0x0040
#define MSCC_MEMPARM_MR2 ((VC3_MPAR_WL - 5) << 3)
#define MSCC_MEMPARM_MR3 0
#endif				/* MIPS_VCOREIII_MEMORY_DDR3 */
231
/*
 * ICPU_MEMCTRL_CFG register value: memory geometry and interface mode
 * derived from the VC3_MPAR_* parameters above.  Fully parenthesized
 * so the macro is safe in any expression.
 */
#define MSCC_MEMPARM_MEMCFG                                             \
	(((MIPS_VCOREIII_DDR_SIZE > SZ_512M) ?                          \
	  ICPU_MEMCTRL_CFG_DDR_512MBYTE_PLUS : 0) |                     \
	 (VC3_MPAR_16BIT ? ICPU_MEMCTRL_CFG_DDR_WIDTH : 0) |            \
	 (VC3_MPAR_DDR3_MODE ? ICPU_MEMCTRL_CFG_DDR_MODE : 0) |         \
	 (VC3_MPAR_BURST_SIZE ? ICPU_MEMCTRL_CFG_BURST_SIZE : 0) |      \
	 (VC3_MPAR_BURST_LENGTH == 8 ? ICPU_MEMCTRL_CFG_BURST_LEN : 0) | \
	 (VC3_MPAR_bank_addr_cnt == 3 ? ICPU_MEMCTRL_CFG_BANK_CNT : 0) | \
	 ICPU_MEMCTRL_CFG_MSB_ROW_ADDR(VC3_MPAR_row_addr_cnt - 1) |     \
	 ICPU_MEMCTRL_CFG_MSB_COL_ADDR(VC3_MPAR_col_addr_cnt - 1))
242
/* Refresh-period and TIMING0..3 register values.  The first branch
 * covers Ocelot/Serval/JR2/ServalT (DDR3 controller layout), the
 * second covers Luton (DDR2 controller layout).
 */
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MSCC_MEMPARM_PERIOD                                     \
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(8) |               \
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0                                            \
	ICPU_MEMCTRL_TIMING0_RD_TO_WR_DLY(VC3_MPAR_RL + VC3_MPAR_BL + 1 - \
					  VC3_MPAR_WL) |                \
	ICPU_MEMCTRL_TIMING0_WR_CS_CHANGE_DLY(VC3_MPAR_BL - 1) |        \
	ICPU_MEMCTRL_TIMING0_RD_CS_CHANGE_DLY(VC3_MPAR_BL) |            \
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) |  \
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_WL +              \
					     VC3_MPAR_BL +              \
					     VC3_MPAR_tWR - 1) |        \
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BL - 1) |         \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_WL - 1) |         \
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_RL - 3)

#define MSCC_MEMPARM_TIMING1                                            \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_FAW - 1) |          \
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_RP - 1) |        \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) |        \
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) |        \
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_WL +                 \
					  VC3_MPAR_BL +                 \
					  VC3_MPAR_tWTR - 1)

#define MSCC_MEMPARM_TIMING2                                    \
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_RP - 1) |   \
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_MD - 1) |               \
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) |               \
	ICPU_MEMCTRL_TIMING2_INIT_DLY(VC3_MPAR_ID - 1)

#define MSCC_MEMPARM_TIMING3                                            \
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_WL +       \
						    VC3_MPAR_tWTR - 1) |\
	ICPU_MEMCTRL_TIMING3_ODT_RD_DLY(VC3_MPAR_OR - 1) |              \
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_OW - 1) |              \
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_RL - 3)

#else /* Luton */
#define MSCC_MEMPARM_PERIOD                                     \
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(1) |               \
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0                                            \
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) |  \
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_CL +              \
					     (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					     VC3_MPAR_tWR) |            \
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 1) | \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_CL - 3) |         \
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_CL - 3)

#define MSCC_MEMPARM_TIMING1                                            \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_tFAW - 1) |         \
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_tRP - 1) |       \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) |        \
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) |        \
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_CL +                 \
					  (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					  VC3_MPAR_tWTR)
#define MSCC_MEMPARM_TIMING2                                            \
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_tRPA - 1) |         \
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_tMRD - 1) |             \
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) |               \
	ICPU_MEMCTRL_TIMING2_FOUR_HUNDRED_NS_DLY(VC3_MPAR__400_ns_dly)

#define MSCC_MEMPARM_TIMING3                                            \
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_CL - 1) |  \
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_CL - 1) |              \
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_CL - 1)

#endif
320
/* Result codes returned by the DQS training step functions
 * (look_for(), look_past(), hal_vcoreiii_train_bytelane()).
 */
enum {
	DDR_TRAIN_OK,		/* Step succeeded */
	DDR_TRAIN_CONTINUE,	/* Delay bumped - run the step again */
	DDR_TRAIN_ERROR,	/* Delay range exhausted - training failed */
};
326
/*
 * We actually have very few 'pause' possibilities apart from
 * these assembly nops (at this very early stage).
 * Eight nops give a short, clock-frequency-dependent delay.
 */
#define PAUSE() asm volatile("nop; nop; nop; nop; nop; nop; nop; nop")
332
333 /* NB: Assumes inlining as no stack is available! */
334 static inline void set_dly(u32 bytelane, u32 dly)
335 {
336         register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
337
338         r &= ~ICPU_MEMCTRL_DQS_DLY_DQS_DLY_M;
339         r |= ICPU_MEMCTRL_DQS_DLY_DQS_DLY(dly);
340         writel(r, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
341 }
342
343 static inline bool incr_dly(u32 bytelane)
344 {
345         register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
346
347         if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
348                 writel(r + 1, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
349                 return true;
350         }
351
352         return false;
353 }
354
355 static inline bool adjust_dly(int adjust)
356 {
357         register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));
358
359         if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
360                 writel(r + adjust, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));
361                 return true;
362         }
363
364         return false;
365 }
366
367 /* NB: Assumes inlining as no stack is available! */
368 static inline void center_dly(u32 bytelane, u32 start)
369 {
370         register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane)) - start;
371
372         writel(start + (r >> 1), BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
373 }
374
/* Pulse the memory PHY FIFO reset bit: set, short pause, clear, pause. */
static inline void memphy_soft_reset(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
	clrbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
}
382
383 #if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
384         defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
385 static u8 training_data[] = { 0xfe, 0x11, 0x33, 0x55, 0x77, 0x99, 0xbb, 0xdd };
386
/* Busy-wait for @val * 100 ns using SoC timer 0 in one-shot mode.
 * NB: Assumes inlining as no stack is available!
 */
static inline void sleep_100ns(u32 val)
{
	/* Set the timer tick generator to 100 ns */
	writel(VCOREIII_TIMER_DIVIDER - 1, BASE_CFG + ICPU_TIMER_TICK_DIV);

	/* Set the timer value */
	writel(val, BASE_CFG + ICPU_TIMER_VALUE(0));

	/* Enable timer 0 for one-shot */
	writel(ICPU_TIMER_CTRL_ONE_SHOT_ENA | ICPU_TIMER_CTRL_TIMER_ENA,
	       BASE_CFG + ICPU_TIMER_CTRL(0));

	/* Wait for timer 0 to reach 0 */
	while (readl(BASE_CFG + ICPU_TIMER_VALUE(0)) != 0)
		;
}
403
404 #if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
/*
 * DDR memory sanity checking failed, tally and do hard reset
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 reset;

#if defined(CONFIG_SOC_OCELOT)
	/* Tally the failure in GPR(6) - NOTE(review): presumably read
	 * back after reboot to limit retries; confirm against the SPL
	 * restart logic.
	 */
	writel(readl(BASE_CFG + ICPU_GPR(6)) + 1, BASE_CFG + ICPU_GPR(6));

	/* Tristate GPIO 19, the DDR reset pin
	 * (see hal_vcoreiii_ddr_reset_assert())
	 */
	clrbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
#endif

	/* We have to execute the reset function from cache. Indeed,
	 * the reboot workaround in _machine_restart() will change the
	 * SPI NOR into SW bitbang.
	 *
	 * This will render the CPU unable to execute directly from
	 * the NOR, which is why the reset instructions are prefetched
	 * into the I-cache.
	 *
	 * When failing the DDR initialization we are executing from
	 * NOR.
	 *
	 * The last instruction in _machine_restart() will reset the
	 * MIPS CPU (and the cache), and the CPU will start executing
	 * from the reset vector.
	 */
	reset = KSEG0ADDR(_machine_restart);
	icache_lock((void *)reset, 128);
	asm volatile ("jr %0"::"r" (reset));

	/* Not reached - jump above does not return */
	panic("DDR init failed\n");
}
441 #else                           /* JR2 || ServalT */
/* DDR sanity checking failed on JR2/ServalT: clear ICPU_RESET and pull
 * the chip-wide soft reset; panic() is a safety net only - presumably
 * never reached once the reset takes effect.
 */
static inline void hal_vcoreiii_ddr_failed(void)
{
	writel(0, BASE_CFG + ICPU_RESET);
	writel(PERF_SOFT_RST_SOFT_CHIP_RST, BASE_CFG + PERF_SOFT_RST);

	panic("DDR init failed\n");
}
449 #endif
450
451 #if defined(CONFIG_SOC_OCELOT)
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* DDR has reset pin on GPIO 19 toggle Low-High to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_CLR);
	/* Hold the pin low for 10000 * 100 ns = 1 ms */
	sleep_100ns(10000);
}
459
static inline void hal_vcoreiii_ddr_reset_release(void)
{
	/* DDR has reset pin on GPIO 19 toggle Low-High to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_SET);
	/* Keep the pin high for 10000 * 100 ns = 1 ms before continuing */
	sleep_100ns(10000);
}
467
468 #else                           /* JR2 || ServalT || Serval */
469 static inline void hal_vcoreiii_ddr_reset_assert(void)
470 {
471         /* Ensure the memory controller physical iface is forced reset */
472         writel(readl(BASE_CFG + ICPU_MEMPHY_CFG) |
473                ICPU_MEMPHY_CFG_PHY_RST, BASE_CFG + ICPU_MEMPHY_CFG);
474
475         /* Ensure the memory controller is forced reset */
476         writel(readl(BASE_CFG + ICPU_RESET) |
477                ICPU_RESET_MEM_RST_FORCE, BASE_CFG + ICPU_RESET);
478 }
479 #endif                          /* JR2 || ServalT || Serval */
480
/*
 * DDR memory sanity checking done, possibly enable ECC.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_verified(void)
{
#ifdef MIPS_VCOREIII_MEMORY_ECC
	/* Finally, enable ECC */
	register u32 val = readl(BASE_CFG + ICPU_MEMCTRL_CFG);

	val |= ICPU_MEMCTRL_CFG_DDR_ECC_ERR_ENA;
	val &= ~ICPU_MEMCTRL_CFG_BURST_SIZE;

	writel(val, BASE_CFG + ICPU_MEMCTRL_CFG);
#endif

	/* Reset Status register - sticky bits (writing the current
	 * value back; presumably write-1-to-clear - confirm)
	 */
	writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT), BASE_CFG + ICPU_MEMCTRL_STAT);
}
501
/*
 * Compare the training pattern for byte lane @bytelane at the current
 * DQS delay.
 *
 * Returns DDR_TRAIN_OK when every byte matches, DDR_TRAIN_CONTINUE
 * after bumping the delay on a mismatch or flagged read error, or
 * DDR_TRAIN_ERROR when the delay cannot be increased any further.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline int look_for(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Reset sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		/* Read data */
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));

		/*
		 * Prevent the compiler reordering the instruction so
		 * the read of RAM happens after the check of the
		 * errors.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* If mismatch, increment DQS - if possible */
		if (byte != training_data[i]) {
 read_error:
			if (!incr_dly(bytelane))
				return DDR_TRAIN_ERROR;
			return DDR_TRAIN_CONTINUE;
		}
	}
	return DDR_TRAIN_OK;
}
541
/*
 * Counterpart of look_for(): walk the DQS delay upward until the
 * pattern STOPS matching, i.e. find the far edge of the passing
 * window for @bytelane.
 *
 * Returns DDR_TRAIN_OK at the first mismatch/read error (edge found),
 * DDR_TRAIN_CONTINUE after a fully matching pass (delay was bumped),
 * or DDR_TRAIN_ERROR when the delay cannot be increased any further.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline int look_past(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Ack sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));
		/*
		 * Prevent the compiler reordering the instruction so
		 * the read of RAM happens after the check of the
		 * errors.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* Bail out when we see first mismatch */
		if (byte != training_data[i]) {
 read_error:
			return DDR_TRAIN_OK;
		}
	}
	/* All data compares OK, increase DQS and retry */
	if (!incr_dly(bytelane))
		return DDR_TRAIN_ERROR;

	return DDR_TRAIN_CONTINUE;
}
581
582 static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
583 {
584         register int res;
585         register u32 dqs_s;
586
587         set_dly(bytelane, 0);   /* Start training at DQS=0 */
588         while ((res = look_for(bytelane)) == DDR_TRAIN_CONTINUE)
589                 ;
590         if (res != DDR_TRAIN_OK)
591                 return res;
592
593         dqs_s = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
594         while ((res = look_past(bytelane)) == DDR_TRAIN_CONTINUE)
595                 ;
596         if (res != DDR_TRAIN_OK)
597                 return res;
598         /* Reset FIFO - for good measure */
599         memphy_soft_reset();
600         /* Adjust to center [dqs_s;cur] */
601         center_dly(bytelane, dqs_s);
602         return DDR_TRAIN_OK;
603 }
604
/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_init_dqs(void)
{
/* Number of DQS delay taps tried per byte lane */
#define MAX_DQS 32
	register u32 i, j;

	/* Exhaustively try (lane0, lane1) delay pairs until one probe
	 * read completes without the controller flagging masked/dummy
	 * read data.
	 */
	for (i = 0; i < MAX_DQS; i++) {
		set_dly(0, i);	/* Byte-lane 0 */
		for (j = 0; j < MAX_DQS; j++) {
			__maybe_unused register u32  byte;

			set_dly(1, j);	/* Byte-lane 1 */
			/* Reset FIFO in case any previous access failed */
			memphy_soft_reset();
			/* Clear sticky read-error status bits */
			writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
			       BASE_CFG + ICPU_MEMCTRL_STAT);
			/* One probe read per byte lane */
			byte = __raw_readb((void __iomem *)MSCC_DDR_TO);
			byte = __raw_readb((void __iomem *)(MSCC_DDR_TO + 1));
			if (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
			    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
			     ICPU_MEMCTRL_STAT_RDATA_DUMMY)))
				return 0;	/* Working pair found */
		}
	}
	return -1;	/* No working delay combination */
}
634
635 static inline int dram_check(void)
636 {
637         register u32 i;
638
639         for (i = 0; i < 8; i++) {
640                 __raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));
641                 if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
642                         return 1;
643         }
644         return 0;
645 }
646 #else                           /* Luton */
647
/* No-op on Luton - NOTE(review): no one-shot timer is programmed here,
 * so the post-init settle delays are skipped on this SoC; confirm that
 * this is intended.
 */
static inline void sleep_100ns(u32 val)
{
}
651
/* Force the memory controller PHY and the controller itself into
 * reset (released again in hal_vcoreiii_init_memctl()).
 */
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_RST);
	setbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);
}
657
/* No-op on Luton: reset is dropped via ICPU_RESET/ICPU_MEMPHY_CFG in
 * hal_vcoreiii_init_memctl(); no dedicated DDR reset pin is driven here.
 */
static inline void hal_vcoreiii_ddr_reset_release(void)
{
}
661
/* DDR sanity check failed on Luton: pulse the PHY FIFO reset
 * (set -> clear -> set) so training can start over.  Unlike the other
 * SoCs this variant does not reboot the chip.
 */
static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 memphy_cfg = readl(BASE_CFG + ICPU_MEMPHY_CFG);

	/* Do a fifo reset and start over */
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg & ~ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
}
674
/* Nothing to do on Luton after the sanity check (no ECC enable, no
 * sticky status clear as on the other SoCs).
 */
static inline void hal_vcoreiii_ddr_verified(void)
{
}
678
679 static inline int look_for(u32 data)
680 {
681         register u32 byte = __raw_readb((void __iomem *)MSCC_DDR_TO);
682
683         if (data != byte) {
684                 if (!incr_dly(0))
685                         return DDR_TRAIN_ERROR;
686                 return DDR_TRAIN_CONTINUE;
687         }
688
689         return DDR_TRAIN_OK;
690 }
691
692 /* This algorithm is converted from the TCL training algorithm used
693  * during silicon simulation.
694  * NB: Assumes inlining as no stack is available!
695  */
696 static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
697 {
698         register int res;
699
700         set_dly(bytelane, 0);   /* Start training at DQS=0 */
701         while ((res = look_for(0xff)) == DDR_TRAIN_CONTINUE)
702                 ;
703         if (res != DDR_TRAIN_OK)
704                 return res;
705
706         set_dly(bytelane, 0);   /* Start training at DQS=0 */
707         while ((res = look_for(0x00)) == DDR_TRAIN_CONTINUE)
708
709                 ;
710
711         if (res != DDR_TRAIN_OK)
712                 return res;
713
714         adjust_dly(-3);
715
716         return DDR_TRAIN_OK;
717 }
718
/* DQS init is a no-op on Luton; lane training happens entirely in
 * hal_vcoreiii_train_bytelane() above.
 */
static inline int hal_vcoreiii_init_dqs(void)
{
	return 0;
}
723
724 static inline int dram_check(void)
725 {
726         register u32 i;
727
728         for (i = 0; i < 8; i++) {
729                 __raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));
730
731                 if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
732                         return 1;
733         }
734
735         return 0;
736 }
737 #endif
738
/*
 * NB: Called *early* to init memory controller - assumes inlining as
 * no stack is available!
 *
 * Sequence: release resets, calibrate SSTL pads, then program the
 * controller's geometry, refresh, timing, mode-register and ODT
 * registers from the MSCC_MEMPARM_* values above.
 */
static inline void hal_vcoreiii_init_memctl(void)
{
	/* Ensure DDR is in reset */
	hal_vcoreiii_ddr_reset_assert();

	/* Wait maybe not needed, but ... */
	PAUSE();

	/* Drop sys ctl memory controller forced reset */
	clrbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);

	PAUSE();

	/* Drop Reset, enable SSTL */
	writel(ICPU_MEMPHY_CFG_PHY_SSTL_ENA, BASE_CFG + ICPU_MEMPHY_CFG);
	PAUSE();

	/* Start the automatic SSTL output and ODT drive-strength calibration */
	writel(ICPU_MEMPHY_ZCAL_ZCAL_PROG_ODT(MIPS_VCOREIII_MEMORY_SSTL_ODT) |
	       /* drive strength */
	       ICPU_MEMPHY_ZCAL_ZCAL_PROG(MIPS_VCOREIII_MEMORY_SSTL_DRIVE) |
	       /* Start calibration process */
	       ICPU_MEMPHY_ZCAL_ZCAL_ENA, BASE_CFG + ICPU_MEMPHY_ZCAL);

	/* Wait for ZCAL to clear */
	while (readl(BASE_CFG + ICPU_MEMPHY_ZCAL) & ICPU_MEMPHY_ZCAL_ZCAL_ENA)
		;
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT)
	/* Check no ZCAL_ERR */
	if (readl(BASE_CFG + ICPU_MEMPHY_ZCAL_STAT)
	    & ICPU_MEMPHY_ZCAL_STAT_ZCAL_ERR)
		hal_vcoreiii_ddr_failed();
#endif
	/* Drive CL, CK, ODT */
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_ODT_OE |
		     ICPU_MEMPHY_CFG_PHY_CK_OE | ICPU_MEMPHY_CFG_PHY_CL_OE);

	/* Initialize memory controller */
	writel(MSCC_MEMPARM_MEMCFG, BASE_CFG + ICPU_MEMCTRL_CFG);
	writel(MSCC_MEMPARM_PERIOD, BASE_CFG + ICPU_MEMCTRL_REF_PERIOD);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	writel(MSCC_MEMPARM_TIMING0, BASE_CFG + ICPU_MEMCTRL_TIMING0);
#else /* Luton */
	/* Luton: replace only the low 20 bits of TIMING0, keep the rest */
	clrbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, ((1 << 20) - 1));
	setbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, MSCC_MEMPARM_TIMING0);
#endif

	writel(MSCC_MEMPARM_TIMING1, BASE_CFG + ICPU_MEMCTRL_TIMING1);
	writel(MSCC_MEMPARM_TIMING2, BASE_CFG + ICPU_MEMCTRL_TIMING2);
	writel(MSCC_MEMPARM_TIMING3, BASE_CFG + ICPU_MEMCTRL_TIMING3);
	writel(MSCC_MEMPARM_MR0, BASE_CFG + ICPU_MEMCTRL_MR0_VAL);
	writel(MSCC_MEMPARM_MR1, BASE_CFG + ICPU_MEMCTRL_MR1_VAL);
	writel(MSCC_MEMPARM_MR2, BASE_CFG + ICPU_MEMCTRL_MR2_VAL);
	writel(MSCC_MEMPARM_MR3, BASE_CFG + ICPU_MEMCTRL_MR3_VAL);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
	/* Termination setup - enable ODT */
	writel(ICPU_MEMCTRL_TERMRES_CTRL_LOCAL_ODT_RD_ENA |
	       /* Assert ODT0 for any write */
	       ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

	/* Release Reset from DDR */
#if defined(CONFIG_SOC_OCELOT)
	hal_vcoreiii_ddr_reset_release();
#endif

	/* Count controller inits in GPR(7) - NOTE(review): presumably a
	 * boot/retry statistic; confirm against the SPL restart logic.
	 */
	writel(readl(BASE_CFG + ICPU_GPR(7)) + 1, BASE_CFG + ICPU_GPR(7));
#elif defined(CONFIG_SOC_JR2) || defined(CONFIG_SOC_SERVALT)
	writel(ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);
#else				/* Luton */
	/* Termination setup - disable ODT */
	writel(0, BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

#endif
}
823
/*
 * Kick the memory controller's initialization sequence, busy-wait for
 * completion, then seed the start of DDR with the pattern the DQS
 * training routines compare against.
 */
static inline void hal_vcoreiii_wait_memctl(void)
{
	/* Now, rip it! */
	writel(ICPU_MEMCTRL_CTRL_INITIALIZE, BASE_CFG + ICPU_MEMCTRL_CTRL);

	while (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT)
		 & ICPU_MEMCTRL_STAT_INIT_DONE))
		;

	/* Settle...? */
	sleep_100ns(10000);
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	/* Establish data contents in DDR RAM for training
	 * (see training_data[] for the per-lane expected bytes)
	 */

	__raw_writel(0xcacafefe, ((void __iomem *)MSCC_DDR_TO));
	__raw_writel(0x22221111, ((void __iomem *)MSCC_DDR_TO + 0x4));
	__raw_writel(0x44443333, ((void __iomem *)MSCC_DDR_TO + 0x8));
	__raw_writel(0x66665555, ((void __iomem *)MSCC_DDR_TO + 0xC));
	__raw_writel(0x88887777, ((void __iomem *)MSCC_DDR_TO + 0x10));
	__raw_writel(0xaaaa9999, ((void __iomem *)MSCC_DDR_TO + 0x14));
	__raw_writel(0xccccbbbb, ((void __iomem *)MSCC_DDR_TO + 0x18));
	__raw_writel(0xeeeedddd, ((void __iomem *)MSCC_DDR_TO + 0x1C));
#else
	/* Luton training only checks a single byte (see look_for()) */
	__raw_writel(0xff, ((void __iomem *)MSCC_DDR_TO));
#endif
}
851 #endif                          /* __ASM_MACH_DDR_H */