2 * Qualcomm/Atheros WiSoCs DRAM related
3 * functions for WiSoC families:
6 * - Qualcomm/Atheros QCA953x
7 * - Qualcomm/Atheros QCA955x
8 * - Qualcomm/Atheros QCA956x
10 * Copyright (C) 2016 Piotr Dymacz <piotr@dymacz.pl>
11 * Copyright (C) 2015-2016 Wills Wang <wills.wang@live.com>
12 * Copyright (C) 2014 Qualcomm Atheros, Inc.
13 * Copyright (C) 2008-2010 Atheros Communications Inc.
15 * SPDX-License-Identifier: GPL-2.0
20 #include <asm/addrspace.h>
21 #include <soc/qca_soc_common.h>
22 #include <soc/qca_dram.h>
24 #define QCA_DDR_SIZE_INCREMENT (8 * 1024 * 1024)
27 * Returns size (in bytes) of the DRAM memory
29 * DDR wraps around, write a pattern to 0x00000000
30 * at 8M, 16M, 32M etc. and check when it gets overwritten
/*
 * NOTE(review): several lines of this function are not visible in this
 * view (local declarations, the wrap-around read-back check and the
 * surrounding return statements are elided); comments below cover only
 * the visible fragments.
 */
32 u32 qca_dram_size(void)
/* Number of 8 MB steps up to the maximum supported DRAM size */
40 #define max_i (QCA_DRAM_MAX_SIZE_VAL / QCA_DDR_SIZE_INCREMENT)
/* Tag each 8 MB boundary with its index; a wrap overwrites offset 0 */
42 for (i = 1; (i < max_i); i++) {
43 *(p + i * QCA_DDR_SIZE_INCREMENT) = (u8)i;
50 #ifndef CONFIG_SKIP_LOWLEVEL_INIT
52 (i * QCA_DDR_SIZE_INCREMENT) : QCA_DRAM_MAX_SIZE_VAL);
56 * something is wrong with relocation,
57 * need to fix it for boards with > 32M of RAM
59 * For now just return 1 MB smaller size
/* Workaround (see comment above): report 1 MB less than detected */
62 (i * QCA_DDR_SIZE_INCREMENT) : QCA_DRAM_MAX_SIZE_VAL) - 1024 * 1024;
67 * Return memory type value from BOOT_STRAP register
/*
 * NOTE(review): parts of this function (switch statement header, break
 * statements, default label, final return) are elided in this view;
 * only the visible fragments are annotated.
 */
69 u32 qca_dram_type(void)
/* Memory type can be forced at build time instead of bootstrap pins */
71 #if defined(CONFIG_BOARD_DRAM_TYPE_SDR)
72 #error "SDRAM is not supported!"
73 return RAM_MEMORY_TYPE_SDR;
74 #elif defined(CONFIG_BOARD_DRAM_TYPE_DDR1)
75 return RAM_MEMORY_TYPE_DDR1;
76 #elif defined(CONFIG_BOARD_DRAM_TYPE_DDR2)
77 return RAM_MEMORY_TYPE_DDR2;
/* Otherwise read the strapped memory type from RST_BOOTSTRAP */
81 dram_type = ((qca_soc_reg_read(QCA_RST_BOOTSTRAP_REG)
82 & QCA_RST_BOOTSTRAP_MEM_TYPE_MASK)
83 >> QCA_RST_BOOTSTRAP_MEM_TYPE_SHIFT);
86 * There are two major different versions of Dragino 2. Initial one uses DDR1
87 * and a custom PCB. The new one is based on Dragino HE module which has DDR2.
89 * Both versions have a "bug" in DRAM type detection. They don't use both GPIOs
90 * (12 and 28) for setting DRAM type during bootstrap - only GPIO28 is used.
92 * Therefore, use a custom DRAM type detection here (ignore LSB bit)
94 #if defined(CONFIG_FOR_DRAGINO_V2) || defined(CONFIG_FOR_MESH_POTATO_V2)
/* Drop LSB (GPIO12 strap) which is not wired on these boards */
95 dram_type = dram_type >> 1;
98 dram_type = RAM_MEMORY_TYPE_DDR2;
100 dram_type = RAM_MEMORY_TYPE_DDR1;
/* Generic path: map bootstrap field value to a RAM_MEMORY_TYPE_* enum */
103 case QCA_RST_BOOTSTRAP_MEM_TYPE_SDR_VAL:
104 dram_type = RAM_MEMORY_TYPE_SDR;
106 case QCA_RST_BOOTSTRAP_MEM_TYPE_DDR1_VAL:
107 dram_type = RAM_MEMORY_TYPE_DDR1;
109 case QCA_RST_BOOTSTRAP_MEM_TYPE_DDR2_VAL:
110 dram_type = RAM_MEMORY_TYPE_DDR2;
113 dram_type = RAM_MEMORY_TYPE_UNKNOWN;
123 * Returns DDR width (16 or 32)
/*
 * NOTE(review): the return statements for the bootstrap-detected path
 * are elided in this view; only the visible fragments are annotated.
 */
125 u32 qca_dram_ddr_width(void)
/* Width may be fixed at build time; otherwise detect from bootstrap */
127 #ifndef CONFIG_BOARD_DRAM_DDR_WIDTH
/* AR933x path is handled separately (elided here) */
128 #if (SOC_TYPE & QCA_AR933X_SOC)
/* 32-bit width strap bit set in RST_BOOTSTRAP -> 32-bit interface */
131 if (qca_soc_reg_read(QCA_RST_BOOTSTRAP_REG)
132 & QCA_RST_BOOTSTRAP_DDR_WIDTH_32_MASK)
138 return CONFIG_BOARD_DRAM_DDR_WIDTH;
143 * Returns CAS latency, based on setting in DDR_CONFIG register
/*
 * NOTE(review): local declarations and the final return of the
 * register-derived value are elided in this view.
 */
145 u32 qca_dram_cas_lat(void)
/* CAS may be fixed at build time; otherwise decode from DDR_CONFIG */
147 #ifndef CONFIG_BOARD_DRAM_CAS_LATENCY
/* CAS field is split: 3 LSBs here... */
150 reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_CAS_3LSB_MASK)
151 >> QCA_DDR_CFG_CAS_3LSB_SHIFT;
/* ...plus a separate MSB bit elsewhere in the same register */
153 if (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_CAS_MSB_MASK)
156 /* CAS_LATENCY value in DDR_CONFIG register == 2 * MEM_CAS */
159 return CONFIG_BOARD_DRAM_CAS_LATENCY;
164 * Returns tRCD latency
/* NOTE(review): declaration of `reg` and the return are elided here */
166 u32 qca_dram_trcd_lat(void)
/* Extract tRCD field from DDR_CONFIG */
170 reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRCD_MASK)
171 >> QCA_DDR_CFG_TRCD_SHIFT;
177 * Returns tRP latency
/* NOTE(review): declaration of `reg` and the return are elided here */
179 u32 qca_dram_trp_lat(void)
/* Extract tRP field from DDR_CONFIG */
183 reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRP_MASK)
184 >> QCA_DDR_CFG_TRP_SHIFT;
190 * Returns tRAS latency
/* NOTE(review): declaration of `reg` and the return are elided here */
192 u32 qca_dram_tras_lat(void)
/* Extract tRAS field from DDR_CONFIG */
196 reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRAS_MASK)
197 >> QCA_DDR_CFG_TRAS_SHIFT;
203 * ===============================================
204 * DQS delay tap controller tune related functions
205 * ===============================================
207 #define DQS_DELAY_TAP_DEFAULT_VAL 8
209 #if (SOC_TYPE & QCA_AR933X_SOC) |\
210 (SOC_TYPE & QCA_AR934X_SOC)
211 #define DQS_DELAY_TAP_MAX_VAL 62
213 #define DQS_DELAY_TAP_MAX_VAL 63
217 * Setup DQS_{0,1,2,3} delay tap control register/s
/*
 * NOTE(review): the declaration of `tap_h` and some closing braces are
 * elided in this view.
 */
219 static void qca_ddr_tap_save(u32 tap, u32 ddr_width)
221 #if (SOC_TYPE & QCA_AR933X_SOC) |\
222 (SOC_TYPE & QCA_AR934X_SOC)
225 /* It seems that AR93xx SoCs have two delay chains */
/* Split values above half-range into low + high chain fields */
226 if (tap > (DQS_DELAY_TAP_MAX_VAL / 2)) {
227 tap_h = tap - (DQS_DELAY_TAP_MAX_VAL / 2);
228 tap = tap & QCA_DDR_TAP_CTRL_TAP_L_MASK;
229 tap = tap | (tap_h << QCA_DDR_TAP_CTRL_TAP_H_SHIFT);
/* DQS0/DQS1 are always programmed */
233 qca_soc_reg_write(QCA_DDR_TAP_CTRL_0_REG, tap);
234 qca_soc_reg_write(QCA_DDR_TAP_CTRL_1_REG, tap);
236 /* Setup DQS2 and DQS3 only for 32-bit DDR interface width */
237 if (ddr_width == 32) {
238 qca_soc_reg_write(QCA_DDR_TAP_CTRL_2_REG, tap);
239 qca_soc_reg_write(QCA_DDR_TAP_CTRL_3_REG, tap);
244 * Only for AR933x we will use different code
245 * for delay tap controller tune as it seems
246 * that this SoC doesn't have DDR BIST.
248 * Below function is universal, so it should
249 * work also for other QC/A WiSoCs and give
250 * same (or very similar) results. The only
251 * difference is that the DDR BIST based
252 * version seems to be much faster.
254 #if (SOC_TYPE & QCA_AR933X_SOC)
/* Pattern placement/size and per-tap test loop count for tap tuning */
256 #define DQS_DELAY_TAP_PATTERN_OFFSET 0x2000
257 #define DQS_DELAY_TAP_PATTERN_SIZE 0x1000
258 #define DQS_DELAY_TAP_TEST_LOOPS 2
261 * Prepare pattern for further tests
/*
 * NOTE(review): local declarations and the inner loop body that writes
 * the pattern values are elided in this view.
 */
263 static inline void qca_ddr_tap_patt(void)
268 /* Prepare 4M (256 x 4 x 4 bytes) pattern */
/* Write through uncached KSEG1 so the pattern reaches DRAM directly */
269 addr = (void *)KSEG1ADDR(DQS_DELAY_TAP_PATTERN_OFFSET);
271 for (i = 0; i < 256; i++) {
274 for (j = 0; j < 8; j++) {
291 * This function is a modified C version of the original
292 * ar933x_ddr_tap_init() function, written in asm,
293 * included in Atheros (Q)SDK code.
295 * Below is a modified version, partially based on:
296 * https://patchwork.ozlabs.org/patch/569046/
/*
 * NOTE(review): large parts of this function are elided in this view
 * (local declarations, pattern preparation call, the pass/fail
 * bookkeeping that sets tap_lo/tap_hi and got_lo, loop closers).
 * Comments below cover only the visible fragments.
 */
298 static void qca_ddr_tap_tune(u32 ddr_width)
300 u32 *addr, *addr_k0, *addr_k1;
301 u32 tap, tap_hi, tap_lo;
/* End-of-pattern address used as the comparison loop bound */
311 addr = (void *)KSEG1ADDR(DQS_DELAY_TAP_PATTERN_OFFSET
312 + DQS_DELAY_TAP_PATTERN_SIZE);
315 * Idea here is to test all possible tap values, one by one,
316 * starting from the lowest. We are looking for a range within
317 * the written and read back data is the same. We assume here
318 * that the valid tap range is continuous.
320 * From hardware POV, delay tap controller is used to adjust
323 for (tap = 0; tap <= DQS_DELAY_TAP_MAX_VAL; tap++) {
/* Program candidate tap value before testing it */
324 qca_ddr_tap_save(tap, ddr_width);
328 for (i = 0; i < DQS_DELAY_TAP_TEST_LOOPS; i++) {
/* Compare uncached (KSEG1) vs cached (KSEG0) view of the pattern */
329 addr_k1 = (void *)KSEG1ADDR(DQS_DELAY_TAP_PATTERN_OFFSET);
330 addr_k0 = (void *)KSEG0ADDR(DQS_DELAY_TAP_PATTERN_OFFSET);
332 while (addr_k1 < addr) {
333 if (*addr_k1++ != *addr_k0++) {
360 /* Calculate final tap value (rounded up average) */
362 tap = (tap_hi + tap_lo + 1) / 2;
/* Fall back to the default tap when no working range was found */
364 tap = DQS_DELAY_TAP_DEFAULT_VAL;
367 qca_ddr_tap_save(tap, ddr_width);
370 #else /* SOC_TYPE & QCA_AR933X_SOC */
372 #define DQS_DELAY_TAP_TEST_LOOPS 8
375 * Unknown magic values and registers from Atheros (Q)SDK.
377 * It looks like some test patterns and masks setup,
378 * but it's not confirmed. Used here values are
379 * different, but were tested on real hardware.
/*
 * NOTE(review): programs alternating-bit compare/mask patterns into the
 * DDR BIST GE0/GE1 register pairs before each BIST run. Exact meaning
 * of these registers is undocumented (see comment above).
 */
381 static inline void qca_ddr_tap_bist_init(void)
383 qca_soc_reg_write(QCA_DDR_PERF_COMP_AHB_GE0_0_REG, 0xAAAAAAAA);
384 qca_soc_reg_write(QCA_DDR_PERF_MASK_AHB_GE0_0_REG, 0xAAAAAAAA);
386 qca_soc_reg_write(QCA_DDR_PERF_COMP_AHB_GE0_1_REG, 0x55555555);
387 qca_soc_reg_write(QCA_DDR_PERF_MASK_AHB_GE0_1_REG, 0x55555555);
389 qca_soc_reg_write(QCA_DDR_PERF_COMP_AHB_GE1_0_REG, 0xAAAAAAAA);
390 qca_soc_reg_write(QCA_DDR_PERF_MASK_AHB_GE1_0_REG, 0xAAAAAAAA);
392 qca_soc_reg_write(QCA_DDR_PERF_COMP_AHB_GE1_1_REG, 0x55555555);
393 qca_soc_reg_write(QCA_DDR_PERF_MASK_AHB_GE1_1_REG, 0x55555555);
397 * This function is a modified C version of the original
398 * ath_ddr_tap_cal() function, written in asm,
399 * included in Atheros (Q)SDK code.
401 * It seems that newer QC/A WiSoCs have some kind of
402 * built-in self-test (BIST) for DDR controller, but
403 * none of the used registers or their values are
404 * described in datasheets, so for now, we will just
405 * use them as in original code.
407 * Below is a modified version, partially based on:
408 * https://patchwork.ozlabs.org/patch/569047/
/*
 * NOTE(review): several lines are elided in this view (variable init,
 * the `do` keyword of the status-poll loop, the pass/fail bookkeeping
 * that sets tap_lo/tap_hi/got_lo). Comments cover visible fragments.
 */
410 static void qca_ddr_tap_tune(u32 ddr_width)
412 u32 tap, tap_hi, tap_lo;
413 u32 fail, got_lo, reg;
419 /* How many test loops per tested tap value */
420 qca_soc_reg_write(QCA_DDR_PERF_COMP_ADDR_1_REG,
421 (DQS_DELAY_TAP_TEST_LOOPS
422 << QCA_DDR_PERF_COMP_ADDR_1_TEST_CNT_SHIFT));
425 * Unknown magic value, original comment:
426 * "4 Row Address Bits, 4 Column Address Bits, 2 BA bits"
428 qca_soc_reg_write(QCA_DDR_PERF_MASK_ADDR_0_REG, 0xFA5DE83F);
431 * Test all possible tap values, try to find working range
432 * (minimum and maximum delays) and use average value
434 for (tap = 0; tap <= DQS_DELAY_TAP_MAX_VAL; tap++) {
/* Program the candidate tap, then re-arm BIST patterns */
435 qca_ddr_tap_save(tap, ddr_width);
437 qca_ddr_tap_bist_init();
439 /* Enable BIST test and wait for finish */
440 qca_soc_reg_write(QCA_DDR_BIST_REG, QCA_DDR_BIST_TEST_EN_MASK);
/* Poll status until the DONE bit is set */
443 reg = qca_soc_reg_read(QCA_DDR_BIST_STATUS_REG);
444 } while (!(reg & QCA_DDR_BIST_STATUS_DONE_MASK));
446 /* Disable BIST test */
447 qca_soc_reg_write(QCA_DDR_BIST_REG, 0);
449 /* Check how many tests failed */
450 fail = (reg & QCA_DDR_BIST_STATUS_FAIL_CNT_MASK)
451 >> QCA_DDR_BIST_STATUS_FAIL_CNT_SHIFT;
470 /* Calculate final tap value (rounded up average) */
472 tap = (tap_hi + tap_lo + 1) / 2;
/* Fall back to the default tap when no working range was found */
474 tap = DQS_DELAY_TAP_DEFAULT_VAL;
477 qca_ddr_tap_save(tap, ddr_width);
480 #endif /* SOC_TYPE & QCA_AR933X_SOC */
483 * ===============================================
484 * DDR controller initialization related functions
485 * ===============================================
489 * Below defines are "safe" DDR1/DDR2 timing parameters.
490 * They should work for most chips, but not for all.
492 * For different values, user can define target value
493 * of all memory controller related registers.
496 #define DDRx_tMRD_ns 10
497 #define DDRx_tRAS_ns 40
498 #define DDRx_tRCD_ns 15
499 #define DDRx_tRP_ns 15
500 #define DDRx_tRRD_ns 10
501 #define DDRx_tWR_ns 15
502 #define DDRx_tWTR_ns 10
504 #define DDR1_tRFC_ns 75
505 #define DDR2_tRFC_ns 120
507 #define DDR2_tFAW_ns 50
508 #define DDR2_tWL_ns 5
510 #define DDR_addit_lat 0
511 #define DDR_burst_len 8
513 /* All above values are safe for clocks not lower than below values */
514 #define DDR1_timing_clk_max 400
515 #define DDR2_timing_clk_max 533
517 /* Maximum timing values, based on register fields sizes */
518 #define MAX_tFAW BITS(0, 6)
519 #define MAX_tMRD BITS(0, 4)
520 #define MAX_tRAS BITS(0, 5)
521 #define MAX_tRCD BITS(0, 4)
522 #define MAX_tRFC BITS(0, 6)
523 #define MAX_tRP BITS(0, 4)
524 #define MAX_tRRD BITS(0, 4)
525 #define MAX_tRTP BITS(0, 4)
526 #define MAX_tRTW BITS(0, 5)
527 #define MAX_tWL BITS(0, 4)
528 #define MAX_tWR BITS(0, 4)
529 #define MAX_tWTR BITS(0, 5)
532 * Setup DDR_CONFIG register
/*
 * NOTE(review): the parameter list tail, local declarations, and the
 * clamping of each computed value against its MAX_* limit are elided
 * in this view. Visible code computes timing fields (in DDR clock
 * cycles, rounded to nearest via the +500/1000 idiom) and merges them
 * into a read-modify-write of DDR_CONFIG.
 */
534 static inline void qca_dram_set_ddr_cfg(u32 mem_cas,
/* Allow a board to override the whole register with a static value */
538 #ifndef CONFIG_QCA_DDR_CFG_REG_VAL
542 reg = qca_soc_reg_read(QCA_DDR_CFG_REG);
544 /* Always use page close policy */
545 reg = reg | QCA_DDR_CFG_PAGE_CLOSE_MASK;
547 /* CAS should be (2 * MEM_CAS) or (2 * MEM_CAS) + 1/2/3 */
549 tmp = (tmp << QCA_DDR_CFG_CAS_3LSB_SHIFT) & QCA_DDR_CFG_CAS_3LSB_MASK;
551 tmp = tmp | QCA_DDR_CFG_CAS_MSB_MASK;
554 reg = reg & ~QCA_DDR_CFG_CAS_3LSB_MASK;
558 * Calculate rest of timing related values,
559 * always round up to closest integer
/* tMRD in cycles */
563 tmp = ((DDRx_tMRD_ns * ddr_clk) + 500) / 1000;
567 tmp = (tmp << QCA_DDR_CFG_TMRD_SHIFT) & QCA_DDR_CFG_TMRD_MASK;
568 reg = reg & ~QCA_DDR_CFG_TMRD_MASK;
/* tRFC: DDR2 needs a longer refresh-to-active time than DDR1 */
572 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
573 tmp = ((DDR2_tRFC_ns * ddr_clk) + 500) / 1000;
575 tmp = ((DDR1_tRFC_ns * ddr_clk) + 500) / 1000;
581 tmp = (tmp << QCA_DDR_CFG_TRFC_SHIFT) & QCA_DDR_CFG_TRFC_MASK;
582 reg = reg & ~QCA_DDR_CFG_TRFC_MASK;
/* tRRD in cycles */
586 tmp = ((DDRx_tRRD_ns * ddr_clk) + 500) / 1000;
590 tmp = (tmp << QCA_DDR_CFG_TRRD_SHIFT) & QCA_DDR_CFG_TRRD_MASK;
591 reg = reg & ~QCA_DDR_CFG_TRRD_MASK;
/* tRP in cycles */
595 tmp = ((DDRx_tRP_ns * ddr_clk) + 500) / 1000;
599 tmp = (tmp << QCA_DDR_CFG_TRP_SHIFT) & QCA_DDR_CFG_TRP_MASK;
600 reg = reg & ~QCA_DDR_CFG_TRP_MASK;
/* tRCD in cycles */
604 tmp = ((DDRx_tRCD_ns * ddr_clk) + 500) / 1000;
608 tmp = (tmp << QCA_DDR_CFG_TRCD_SHIFT) & QCA_DDR_CFG_TRCD_MASK;
609 reg = reg & ~QCA_DDR_CFG_TRCD_MASK;
/* tRAS in cycles */
613 tmp = ((DDRx_tRAS_ns * ddr_clk) + 500) / 1000;
617 tmp = (tmp << QCA_DDR_CFG_TRAS_SHIFT) & QCA_DDR_CFG_TRAS_MASK;
618 reg = reg & ~QCA_DDR_CFG_TRAS_MASK;
621 qca_soc_reg_write(QCA_DDR_CFG_REG, reg);
/* Board-provided static register value path */
623 qca_soc_reg_write(QCA_DDR_CFG_REG, CONFIG_QCA_DDR_CFG_REG_VAL);
628 * Setup DDR_CONFIG2 register
/*
 * NOTE(review): the parameter list tail, local declarations, MAX_*
 * clamps, and some else/closing braces are elided in this view.
 */
630 static inline void qca_dram_set_ddr_cfg2(u32 mem_cas,
/* Allow a board to override the whole register with a static value */
635 #ifndef CONFIG_QCA_DDR_CFG2_REG_VAL
639 reg = qca_soc_reg_read(QCA_DDR_CFG2_REG);
/* Keep CKE asserted */
642 reg = reg | QCA_DDR_CFG2_CKE_MASK;
644 /* Gate open latency = 2 * MEM_CAS */
646 tmp = (tmp << QCA_DDR_CFG2_GATE_OPEN_LATENCY_SHIFT)
647 & QCA_DDR_CFG2_GATE_OPEN_LATENCY_MASK;
648 reg = reg & ~QCA_DDR_CFG2_GATE_OPEN_LATENCY_MASK;
/* tWTR: formula differs between DDR2 and DDR1 */
652 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
653 /* tWTR = 2 * WL + BL + 2 * max(tWTR/tCK, 2) */
654 tmp = 2 * (mem_cas + DDR_addit_lat - 1) + DDR_burst_len + 4;
659 /* tWTR = 2 + BL + (2 * tWTR/tCK) */
660 tmp = 2 + DDR_burst_len
661 + (((DDRx_tWTR_ns * ddr_clk) + 500) / 1000);
667 tmp = (tmp << QCA_DDR_CFG2_TWTR_SHIFT) & QCA_DDR_CFG2_TWTR_MASK;
668 reg = reg & ~QCA_DDR_CFG2_TWTR_MASK;
/* tRTP depends on interface width (value computation elided) */
672 if (ddr_width == 32) {
678 tmp = (tmp << QCA_DDR_CFG2_TRTP_SHIFT) & QCA_DDR_CFG2_TRTP_MASK;
679 reg = reg & ~QCA_DDR_CFG2_TRTP_MASK;
/* tRTW: formula differs between DDR2 and DDR1 */
683 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
684 /* tRTW = 2 * (RL + BL/2 + 1 -WL), RL = CL + AL, WL = RL - 1 */
685 tmp = DDR_burst_len + 4;
687 /* tRTW = 2 * (CL + BL/2) */
688 tmp = DDR_burst_len + (2 * mem_cas);
694 tmp = (tmp << QCA_DDR_CFG2_TRTW_SHIFT) & QCA_DDR_CFG2_TRTW_MASK;
695 reg = reg & ~QCA_DDR_CFG2_TRTW_MASK;
/* tWR in cycles (rounded) */
699 tmp = ((DDRx_tWR_ns * ddr_clk) + 500) / 1000;
703 tmp = (tmp << QCA_DDR_CFG2_TWR_SHIFT) & QCA_DDR_CFG2_TWR_MASK;
704 reg = reg & ~QCA_DDR_CFG2_TWR_MASK;
707 /* Always use burst length = 8 and type: sequential */
708 tmp = (DDR_burst_len << QCA_DDR_CFG2_BURST_LEN_SHIFT)
709 & QCA_DDR_CFG2_BURST_LEN_MASK;
710 reg = reg & ~(QCA_DDR_CFG2_BURST_LEN_MASK
711 | QCA_DDR_CFG2_BURST_TYPE_MASK);
714 qca_soc_reg_write(QCA_DDR_CFG2_REG, reg);
/* Board-provided static register value path */
716 qca_soc_reg_write(QCA_DDR_CFG2_REG, CONFIG_QCA_DDR_CFG2_REG_VAL);
721 * Setup DDR2_CONFIG register (only for DDR2)
/*
 * NOTE(review): the parameter list tail, local declarations, MAX_*
 * clamps, and the AR933x even-value adjustment body are elided in
 * this view.
 */
723 static inline void qca_dram_set_ddr2_cfg(u32 mem_cas,
/* Allow a board to override the whole register with a static value */
726 #ifndef CONFIG_QCA_DDR_DDR2_CFG_REG_VAL
730 reg = qca_soc_reg_read(QCA_DDR_DDR2_CFG_REG);
/* Enable DDR2 mode in the controller */
733 reg = reg | QCA_DDR_DDR2_CFG_DDR2_EN_MASK;
/* tFAW in cycles (rounded) */
736 tmp = ((DDR2_tFAW_ns * ddr_clk) + 500) / 1000;
740 tmp = (tmp << QCA_DDR_DDR2_CFG_DDR2_TFAW_SHIFT)
741 & QCA_DDR_DDR2_CFG_DDR2_TFAW_MASK;
742 reg = reg & ~QCA_DDR_DDR2_CFG_DDR2_TFAW_MASK;
/* Write latency derived from CAS: WL register value = 2*CL - 3 */
746 tmp = (2 * mem_cas) - 3;
748 /* For some reason, odd value doesn't work on AR933x (FIXME) */
749 #if (SOC_TYPE & QCA_AR933X_SOC)
754 tmp = (tmp << QCA_DDR_DDR2_CFG_DDR2_TWL_SHIFT)
755 & QCA_DDR_DDR2_CFG_DDR2_TWL_MASK;
756 reg = reg & ~QCA_DDR_DDR2_CFG_DDR2_TWL_MASK;
759 qca_soc_reg_write(QCA_DDR_DDR2_CFG_REG, reg);
/* Board-provided static register value path */
761 qca_soc_reg_write(QCA_DDR_DDR2_CFG_REG,
762 CONFIG_QCA_DDR_DDR2_CFG_REG_VAL);
767 * Enables DDR refresh and sets
768 * refresh period based on XTAL
/* NOTE(review): the else keyword/braces are elided in this view */
770 static inline void qca_dram_set_en_refresh(void)
773 * Enable DDR refresh and setup refresh period:
774 * 1. We assume 7.8 us maximum average period refresh interval
775 * 2. 7.8 us ~= 0.1282 MHz
776 * 3. For 25 MHz XTAL: (25 / 0.1282) ~= 195
777 * 4. For 40 MHz XTAL: (40 / 0.1282) ~= 312
779 if (qca_xtal_is_40mhz()) {
780 qca_soc_reg_write(QCA_DDR_REFRESH_REG,
781 QCA_DDR_REFRESH_EN_MASK
782 | (312 << QCA_DDR_REFRESH_PERIOD_SHIFT));
/* 25 MHz XTAL path */
784 qca_soc_reg_write(QCA_DDR_REFRESH_REG,
785 QCA_DDR_REFRESH_EN_MASK
786 | (195 << QCA_DDR_REFRESH_PERIOD_SHIFT));
791 * Initial DRAM configuration
/*
 * NOTE(review): this is the main DRAM bring-up entry point. Many lines
 * are elided in this view (CAS assignment branches, #else/#endif lines,
 * the `tmp` accumulation for fractional-PLL detection, some EMR call
 * lines). Comments below cover only the visible fragments.
 *
 * Visible sequence: detect memory type -> read clocks -> pick CAS ->
 * set interface width -> FSM/SRAM tweaks -> CPU/DDR sync decision ->
 * clamp timing clock -> program CFG/CFG2/DDR2 registers -> JEDEC-style
 * init (EMR/MR, precharge, refresh) -> enable refresh -> tap tune.
 */
793 void qca_dram_init(void)
795 u32 ahb_clk, cpu_clk, ddr_clk, mem_type, tmp_clk;
796 u32 cas_lat, ddr_width, reg, tmp, wr_recovery;
798 mem_type = qca_dram_type();
/* Fetch clocks in Hz, convert to MHz for timing math */
800 qca_sys_clocks(&cpu_clk, &ddr_clk, &ahb_clk, NULL, NULL);
801 cpu_clk = cpu_clk / 1000000;
802 ddr_clk = ddr_clk / 1000000;
803 ahb_clk = ahb_clk / 1000000;
805 /* Set CAS based on clock, but allow to set static value */
806 #ifndef CONFIG_BOARD_DRAM_CAS_LATENCY
807 if (mem_type == RAM_MEMORY_TYPE_DDR1) {
808 if (ddr_clk <= 266) {
/* DDR2: CAS picked by clock band (assignments elided in this view) */
814 if (ddr_clk <= 400) {
816 } else if (ddr_clk <= 533) {
818 } else if (ddr_clk <= 666) {
820 } else if (ddr_clk <= 800) {
827 cas_lat = CONFIG_BOARD_DRAM_CAS_LATENCY;
830 #if (SOC_TYPE & QCA_AR933X_SOC)
831 /* AR933x supports only 16-bit memory */
833 qca_soc_reg_write(QCA_DDR_RD_DATA_THIS_CYCLE_REG, 0xFF);
835 /* For other WiSoCs we can determine DDR width, based on bootstrap */
836 ddr_width = qca_dram_ddr_width();
838 if (ddr_width == 32) {
839 /* For 32-bit clear HALF_WIDTH and set VEC = 0xFF */
840 qca_soc_reg_read_clear(QCA_DDR_CTRL_CFG_REG,
841 QCA_DDR_CTRL_CFG_HALF_WIDTH_MASK);
843 qca_soc_reg_write(QCA_DDR_RD_DATA_THIS_CYCLE_REG, 0xFF);
/* 16-bit path: set HALF_WIDTH and VEC = 0xFFFF */
845 qca_soc_reg_read_set(QCA_DDR_CTRL_CFG_REG,
846 QCA_DDR_CTRL_CFG_HALF_WIDTH_MASK);
848 qca_soc_reg_write(QCA_DDR_RD_DATA_THIS_CYCLE_REG, 0xFFFF);
851 /* If DDR_CLK < 2 * AHB_CLK, set DDR FSM wait control to 0xA24 */
852 if (ddr_clk < (2 * ahb_clk))
853 qca_soc_reg_write(QCA_DDR_FSM_WAIT_CTRL_REG, 0xA24);
855 /* If CPU clock < AHB clock, set SRAM REQ ACK */
856 if (cpu_clk < ahb_clk)
857 qca_soc_reg_read_set(QCA_DDR_CTRL_CFG_REG,
858 QCA_DDR_CTRL_CFG_SRAM_REQ_ACK_MASK);
860 qca_soc_reg_read_clear(QCA_DDR_CTRL_CFG_REG,
861 QCA_DDR_CTRL_CFG_SRAM_REQ_ACK_MASK);
865 * CPU/DDR sync mode only when we don't use
866 * fractional multipliers in PLL/clocks config
/* Read NFRAC fields; non-zero means fractional mode is in use */
870 #if (SOC_TYPE & QCA_AR933X_SOC)
871 reg = qca_soc_reg_read(QCA_PLL_CPU_PLL_DITHER_FRAC_REG);
872 reg = (reg & QCA_PLL_CPU_PLL_DITHER_FRAC_NFRAC_MIN_MASK)
873 >> QCA_PLL_CPU_PLL_DITHER_FRAC_NFRAC_MIN_SHIFT;
878 reg = qca_soc_reg_read(QCA_PLL_CPU_PLL_DITHER_REG);
879 reg = (reg & QCA_PLL_CPU_PLL_DITHER_NFRAC_MIN_MASK)
880 >> QCA_PLL_CPU_PLL_DITHER_NFRAC_MIN_SHIFT;
885 reg = qca_soc_reg_read(QCA_PLL_DDR_PLL_DITHER_REG);
886 reg = (reg & QCA_PLL_DDR_PLL_DITHER_NFRAC_MIN_MASK)
887 >> QCA_PLL_DDR_PLL_DITHER_NFRAC_MIN_SHIFT;
/* Sync mode only if no fractional part and CPU/DDR clocks match */
893 if (!tmp && (cpu_clk == ddr_clk)) {
894 #if (SOC_TYPE & QCA_AR933X_SOC)
895 qca_soc_reg_read_set(QCA_DDR_TAP_CTRL_3_REG, (1 << 8));
897 qca_soc_reg_read_set(QCA_DDR_CTRL_CFG_REG,
898 QCA_DDR_CTRL_CFG_CPU_DDR_SYNC_MASK);
901 #if (SOC_TYPE & QCA_AR933X_SOC)
902 qca_soc_reg_read_clear(QCA_DDR_TAP_CTRL_3_REG, (1 << 8));
904 qca_soc_reg_read_clear(QCA_DDR_CTRL_CFG_REG,
905 QCA_DDR_CTRL_CFG_CPU_DDR_SYNC_MASK);
909 /* Check if clock is not too low for our "safe" timing values */
911 if (mem_type == RAM_MEMORY_TYPE_DDR1) {
912 if (tmp_clk < DDR1_timing_clk_max)
913 tmp_clk = DDR1_timing_clk_max;
915 if (tmp_clk < DDR2_timing_clk_max)
916 tmp_clk = DDR2_timing_clk_max;
/* DDR2 extra config; non-AR933x also selects DDR2 pads */
920 #if (SOC_TYPE & QCA_AR933X_SOC)
921 if (mem_type == RAM_MEMORY_TYPE_DDR2)
922 qca_dram_set_ddr2_cfg(cas_lat, tmp_clk);
924 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
925 qca_soc_reg_read_set(QCA_DDR_CTRL_CFG_REG,
926 QCA_DDR_CTRL_CFG_PAD_DDR2_SEL_MASK);
928 qca_dram_set_ddr2_cfg(cas_lat, tmp_clk);
930 qca_soc_reg_read_clear(QCA_DDR_CTRL_CFG_REG,
931 QCA_DDR_CTRL_CFG_PAD_DDR2_SEL_MASK);
935 /* Setup DDR timing related registers */
936 qca_dram_set_ddr_cfg(cas_lat, tmp_clk, mem_type);
937 qca_dram_set_ddr_cfg2(cas_lat, tmp_clk, mem_type, ddr_width);
/* Begin JEDEC-style memory initialization sequence */
940 qca_dram_force_preall();
942 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
943 /* Setup target EMR2 and EMR3 */
944 qca_dram_set_emr2(_ddr_sdram_emr2_val(0, 0, 0));
945 qca_dram_set_emr3(0);
948 /* Enable and reset DLL */
949 qca_dram_set_emr(_ddr_sdram_emr_val(0, 1, 0, 0, 0, 0));
950 qca_dram_set_mr(_ddr_sdram_mr_val(0, 0, 1, 0));
952 /* Precharge all, 2x auto refresh */
953 qca_dram_force_preall();
955 qca_dram_force_aref();
956 qca_dram_force_aref();
958 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
959 /* Setup target MR */
960 wr_recovery = ((DDRx_tWR_ns * tmp_clk) + 1000) / 2000;
961 qca_dram_set_mr(_ddr_sdram_mr_val(0, cas_lat, 0, wr_recovery));
963 /* OCD calibration, target EMR (nDQS disable, weak strength) */
965 _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_DEFAULT_VAL,
969 _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_EXIT_VAL,
972 /* Setup target MR */
973 qca_dram_set_mr(_ddr_sdram_mr_val(0, cas_lat, 0, 0));
976 /* Enable DDR refresh and setup refresh period */
977 qca_dram_set_en_refresh();
980 * At this point memory should be fully configured,
981 * so we can perform delay tap controller tune.
983 qca_ddr_tap_tune(ddr_width);