/*
 * Qualcomm/Atheros WiSoCs DRAM related
 * functions for WiSoC families:
 * - Qualcomm/Atheros QCA953x
 * - Qualcomm/Atheros QCA955x
 * - Qualcomm/Atheros QCA956x
 *
 * Copyright (C) 2016 Piotr Dymacz <piotr@dymacz.pl>
 * Copyright (C) 2015-2016 Wills Wang <wills.wang@live.com>
 * Copyright (C) 2014 Qualcomm Atheros, Inc.
 * Copyright (C) 2008-2010 Atheros Communications Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 */
20 #include <asm/addrspace.h>
21 #include <soc/qca_soc_common.h>
22 #include <soc/qca_dram.h>
24 #define QCA_DDR_SIZE_INCREMENT (8 * 1024 * 1024)
27 * Returns size (in bytes) of the DRAM memory
29 * DDR wraps around, write a pattern to 0x00000000
30 * at 8M, 16M, 32M etc. and check when it gets overwritten
/*
 * Detect DRAM size by exploiting address wrap-around: a tag byte is written
 * at every 8 MiB increment and the code checks where location 0 gets
 * overwritten (see the comment block above this function).
 *
 * NOTE(review): excerpt is incomplete — declarations of 'p'/'i' and the
 * readback/return statements fall on elided lines; verify against full file.
 */
32 u32 qca_dram_size(void)
/* Number of 8 MiB increments to probe, bounded by the SoC's maximum size */
40 #define max_i (QCA_DRAM_MAX_SIZE_VAL / QCA_DDR_SIZE_INCREMENT)
/* Tag each 8 MiB boundary with its own index value */
42 for (i = 1; (i < max_i); i++) {
43 *(p + i * QCA_DDR_SIZE_INCREMENT) = (u8)i;
50 #ifndef CONFIG_SKIP_LOWLEVEL_INIT
/* Size = first wrapped increment, capped at QCA_DRAM_MAX_SIZE_VAL */
52 (i * QCA_DDR_SIZE_INCREMENT) : QCA_DRAM_MAX_SIZE_VAL);
56 * something is wrong with relocation,
57 * need to fix it for boards with > 32M of RAM
59 * For now just return 1 MB smaller size
/* Same computation minus 1 MiB — workaround for the relocation issue above */
62 (i * QCA_DDR_SIZE_INCREMENT) : QCA_DRAM_MAX_SIZE_VAL) - 1024 * 1024;
67 * Return memory type value from BOOT_STRAP register
/*
 * Returns the memory type: either a board-forced value (CONFIG_BOARD_DRAM_TYPE_*)
 * or a value decoded from the BOOT_STRAP register's MEM_TYPE field.
 *
 * NOTE(review): excerpt is incomplete — the switch statement, break/default
 * branches and final return fall on elided lines.
 */
69 u32 qca_dram_type(void)
71 #if defined(CONFIG_BOARD_DRAM_TYPE_SDR)
/* SDR is compile-time rejected; the return below is unreachable */
72 #error "SDRAM is not supported!"
73 return RAM_MEMORY_TYPE_SDR;
74 #elif defined(CONFIG_BOARD_DRAM_TYPE_DDR1)
75 return RAM_MEMORY_TYPE_DDR1;
76 #elif defined(CONFIG_BOARD_DRAM_TYPE_DDR2)
77 return RAM_MEMORY_TYPE_DDR2;
/* No static config: extract MEM_TYPE field from the bootstrap register */
81 dram_type = ((qca_soc_reg_read(QCA_RST_BOOTSTRAP_REG)
82 & QCA_RST_BOOTSTRAP_MEM_TYPE_MASK) >> QCA_RST_BOOTSTRAP_MEM_TYPE_SHIFT);
/* Map raw bootstrap field values to RAM_MEMORY_TYPE_* constants */
85 case QCA_RST_BOOTSTRAP_MEM_TYPE_SDR_VAL:
86 dram_type = RAM_MEMORY_TYPE_SDR;
88 case QCA_RST_BOOTSTRAP_MEM_TYPE_DDR1_VAL:
89 dram_type = RAM_MEMORY_TYPE_DDR1;
91 case QCA_RST_BOOTSTRAP_MEM_TYPE_DDR2_VAL:
92 dram_type = RAM_MEMORY_TYPE_DDR2;
/* Unrecognized bootstrap value (presumably the default: branch) */
95 dram_type = RAM_MEMORY_TYPE_UNKNOWN;
104 * Returns DDR width (16 or 32)
/*
 * Returns the DDR interface width (16 or 32), either from board config or
 * decoded from the bootstrap register's DDR_WIDTH_32 bit.
 *
 * NOTE(review): excerpt is incomplete — the literal 'return 16;'/'return 32;'
 * branches fall on elided lines.
 */
106 u32 qca_dram_ddr_width(void)
108 #ifndef CONFIG_BOARD_DRAM_DDR_WIDTH
/* AR933x case handled separately (it only supports 16-bit memory) */
109 #if (SOC_TYPE & QCA_AR933X_SOC)
/* Bootstrap bit set -> 32-bit interface, otherwise 16-bit */
112 if (qca_soc_reg_read(QCA_RST_BOOTSTRAP_REG)
113 & QCA_RST_BOOTSTRAP_DDR_WIDTH_32_MASK)
119 return CONFIG_BOARD_DRAM_DDR_WIDTH;
124 * Returns CAS latency, based on setting in DDR_CONFIG register
/*
 * Returns CAS latency, either from board config or decoded from the
 * DDR_CONFIG register (3 LSBs plus an MSB flag bit).
 *
 * NOTE(review): excerpt is incomplete — declaration of 'reg' and the
 * final return/halving logic fall on elided lines.
 */
126 u32 qca_dram_cas_lat(void)
128 #ifndef CONFIG_BOARD_DRAM_CAS_LATENCY
/* Extract the 3 low bits of the CAS field */
131 reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_CAS_3LSB_MASK)
132 >> QCA_DDR_CFG_CAS_3LSB_SHIFT;
/* MSB flag extends the CAS field beyond 3 bits */
134 if (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_CAS_MSB_MASK)
137 /* CAS_LATENCY value in DDR_CONFIG register == 2 * MEM_CAS */
140 return CONFIG_BOARD_DRAM_CAS_LATENCY
145 * Returns tRCD latency
/*
 * Returns tRCD latency decoded from the DDR_CONFIG register.
 * NOTE(review): 'reg' declaration and return statement are on elided lines.
 */
147 u32 qca_dram_trcd_lat(void)
151 reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRCD_MASK)
152 >> QCA_DDR_CFG_TRCD_SHIFT;
158 * Returns tRP latency
/*
 * Returns tRP latency decoded from the DDR_CONFIG register.
 * NOTE(review): 'reg' declaration and return statement are on elided lines.
 */
160 u32 qca_dram_trp_lat(void)
164 reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRP_MASK)
165 >> QCA_DDR_CFG_TRP_SHIFT;
171 * Returns tRAS latency
/*
 * Returns tRAS latency decoded from the DDR_CONFIG register.
 * NOTE(review): 'reg' declaration and return statement are on elided lines.
 */
173 u32 qca_dram_tras_lat(void)
177 reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRAS_MASK)
178 >> QCA_DDR_CFG_TRAS_SHIFT;
184 * ===============================================
185 * DQS delay tap controller tune related functions
186 * ===============================================
188 #define DQS_DELAY_TAP_DEFAULT_VAL 8
190 #if (SOC_TYPE & QCA_AR933X_SOC) |\
191 (SOC_TYPE & QCA_AR934X_SOC)
192 #define DQS_DELAY_TAP_MAX_VAL 62
194 #define DQS_DELAY_TAP_MAX_VAL 63
198 * Setup DQS_{0,1,2,3} delay tap control register/s
/*
 * Writes the given delay tap value into the DQS0/DQS1 tap control registers,
 * and into DQS2/DQS3 as well when the DDR interface is 32-bit wide.
 * On AR933x/AR934x the tap is first split across two delay chains
 * (low/high fields) when it exceeds half of the maximum tap value.
 *
 * NOTE(review): excerpt is incomplete — 'tap_h' declaration, closing braces
 * and '#endif' fall on elided lines.
 */
200 static void qca_ddr_tap_save(u32 tap, u32 ddr_width)
202 #if (SOC_TYPE & QCA_AR933X_SOC) |\
203 (SOC_TYPE & QCA_AR934X_SOC)
206 /* It seems that AR93xx SoCs have two delay chains */
207 if (tap > (DQS_DELAY_TAP_MAX_VAL / 2)) {
/* Overflow beyond half-range goes into the high tap field */
208 tap_h = tap - (DQS_DELAY_TAP_MAX_VAL / 2);
209 tap = tap & QCA_DDR_TAP_CTRL_TAP_L_MASK;
210 tap = tap | (tap_h << QCA_DDR_TAP_CTRL_TAP_H_SHIFT);
/* DQS0 and DQS1 always get the (possibly combined) tap value */
214 qca_soc_reg_write(QCA_DDR_TAP_CTRL_0_REG, tap);
215 qca_soc_reg_write(QCA_DDR_TAP_CTRL_1_REG, tap);
217 /* Setup DQS2 and DQS3 only for 32-bit DDR interface width */
218 if (ddr_width == 32) {
219 qca_soc_reg_write(QCA_DDR_TAP_CTRL_2_REG, tap);
220 qca_soc_reg_write(QCA_DDR_TAP_CTRL_3_REG, tap);
225 * Only for AR933x we will use different code
226 * for delay tap controller tune as it seems
227 * that this SoC doesn't have DDR BIST.
229 * Below function is universal, so it should
230 * work also for other QC/A WiSoCs and give
231 * same (or very similar) results. The only
232 * difference is that the DDR BIST based
233 * version seems to be much faster.
235 #if (SOC_TYPE & QCA_AR933X_SOC)
237 #define DQS_DELAY_TAP_PATTERN_OFFSET 0x2000
238 #define DQS_DELAY_TAP_PATTERN_SIZE 0x1000
239 #define DQS_DELAY_TAP_TEST_LOOPS 2
242 * Prepare pattern for further tests
/*
 * Fills a KSEG1 (uncached) region at DQS_DELAY_TAP_PATTERN_OFFSET with a
 * test pattern used by the AR933x tap-tune routine.
 *
 * NOTE(review): excerpt is incomplete — 'addr'/'i'/'j' declarations and the
 * actual pattern-write statements fall on elided lines.
 */
244 static inline void qca_ddr_tap_patt(void)
249 /* Prepare 4M (256 x 4 x 4 bytes) pattern */
250 addr = (void *)KSEG1ADDR(DQS_DELAY_TAP_PATTERN_OFFSET);
252 for (i = 0; i < 256; i++) {
255 for (j = 0; j < 8; j++) {
272 * This function is a modified C version of the original
273 * ar933x_ddr_tap_init() function, written in asm,
274 * included in Atheros (Q)SDK code.
276 * Below is a modified version, partially based on:
277 * https://patchwork.ozlabs.org/patch/569046/
/*
 * AR933x delay tap tuning (no DDR BIST on this SoC): sweeps all tap values,
 * for each one compares the pattern read back through cached (KSEG0) vs
 * uncached (KSEG1) windows, records the contiguous working range
 * [tap_lo, tap_hi], and programs the rounded-up average. Falls back to
 * DQS_DELAY_TAP_DEFAULT_VAL if no working range was found (presumably —
 * the pass/fail bookkeeping falls on elided lines; verify in full file).
 */
279 static void qca_ddr_tap_tune(u32 ddr_width)
281 u32 *addr, *addr_k0, *addr_k1;
282 u32 tap, tap_hi, tap_lo;
/* End-of-pattern sentinel address for the compare loop below */
292 addr = (void *)KSEG1ADDR(DQS_DELAY_TAP_PATTERN_OFFSET
293 + DQS_DELAY_TAP_PATTERN_SIZE);
296 * Idea here is to test all possible tap values, one by one,
297 * starting from the lowest. We are looking for a range within
298 * the written and read back data is the same. We assume here
299 * that the valid tap range is continuous.
301 * From hardware POV, delay tap controller is used to adjust
304 for (tap = 0; tap <= DQS_DELAY_TAP_MAX_VAL; tap++) {
/* Apply candidate tap before testing it */
305 qca_ddr_tap_save(tap, ddr_width);
309 for (i = 0; i < DQS_DELAY_TAP_TEST_LOOPS; i++) {
/* Compare uncached (KSEG1) vs cached (KSEG0) views of the pattern */
310 addr_k1 = (void *)KSEG1ADDR(DQS_DELAY_TAP_PATTERN_OFFSET);
311 addr_k0 = (void *)KSEG0ADDR(DQS_DELAY_TAP_PATTERN_OFFSET);
313 while (addr_k1 < addr) {
314 if (*addr_k1++ != *addr_k0++) {
341 /* Calculate final tap value (rounded up average) */
343 tap = (tap_hi + tap_lo + 1) / 2;
/* No valid range found: use the safe default tap */
345 tap = DQS_DELAY_TAP_DEFAULT_VAL;
348 qca_ddr_tap_save(tap, ddr_width);
351 #else /* SOC_TYPE & QCA_AR933X_SOC */
353 #define DQS_DELAY_TAP_TEST_LOOPS 8
356 * Unknown magic values and registers from Atheros (Q)SDK.
358 * It looks like some test patterns and masks setup,
359 * but it's not confirmed. Used here values are
360 * different, but were tested on real hardware.
/*
 * Programs DDR BIST compare/mask register pairs with alternating
 * 0xAAAAAAAA / 0x55555555 patterns before each BIST run. Register meaning
 * is undocumented (values taken from Atheros (Q)SDK, see comment above).
 */
362 static inline void qca_ddr_tap_bist_init(void)
364 qca_soc_reg_write(QCA_DDR_PERF_COMP_AHB_GE0_0_REG, 0xAAAAAAAA);
365 qca_soc_reg_write(QCA_DDR_PERF_MASK_AHB_GE0_0_REG, 0xAAAAAAAA);
367 qca_soc_reg_write(QCA_DDR_PERF_COMP_AHB_GE0_1_REG, 0x55555555);
368 qca_soc_reg_write(QCA_DDR_PERF_MASK_AHB_GE0_1_REG, 0x55555555);
370 qca_soc_reg_write(QCA_DDR_PERF_COMP_AHB_GE1_0_REG, 0xAAAAAAAA);
371 qca_soc_reg_write(QCA_DDR_PERF_MASK_AHB_GE1_0_REG, 0xAAAAAAAA);
373 qca_soc_reg_write(QCA_DDR_PERF_COMP_AHB_GE1_1_REG, 0x55555555);
374 qca_soc_reg_write(QCA_DDR_PERF_MASK_AHB_GE1_1_REG, 0x55555555);
378 * This function is a modified C version of the original
379 * ath_ddr_tap_cal() function, written in asm,
380 * included in Atheros (Q)SDK code.
382 * It seems that newer QC/A WiSoCs have some kind of
383 * built-in self-test (BIST) for DDR controller, but
384 * none of the used registers or their values are
385 * described in datasheets, so for now, we will just
386 * use them as in original code.
388 * Below is a modified version, partially based on:
389 * https://patchwork.ozlabs.org/patch/569047/
/*
 * DDR-BIST based delay tap tuning (non-AR933x SoCs): sweeps all tap values,
 * runs the hardware BIST for each, reads the fail counter, and programs the
 * rounded-up average of the passing range [tap_lo, tap_hi]. Falls back to
 * DQS_DELAY_TAP_DEFAULT_VAL when no tap passed (presumably — the lo/hi
 * tracking falls on elided lines; verify in full file).
 */
391 static void qca_ddr_tap_tune(u32 ddr_width)
393 u32 tap, tap_hi, tap_lo;
394 u32 fail, got_lo, reg;
400 /* How many test loops per tested tap value */
401 qca_soc_reg_write(QCA_DDR_PERF_COMP_ADDR_1_REG,
402 (DQS_DELAY_TAP_TEST_LOOPS
403 << QCA_DDR_PERF_COMP_ADDR_1_TEST_CNT_SHIFT));
406 * Unknown magic value, original comment:
407 * "4 Row Address Bits, 4 Column Address Bits, 2 BA bits"
409 qca_soc_reg_write(QCA_DDR_PERF_MASK_ADDR_0_REG, 0xFA5DE83F);
412 * Test all possible tap values, try to find working range
413 * (minimum and maximum delays) and use average value
415 for (tap = 0; tap <= DQS_DELAY_TAP_MAX_VAL; tap++) {
/* Apply candidate tap, re-arm compare/mask patterns, then run BIST */
416 qca_ddr_tap_save(tap, ddr_width);
418 qca_ddr_tap_bist_init();
420 /* Enable BIST test and wait for finish */
421 qca_soc_reg_write(QCA_DDR_BIST_REG, QCA_DDR_BIST_TEST_EN_MASK);
/* Busy-wait for the DONE bit (do/while — opening line elided) */
424 reg = qca_soc_reg_read(QCA_DDR_BIST_STATUS_REG);
425 } while (!(reg & QCA_DDR_BIST_STATUS_DONE_MASK));
427 /* Disable BIST test */
428 qca_soc_reg_write(QCA_DDR_BIST_REG, 0);
430 /* Check how many tests failed */
431 fail = (reg & QCA_DDR_BIST_STATUS_FAIL_CNT_MASK)
432 >> QCA_DDR_BIST_STATUS_FAIL_CNT_SHIFT;
451 /* Calculate final tap value (rounded up average) */
453 tap = (tap_hi + tap_lo + 1) / 2;
/* No passing tap found: use the safe default */
455 tap = DQS_DELAY_TAP_DEFAULT_VAL;
458 qca_ddr_tap_save(tap, ddr_width);
461 #endif /* SOC_TYPE & QCA_AR933X_SOC */
464 * ===============================================
465 * DDR controller initialization related functions
466 * ===============================================
470 * Below defines are "safe" DDR1/DDR2 timing parameters.
471 * They should work for most chips, but not for all.
473 * For different values, user can define target value
474 * of all memory controller related registers.
477 #define DDRx_tMRD_ns 10
478 #define DDRx_tRAS_ns 40
479 #define DDRx_tRCD_ns 15
480 #define DDRx_tRP_ns 15
481 #define DDRx_tRRD_ns 10
482 #define DDRx_tWR_ns 15
483 #define DDRx_tWTR_ns 10
485 #define DDR1_tRFC_ns 75
486 #define DDR2_tRFC_ns 120
488 #define DDR2_tFAW_ns 50
489 #define DDR2_tWL_ns 5
491 #define DDR_addit_lat 0
492 #define DDR_burst_len 8
494 /* All above values are safe for clocks not lower than below values */
495 #define DDR1_timing_clk_max 400
496 #define DDR2_timing_clk_max 533
498 /* Maximum timing values, based on register fields sizes */
499 #define MAX_tFAW BITS(0, 6)
500 #define MAX_tMRD BITS(0, 4)
501 #define MAX_tRAS BITS(0, 5)
502 #define MAX_tRCD BITS(0, 4)
503 #define MAX_tRFC BITS(0, 6)
504 #define MAX_tRP BITS(0, 4)
505 #define MAX_tRRD BITS(0, 4)
506 #define MAX_tRTP BITS(0, 4)
507 #define MAX_tRTW BITS(0, 5)
508 #define MAX_tWL BITS(0, 4)
509 #define MAX_tWR BITS(0, 4)
510 #define MAX_tWTR BITS(0, 5)
513 * Setup DDR_CONFIG register
/*
 * Computes and writes the DDR_CONFIG register: page-close policy, CAS field,
 * and tMRD/tRFC/tRRD/tRP/tRCD/tRAS fields derived from the "safe"
 * nanosecond constants and the DDR clock (ns * MHz, rounded up via +500/1000).
 * A board may bypass all of this with CONFIG_QCA_DDR_CFG_REG_VAL.
 *
 * NOTE(review): excerpt is incomplete — remaining parameters of the
 * signature, 'reg'/'tmp' declarations, MAX_t* clamping and the 'reg |= tmp'
 * merge lines fall on elided lines.
 */
515 static inline void qca_dram_set_ddr_cfg(u32 mem_cas,
519 #ifndef CONFIG_QCA_DDR_CFG_REG_VAL
/* Start from current register content, then patch individual fields */
523 reg = qca_soc_reg_read(QCA_DDR_CFG_REG);
525 /* Always use page close policy */
526 reg = reg | QCA_DDR_CFG_PAGE_CLOSE_MASK;
528 /* CAS should be (2 * MEM_CAS) or (2 * MEM_CAS) + 1/2/3 */
530 tmp = (tmp << QCA_DDR_CFG_CAS_3LSB_SHIFT) & QCA_DDR_CFG_CAS_3LSB_MASK;
/* CAS values above the 3-bit range need the MSB flag */
532 tmp = tmp | QCA_DDR_CFG_CAS_MSB_MASK;
535 reg = reg & ~QCA_DDR_CFG_CAS_3LSB_MASK;
539 * Calculate rest of timing related values,
540 * always round up to closest integer
/* tMRD in clock cycles */
544 tmp = ((DDRx_tMRD_ns * ddr_clk) + 500) / 1000;
548 tmp = (tmp << QCA_DDR_CFG_TMRD_SHIFT) & QCA_DDR_CFG_TMRD_MASK;
549 reg = reg & ~QCA_DDR_CFG_TMRD_MASK;
/* tRFC differs between DDR2 and DDR1 */
553 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
554 tmp = ((DDR2_tRFC_ns * ddr_clk) + 500) / 1000;
556 tmp = ((DDR1_tRFC_ns * ddr_clk) + 500) / 1000;
562 tmp = (tmp << QCA_DDR_CFG_TRFC_SHIFT) & QCA_DDR_CFG_TRFC_MASK;
563 reg = reg & ~QCA_DDR_CFG_TRFC_MASK;
/* tRRD */
567 tmp = ((DDRx_tRRD_ns * ddr_clk) + 500) / 1000;
571 tmp = (tmp << QCA_DDR_CFG_TRRD_SHIFT) & QCA_DDR_CFG_TRRD_MASK;
572 reg = reg & ~QCA_DDR_CFG_TRRD_MASK;
/* tRP */
576 tmp = ((DDRx_tRP_ns * ddr_clk) + 500) / 1000;
580 tmp = (tmp << QCA_DDR_CFG_TRP_SHIFT) & QCA_DDR_CFG_TRP_MASK;
581 reg = reg & ~QCA_DDR_CFG_TRP_MASK;
/* tRCD */
585 tmp = ((DDRx_tRCD_ns * ddr_clk) + 500) / 1000;
589 tmp = (tmp << QCA_DDR_CFG_TRCD_SHIFT) & QCA_DDR_CFG_TRCD_MASK;
590 reg = reg & ~QCA_DDR_CFG_TRCD_MASK;
/* tRAS */
594 tmp = ((DDRx_tRAS_ns * ddr_clk) + 500) / 1000;
598 tmp = (tmp << QCA_DDR_CFG_TRAS_SHIFT) & QCA_DDR_CFG_TRAS_MASK;
599 reg = reg & ~QCA_DDR_CFG_TRAS_MASK;
602 qca_soc_reg_write(QCA_DDR_CFG_REG, reg);
/* Board provided a fixed register value: write it verbatim */
604 qca_soc_reg_write(QCA_DDR_CFG_REG, CONFIG_QCA_DDR_CFG_REG_VAL);
609 * Setup DDR_CONFIG2 register
/*
 * Computes and writes the DDR_CONFIG2 register: CKE enable, gate-open
 * latency, tWTR/tRTP/tRTW/tWR fields (DDR1 vs DDR2 formulas differ) and
 * burst length/type. Bypassed entirely by CONFIG_QCA_DDR_CFG2_REG_VAL.
 *
 * NOTE(review): excerpt is incomplete — remaining signature parameters,
 * declarations, clamping and 'reg |= tmp' merge lines are elided.
 */
611 static inline void qca_dram_set_ddr_cfg2(u32 mem_cas,
616 #ifndef CONFIG_QCA_DDR_CFG2_REG_VAL
620 reg = qca_soc_reg_read(QCA_DDR_CFG2_REG);
/* Keep clock-enable asserted */
623 reg = reg | QCA_DDR_CFG2_CKE_MASK;
625 /* Gate open latency = 2 * MEM_CAS */
627 tmp = (tmp << QCA_DDR_CFG2_GATE_OPEN_LATENCY_SHIFT)
628 & QCA_DDR_CFG2_GATE_OPEN_LATENCY_MASK;
629 reg = reg & ~QCA_DDR_CFG2_GATE_OPEN_LATENCY_MASK;
/* tWTR formula depends on memory type */
633 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
634 /* tWTR = 2 * WL + BL + 2 * max(tWTR/tCK, 2) */
635 tmp = 2 * (mem_cas + DDR_addit_lat - 1) + DDR_burst_len + 4;
640 /* tWTR = 2 + BL + (2 * tWTR/tCK) */
641 tmp = 2 + DDR_burst_len + (((DDRx_tWTR_ns * ddr_clk) + 500) / 1000);
647 tmp = (tmp << QCA_DDR_CFG2_TWTR_SHIFT) & QCA_DDR_CFG2_TWTR_MASK;
648 reg = reg & ~QCA_DDR_CFG2_TWTR_MASK;
/* tRTP — value computation for the 32-bit case is on elided lines */
652 if (ddr_width == 32) {
658 tmp = (tmp << QCA_DDR_CFG2_TRTP_SHIFT) & QCA_DDR_CFG2_TRTP_MASK;
659 reg = reg & ~QCA_DDR_CFG2_TRTP_MASK;
/* tRTW formula also depends on memory type */
663 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
664 /* tRTW = 2 * (RL + BL/2 + 1 -WL), RL = CL + AL, WL = RL - 1 */
665 tmp = DDR_burst_len + 4;
667 /* tRTW = 2 * (CL + BL/2) */
668 tmp = DDR_burst_len + (2 * mem_cas);
674 tmp = (tmp << QCA_DDR_CFG2_TRTW_SHIFT) & QCA_DDR_CFG2_TRTW_MASK;
675 reg = reg & ~QCA_DDR_CFG2_TRTW_MASK;
/* tWR in clock cycles, rounded up */
679 tmp = ((DDRx_tWR_ns * ddr_clk) + 500) / 1000;
683 tmp = (tmp << QCA_DDR_CFG2_TWR_SHIFT) & QCA_DDR_CFG2_TWR_MASK;
684 reg = reg & ~QCA_DDR_CFG2_TWR_MASK;
687 /* Always use burst length = 8 and type: sequential */
688 tmp = (DDR_burst_len << QCA_DDR_CFG2_BURST_LEN_SHIFT)
689 & QCA_DDR_CFG2_BURST_LEN_MASK;
690 reg = reg & ~(QCA_DDR_CFG2_BURST_LEN_MASK
691 | QCA_DDR_CFG2_BURST_TYPE_MASK);
694 qca_soc_reg_write(QCA_DDR_CFG2_REG, reg);
/* Board provided a fixed register value: write it verbatim */
696 qca_soc_reg_write(QCA_DDR_CFG2_REG, CONFIG_QCA_DDR_CFG2_REG_VAL);
701 * Setup DDR2_CONFIG register (only for DDR2)
/*
 * Computes and writes the DDR2_CONFIG register (DDR2 only): enable bit,
 * tFAW field from DDR2_tFAW_ns, and tWL derived from CAS (2*CAS - 3).
 * Bypassed entirely by CONFIG_QCA_DDR_DDR2_CFG_REG_VAL.
 *
 * NOTE(review): excerpt is incomplete — remaining signature parameters,
 * declarations, the AR933x odd-value adjustment and 'reg |= tmp' lines
 * are elided.
 */
703 static inline void qca_dram_set_ddr2_cfg(u32 mem_cas,
706 #ifndef CONFIG_QCA_DDR_DDR2_CFG_REG_VAL
710 reg = qca_soc_reg_read(QCA_DDR_DDR2_CFG_REG);
/* Enable DDR2 mode in the controller */
713 reg = reg | QCA_DDR_DDR2_CFG_DDR2_EN_MASK;
/* tFAW in clock cycles, rounded up */
716 tmp = ((DDR2_tFAW_ns * ddr_clk) + 500) / 1000;
720 tmp = (tmp << QCA_DDR_DDR2_CFG_DDR2_TFAW_SHIFT)
721 & QCA_DDR_DDR2_CFG_DDR2_TFAW_MASK;
722 reg = reg & ~QCA_DDR_DDR2_CFG_DDR2_TFAW_MASK;
/* Write latency derived from CAS */
726 tmp = (2 * mem_cas) - 3;
728 /* For some reason, odd value doesn't work on AR933x (FIXME) */
729 #if (SOC_TYPE & QCA_AR933X_SOC)
734 tmp = (tmp << QCA_DDR_DDR2_CFG_DDR2_TWL_SHIFT)
735 & QCA_DDR_DDR2_CFG_DDR2_TWL_MASK;
736 reg = reg & ~QCA_DDR_DDR2_CFG_DDR2_TWL_MASK;
739 qca_soc_reg_write(QCA_DDR_DDR2_CFG_REG, reg);
/* Board provided a fixed register value: write it verbatim */
741 qca_soc_reg_write(QCA_DDR_DDR2_CFG_REG, CONFIG_QCA_DDR_DDR2_CFG_REG_VAL);
746 * Enables DDR refresh and sets
747 * refresh period based on XTAL
/*
 * Enables DDR auto-refresh with a period matched to the crystal frequency
 * (312 cycles for 40 MHz XTAL, 195 for 25 MHz), targeting the 7.8 us
 * average refresh interval explained in the comment below.
 */
749 static inline void qca_dram_set_en_refresh(void)
752 * Enable DDR refresh and setup refresh period:
753 * 1. We assume 7.8 us maximum average period refresh interval
754 * 2. 7.8 us ~= 0.1282 MHz
755 * 3. For 25 MHz XTAL: (25 / 0.1282) ~= 195
756 * 4. For 40 MHz XTAL: (40 / 0.1282) ~= 312
758 if (qca_xtal_is_40mhz()) {
759 qca_soc_reg_write(QCA_DDR_REFRESH_REG,
760 QCA_DDR_REFRESH_EN_MASK
761 | (312 << QCA_DDR_REFRESH_PERIOD_SHIFT));
763 qca_soc_reg_write(QCA_DDR_REFRESH_REG,
764 QCA_DDR_REFRESH_EN_MASK
765 | (195 << QCA_DDR_REFRESH_PERIOD_SHIFT));
770 * Initial DRAM configuration
/*
 * Top-level DRAM initialization: detects memory type and clocks, selects CAS
 * latency, configures interface width, CPU/DDR sync mode, timing registers,
 * then performs the JEDEC-style mode-register init sequence (EMR/MR writes,
 * precharge-all, auto-refresh), enables refresh and finally tunes the DQS
 * delay taps.
 *
 * NOTE(review): excerpt is incomplete — many branch bodies (CAS selection
 * values, #else/#endif pairs, tmp computation for sync mode, tmp_clk setup)
 * fall on elided lines; hedged comments below mark the visible intent only.
 */
772 void qca_dram_init(void)
774 u32 ahb_clk, cpu_clk, ddr_clk, mem_type, tmp_clk;
775 u32 cas_lat, ddr_width, reg, tmp, wr_recovery;
777 mem_type = qca_dram_type();
/* Fetch clocks and convert Hz -> MHz for the timing math below */
779 qca_sys_clocks(&cpu_clk, &ddr_clk, &ahb_clk, NULL, NULL);
780 cpu_clk = cpu_clk / 1000000;
781 ddr_clk = ddr_clk / 1000000;
782 ahb_clk = ahb_clk / 1000000;
784 /* Set CAS based on clock, but allow to set static value */
785 #ifndef CONFIG_BOARD_DRAM_CAS_LATENCY
786 if (mem_type == RAM_MEMORY_TYPE_DDR1) {
787 if (ddr_clk <= 266) {
/* DDR2 ladder: CAS values per clock bin are on elided lines */
793 if (ddr_clk <= 400) {
795 } else if (ddr_clk <= 533) {
797 } else if (ddr_clk <= 666) {
799 } else if (ddr_clk <= 800) {
806 cas_lat = CONFIG_BOARD_DRAM_CAS_LATENCY;
809 #if (SOC_TYPE & QCA_AR933X_SOC)
810 /* AR933x supports only 16-bit memory */
812 qca_soc_reg_write(QCA_DDR_RD_DATA_THIS_CYCLE_REG, 0xFF);
814 /* For other WiSoCs we can determine DDR width, based on bootstrap */
815 ddr_width = qca_dram_ddr_width();
817 if (ddr_width == 32) {
818 /* For 32-bit clear HALF_WIDTH and set VEC = 0xFF */
819 qca_soc_reg_read_clear(QCA_DDR_CTRL_CFG_REG,
820 QCA_DDR_CTRL_CFG_HALF_WIDTH_MASK);
822 qca_soc_reg_write(QCA_DDR_RD_DATA_THIS_CYCLE_REG, 0xFF);
/* 16-bit path: set HALF_WIDTH and VEC = 0xFFFF */
824 qca_soc_reg_read_set(QCA_DDR_CTRL_CFG_REG,
825 QCA_DDR_CTRL_CFG_HALF_WIDTH_MASK);
827 qca_soc_reg_write(QCA_DDR_RD_DATA_THIS_CYCLE_REG, 0xFFFF);
830 /* If DDR_CLK < 2 * AHB_CLK, set DDR FSM wait control to 0xA24 */
831 if (ddr_clk < (2 * ahb_clk))
832 qca_soc_reg_write(QCA_DDR_FSM_WAIT_CTRL_REG, 0xA24);
836 * CPU/DDR sync mode only when we don't use
837 * fractional multipliers in PLL/clocks config
841 #if (SOC_TYPE & QCA_AR933X_SOC)
/* Read fractional (dither) fields to detect fractional multipliers */
842 reg = qca_soc_reg_read(QCA_PLL_CPU_PLL_DITHER_FRAC_REG);
843 reg = (reg & QCA_PLL_CPU_PLL_DITHER_FRAC_NFRAC_MIN_MASK)
844 >> QCA_PLL_CPU_PLL_DITHER_FRAC_NFRAC_MIN_SHIFT;
849 reg = qca_soc_reg_read(QCA_PLL_CPU_PLL_DITHER_REG);
850 reg = (reg & QCA_PLL_CPU_PLL_DITHER_NFRAC_MIN_MASK)
851 >> QCA_PLL_CPU_PLL_DITHER_NFRAC_MIN_SHIFT;
856 reg = qca_soc_reg_read(QCA_PLL_DDR_PLL_DITHER_REG);
857 reg = (reg & QCA_PLL_DDR_PLL_DITHER_NFRAC_MIN_MASK)
858 >> QCA_PLL_DDR_PLL_DITHER_NFRAC_MIN_SHIFT;
/* Sync mode only if no fractional part and CPU/DDR clocks are equal */
864 if (!tmp && (cpu_clk == ddr_clk)) {
865 #if (SOC_TYPE & QCA_AR933X_SOC)
866 qca_soc_reg_read_set(QCA_DDR_TAP_CTRL_3_REG, (1 << 8));
868 qca_soc_reg_read_set(QCA_DDR_CTRL_CFG_REG,
869 QCA_DDR_CTRL_CFG_CPU_DDR_SYNC_MASK);
872 #if (SOC_TYPE & QCA_AR933X_SOC)
873 qca_soc_reg_read_clear(QCA_DDR_TAP_CTRL_3_REG, (1 << 8));
875 qca_soc_reg_read_clear(QCA_DDR_CTRL_CFG_REG,
876 QCA_DDR_CTRL_CFG_CPU_DDR_SYNC_MASK);
880 /* Check if clock is not too low for our "safe" timing values */
882 if (mem_type == RAM_MEMORY_TYPE_DDR1) {
883 if (tmp_clk < DDR1_timing_clk_max)
884 tmp_clk = DDR1_timing_clk_max;
886 if (tmp_clk < DDR2_timing_clk_max)
887 tmp_clk = DDR2_timing_clk_max;
/* DDR2-specific controller setup (pad select differs on AR933x) */
891 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
892 #if (SOC_TYPE & QCA_AR933X_SOC)
893 qca_dram_set_ddr2_cfg(cas_lat, tmp_clk);
895 qca_soc_reg_write(QCA_DDR_CTRL_CFG_REG,
896 QCA_DDR_CTRL_CFG_PAD_DDR2_SEL_MASK);
898 qca_dram_set_ddr2_cfg(cas_lat, tmp_clk);
903 /* Setup DDR timing related registers */
904 qca_dram_set_ddr_cfg(cas_lat, tmp_clk, mem_type);
905 qca_dram_set_ddr_cfg2(cas_lat, tmp_clk, mem_type, ddr_width);
908 qca_dram_force_preall();
910 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
911 /* Setup target EMR2 and EMR3 */
912 qca_dram_set_emr2(_ddr_sdram_emr2_val(0, 0, 0));
913 qca_dram_set_emr3(0);
916 /* Enable and reset DLL */
917 qca_dram_set_emr(_ddr_sdram_emr_val(0, 1, 0, 0, 0, 0));
918 qca_dram_set_mr(_ddr_sdram_mr_val(0, 0, 1, 0));
920 /* Precharge all, 2x auto refresh */
921 qca_dram_force_preall();
923 qca_dram_force_aref();
924 qca_dram_force_aref();
926 if (mem_type == RAM_MEMORY_TYPE_DDR2) {
927 /* Setup target MR */
928 wr_recovery = ((DDRx_tWR_ns * tmp_clk) + 1000) / 2000;
929 qca_dram_set_mr(_ddr_sdram_mr_val(0, cas_lat, 0, wr_recovery));
931 /* OCD calibration, target EMR (nDQS disable, weak strength) */
933 _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_DEFAULT_VAL, 1, 0, 0));
936 _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_EXIT_VAL, 1, 0, 0));
938 /* Setup target MR */
939 qca_dram_set_mr(_ddr_sdram_mr_val(0, cas_lat, 0, 0));
942 /* Enable DDR refresh and setup refresh period */
943 qca_dram_set_en_refresh();
946 * At this point memory should be fully configured,
947 * so we can perform delay tap controller tune.
949 qca_ddr_tap_tune(ddr_width);