#ifndef CONFIG_SKIP_LOWLEVEL_INIT
return ((i < max_i) ?
- (i * QCA_DDR_SIZE_INCREMENT) : QCA_DRAM_MAX_SIZE_VAL);
+ (i * QCA_DDR_SIZE_INCREMENT) : QCA_DRAM_MAX_SIZE_VAL);
#else
/*
* TODO:
 * For now, just return a size that is 1 MB smaller
*/
return ((i < max_i) ?
- (i * QCA_DDR_SIZE_INCREMENT) : QCA_DRAM_MAX_SIZE_VAL) - 1024 * 1024;
+ (i * QCA_DDR_SIZE_INCREMENT) : QCA_DRAM_MAX_SIZE_VAL) - 1024 * 1024;
#endif
}
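/*
 * A worked example of the two paths above, assuming for illustration
 * that QCA_DDR_SIZE_INCREMENT is 8 MiB (the value is not shown in
 * this hunk): if probing stops at i == 4, the normal path returns
 * 4 * 8 MiB = 32 MiB, while the CONFIG_SKIP_LOWLEVEL_INIT path
 * returns 32 MiB - 1 MiB = 31 MiB, keeping the topmost megabyte out
 * of the reported size.
 */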
u32 dram_type;
dram_type = ((qca_soc_reg_read(QCA_RST_BOOTSTRAP_REG)
- & QCA_RST_BOOTSTRAP_MEM_TYPE_MASK) >> QCA_RST_BOOTSTRAP_MEM_TYPE_SHIFT);
+ & QCA_RST_BOOTSTRAP_MEM_TYPE_MASK)
+ >> QCA_RST_BOOTSTRAP_MEM_TYPE_SHIFT);
switch (dram_type) {
case QCA_RST_BOOTSTRAP_MEM_TYPE_SDR_VAL:
return 16;
#else
if (qca_soc_reg_read(QCA_RST_BOOTSTRAP_REG)
- & QCA_RST_BOOTSTRAP_DDR_WIDTH_32_MASK)
+ & QCA_RST_BOOTSTRAP_DDR_WIDTH_32_MASK)
return 32;
return 16;
u32 reg;
reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_CAS_3LSB_MASK)
- >> QCA_DDR_CFG_CAS_3LSB_SHIFT;
+ >> QCA_DDR_CFG_CAS_3LSB_SHIFT;
if (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_CAS_MSB_MASK)
reg = reg + 8;
/* CAS_LATENCY value in DDR_CONFIG register == 2 * MEM_CAS */
return reg / 2;
#else
- return CONFIG_BOARD_DRAM_CAS_LATENCY
+ return CONFIG_BOARD_DRAM_CAS_LATENCY;
#endif
}
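/*
 * Worked example of the CAS decode above: if the 3 LSB field reads 2
 * and the MSB bit is set, reg = 2 + 8 = 10, and since DDR_CONFIG
 * stores 2 * MEM_CAS, the function returns 10 / 2 = 5.
 */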
u32 reg;
reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRCD_MASK)
- >> QCA_DDR_CFG_TRCD_SHIFT;
+ >> QCA_DDR_CFG_TRCD_SHIFT;
return reg / 2;
}
u32 reg;
reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRP_MASK)
- >> QCA_DDR_CFG_TRP_SHIFT;
+ >> QCA_DDR_CFG_TRP_SHIFT;
return reg / 2;
}
u32 reg;
reg = (qca_soc_reg_read(QCA_DDR_CFG_REG) & QCA_DDR_CFG_TRAS_MASK)
- >> QCA_DDR_CFG_TRAS_SHIFT;
+ >> QCA_DDR_CFG_TRAS_SHIFT;
return reg / 2;
}
* DQS delay tap controller tuning functions
* =========================================
*/
-#define DQS_DELAY_TAP_DEFAULT_VAL 8
+#define DQS_DELAY_TAP_DEFAULT_VAL 8
#if (SOC_TYPE & QCA_AR933X_SOC) |\
- (SOC_TYPE & QCA_AR934X_SOC)
- #define DQS_DELAY_TAP_MAX_VAL 62
+ (SOC_TYPE & QCA_AR934X_SOC)
+ #define DQS_DELAY_TAP_MAX_VAL 62
#else
- #define DQS_DELAY_TAP_MAX_VAL 63
+ #define DQS_DELAY_TAP_MAX_VAL 63
#endif
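/*
 * A minimal sketch of the sweep the tuning code below performs. The
 * helpers qca_ddr_tap_set() and qca_ddr_tap_bist() are illustrative
 * stand-ins only (they do not exist in this code); the real register
 * sequence follows in the next hunks:
 *
 *	u32 tap, tap_lo = 0, tap_hi = 0, got_lo = 0;
 *
 *	for (tap = 0; tap <= DQS_DELAY_TAP_MAX_VAL; tap++) {
 *		qca_ddr_tap_set(tap);		// program delay chain(s)
 *		if (qca_ddr_tap_bist() == 0) {	// pattern test passed
 *			if (!got_lo) {
 *				tap_lo = tap;
 *				got_lo = 1;
 *			}
 *			tap_hi = tap;
 *		} else if (got_lo) {
 *			break;			// passing window ended
 *		}
 *	}
 *
 *	// One plausible final pick: the middle of the passing window
 *	qca_ddr_tap_save((tap_lo + tap_hi) / 2, ddr_width);
 */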
/*
static void qca_ddr_tap_save(u32 tap, u32 ddr_width)
{
#if (SOC_TYPE & QCA_AR933X_SOC) |\
- (SOC_TYPE & QCA_AR934X_SOC)
+ (SOC_TYPE & QCA_AR934X_SOC)
u32 tap_h;
/* It seems that AR93xx SoCs have two delay chains */
#if (SOC_TYPE & QCA_AR933X_SOC)
#define DQS_DELAY_TAP_PATTERN_OFFSET 0x2000
- #define DQS_DELAY_TAP_PATTERN_SIZE 0x1000
- #define DQS_DELAY_TAP_TEST_LOOPS 2
+ #define DQS_DELAY_TAP_PATTERN_SIZE 0x1000
+ #define DQS_DELAY_TAP_TEST_LOOPS 2
/*
* Prepare pattern for further tests
tap_lo = 0;
addr = (void *)KSEG1ADDR(DQS_DELAY_TAP_PATTERN_OFFSET
- + DQS_DELAY_TAP_PATTERN_SIZE);
+ + DQS_DELAY_TAP_PATTERN_SIZE);
/*
 * The idea here is to test all possible tap values, one by one,
/* How many test loops per tested tap value */
qca_soc_reg_write(QCA_DDR_PERF_COMP_ADDR_1_REG,
- (DQS_DELAY_TAP_TEST_LOOPS
- << QCA_DDR_PERF_COMP_ADDR_1_TEST_CNT_SHIFT));
+ (DQS_DELAY_TAP_TEST_LOOPS
+ << QCA_DDR_PERF_COMP_ADDR_1_TEST_CNT_SHIFT));
/*
* Unknown magic value, original comment:
/* Check how many tests failed */
fail = (reg & QCA_DDR_BIST_STATUS_FAIL_CNT_MASK)
- >> QCA_DDR_BIST_STATUS_FAIL_CNT_SHIFT;
+ >> QCA_DDR_BIST_STATUS_FAIL_CNT_SHIFT;
if (fail == 0) {
if (!got_lo) {
#define DDRx_tMRD_ns 10
#define DDRx_tRAS_ns 40
#define DDRx_tRCD_ns 15
-#define DDRx_tRP_ns 15
+#define DDRx_tRP_ns 15
#define DDRx_tRRD_ns 10
-#define DDRx_tWR_ns 15
+#define DDRx_tWR_ns 15
#define DDRx_tWTR_ns 10
#define DDR1_tRFC_ns 75
#define DDR2_tRFC_ns 120
#define DDR2_tFAW_ns 50
-#define DDR2_tWL_ns 5
+#define DDR2_tWL_ns 5
#define DDR_addit_lat 0
#define DDR_burst_len 8
/* All above values are safe for clocks not lower than the ones below */
-#define DDR1_timing_clk_max 400
-#define DDR2_timing_clk_max 533
+#define DDR1_timing_clk_max 400
+#define DDR2_timing_clk_max 533
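/*
 * The nanosecond values above are turned into clock cycles with the
 * round-to-nearest idiom used throughout this file:
 * ((ns * ddr_clk_in_MHz) + 500) / 1000. For example, tRCD = 15 ns at
 * 400 MHz gives (15 * 400 + 500) / 1000 = 6 cycles, and tWTR = 10 ns
 * at 533 MHz gives (10 * 533 + 500) / 1000 = 5 cycles.
 */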
/* Maximum timing values, based on register fields sizes */
-#define MAX_tFAW BITS(0, 6)
-#define MAX_tMRD BITS(0, 4)
-#define MAX_tRAS BITS(0, 5)
-#define MAX_tRCD BITS(0, 4)
-#define MAX_tRFC BITS(0, 6)
-#define MAX_tRP BITS(0, 4)
-#define MAX_tRRD BITS(0, 4)
-#define MAX_tRTP BITS(0, 4)
-#define MAX_tRTW BITS(0, 5)
-#define MAX_tWL BITS(0, 4)
-#define MAX_tWR BITS(0, 4)
-#define MAX_tWTR BITS(0, 5)
+#define MAX_tFAW BITS(0, 6)
+#define MAX_tMRD BITS(0, 4)
+#define MAX_tRAS BITS(0, 5)
+#define MAX_tRCD BITS(0, 4)
+#define MAX_tRFC BITS(0, 6)
+#define MAX_tRP BITS(0, 4)
+#define MAX_tRRD BITS(0, 4)
+#define MAX_tRTP BITS(0, 4)
+#define MAX_tRTW BITS(0, 5)
+#define MAX_tWL BITS(0, 4)
+#define MAX_tWR BITS(0, 4)
+#define MAX_tWTR BITS(0, 5)
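/*
 * Assuming BITS(_pos, _cnt) expands to a _cnt-bit wide mask starting
 * at bit _pos (as used elsewhere in this code base), MAX_tFAW =
 * BITS(0, 6) = 0x3F = 63: each computed timing value is later clamped
 * to what its register field can physically hold.
 */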
/*
* Setup DDR_CONFIG register
*/
static inline void qca_dram_set_ddr_cfg(u32 mem_cas,
- u32 ddr_clk,
- u32 mem_type)
+ u32 ddr_clk,
+ u32 mem_type)
{
#ifndef CONFIG_QCA_DDR_CFG_REG_VAL
u32 reg = 0;
* Setup DDR_CONFIG2 register
*/
static inline void qca_dram_set_ddr_cfg2(u32 mem_cas,
- u32 ddr_clk,
- u32 mem_type,
- u32 ddr_width)
+ u32 ddr_clk,
+ u32 mem_type,
+ u32 ddr_width)
{
#ifndef CONFIG_QCA_DDR_CFG2_REG_VAL
u32 reg = 0;
/* Gate open latency = 2 * MEM_CAS */
tmp = 2 * mem_cas;
tmp = (tmp << QCA_DDR_CFG2_GATE_OPEN_LATENCY_SHIFT)
- & QCA_DDR_CFG2_GATE_OPEN_LATENCY_MASK;
+ & QCA_DDR_CFG2_GATE_OPEN_LATENCY_MASK;
reg = reg & ~QCA_DDR_CFG2_GATE_OPEN_LATENCY_MASK;
reg = reg | tmp;
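/*
 * Example with mem_cas = 3: gate open latency = 2 * 3 = 6. The
 * clear-then-set pair above is the usual read-modify-write for a
 * register field: mask out the old GATE_OPEN_LATENCY bits, then OR in
 * the new, already-masked value.
 */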
tmp = tmp + 2;
} else {
/* tWTR = 2 + BL + (2 * tWTR/tCK) */
- tmp = 2 + DDR_burst_len + (((DDRx_tWTR_ns * ddr_clk) + 500) / 1000);
+ tmp = 2 + DDR_burst_len
+ + (((DDRx_tWTR_ns * ddr_clk) + 500) / 1000);
}
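/*
 * Worked example for the else branch above: with DDR_burst_len = 8,
 * DDRx_tWTR_ns = 10 and ddr_clk = 400 MHz, tmp = 2 + 8 +
 * ((10 * 400 + 500) / 1000) = 2 + 8 + 4 = 14 cycles, which is then
 * checked against MAX_tWTR just below.
 */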
if (tmp > MAX_tWTR)
/* Always use burst length = 8 and type: sequential */
tmp = (DDR_burst_len << QCA_DDR_CFG2_BURST_LEN_SHIFT)
- & QCA_DDR_CFG2_BURST_LEN_MASK;
+ & QCA_DDR_CFG2_BURST_LEN_MASK;
reg = reg & ~(QCA_DDR_CFG2_BURST_LEN_MASK
- | QCA_DDR_CFG2_BURST_TYPE_MASK);
+ | QCA_DDR_CFG2_BURST_TYPE_MASK);
reg = reg | tmp;
qca_soc_reg_write(QCA_DDR_CFG2_REG, reg);
* Setup DDR2_CONFIG register (only for DDR2)
*/
static inline void qca_dram_set_ddr2_cfg(u32 mem_cas,
- u32 ddr_clk)
+ u32 ddr_clk)
{
#ifndef CONFIG_QCA_DDR_DDR2_CFG_REG_VAL
u32 reg = 0;
tmp = MAX_tFAW;
tmp = (tmp << QCA_DDR_DDR2_CFG_DDR2_TFAW_SHIFT)
- & QCA_DDR_DDR2_CFG_DDR2_TFAW_MASK;
+ & QCA_DDR_DDR2_CFG_DDR2_TFAW_MASK;
reg = reg & ~QCA_DDR_DDR2_CFG_DDR2_TFAW_MASK;
reg = reg | tmp;
#endif
tmp = (tmp << QCA_DDR_DDR2_CFG_DDR2_TWL_SHIFT)
- & QCA_DDR_DDR2_CFG_DDR2_TWL_MASK;
+ & QCA_DDR_DDR2_CFG_DDR2_TWL_MASK;
reg = reg & ~QCA_DDR_DDR2_CFG_DDR2_TWL_MASK;
reg = reg | tmp;
qca_soc_reg_write(QCA_DDR_DDR2_CFG_REG, reg);
#else
- qca_soc_reg_write(QCA_DDR_DDR2_CFG_REG, CONFIG_QCA_DDR_DDR2_CFG_REG_VAL);
+ qca_soc_reg_write(QCA_DDR_DDR2_CFG_REG,
+ CONFIG_QCA_DDR_DDR2_CFG_REG_VAL);
#endif
}
*/
if (qca_xtal_is_40mhz()) {
qca_soc_reg_write(QCA_DDR_REFRESH_REG,
- QCA_DDR_REFRESH_EN_MASK
- | (312 << QCA_DDR_REFRESH_PERIOD_SHIFT));
+ QCA_DDR_REFRESH_EN_MASK
+ | (312 << QCA_DDR_REFRESH_PERIOD_SHIFT));
} else {
qca_soc_reg_write(QCA_DDR_REFRESH_REG,
- QCA_DDR_REFRESH_EN_MASK
- | (195 << QCA_DDR_REFRESH_PERIOD_SHIFT));
+ QCA_DDR_REFRESH_EN_MASK
+ | (195 << QCA_DDR_REFRESH_PERIOD_SHIFT));
}
}
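/*
 * Sanity check on the two constants above, assuming the refresh
 * period field counts XTAL cycles: 312 / 40 MHz = 7.8 us and
 * 195 / 25 MHz = 7.8 us, so both strap options program the same
 * ~7.8 us average refresh interval (the JEDEC tREFI).
 */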
if (ddr_width == 32) {
/* For 32-bit clear HALF_WIDTH and set VEC = 0xFF */
qca_soc_reg_read_clear(QCA_DDR_CTRL_CFG_REG,
- QCA_DDR_CTRL_CFG_HALF_WIDTH_MASK);
+ QCA_DDR_CTRL_CFG_HALF_WIDTH_MASK);
qca_soc_reg_write(QCA_DDR_RD_DATA_THIS_CYCLE_REG, 0xFF);
} else {
qca_soc_reg_read_set(QCA_DDR_CTRL_CFG_REG,
- QCA_DDR_CTRL_CFG_HALF_WIDTH_MASK);
+ QCA_DDR_CTRL_CFG_HALF_WIDTH_MASK);
qca_soc_reg_write(QCA_DDR_RD_DATA_THIS_CYCLE_REG, 0xFFFF);
}
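/*
 * The RD_DATA_THIS_CYCLE vector appears to carry one flag bit per bus
 * cycle of the 8-word burst: 8 cycles (0xFF) at full 32-bit width and
 * 16 cycles (0xFFFF) at half width. This reading is inferred from the
 * two cases above, not taken from a datasheet.
 */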
#if (SOC_TYPE & QCA_AR933X_SOC)
reg = qca_soc_reg_read(QCA_PLL_CPU_PLL_DITHER_FRAC_REG);
reg = (reg & QCA_PLL_CPU_PLL_DITHER_FRAC_NFRAC_MIN_MASK)
- >> QCA_PLL_CPU_PLL_DITHER_FRAC_NFRAC_MIN_SHIFT;
+ >> QCA_PLL_CPU_PLL_DITHER_FRAC_NFRAC_MIN_SHIFT;
if (reg)
tmp = 1;
#else
reg = qca_soc_reg_read(QCA_PLL_CPU_PLL_DITHER_REG);
reg = (reg & QCA_PLL_CPU_PLL_DITHER_NFRAC_MIN_MASK)
- >> QCA_PLL_CPU_PLL_DITHER_NFRAC_MIN_SHIFT;
+ >> QCA_PLL_CPU_PLL_DITHER_NFRAC_MIN_SHIFT;
if (reg)
tmp = 1;
reg = qca_soc_reg_read(QCA_PLL_DDR_PLL_DITHER_REG);
reg = (reg & QCA_PLL_DDR_PLL_DITHER_NFRAC_MIN_MASK)
- >> QCA_PLL_DDR_PLL_DITHER_NFRAC_MIN_SHIFT;
+ >> QCA_PLL_DDR_PLL_DITHER_NFRAC_MIN_SHIFT;
if (reg)
tmp = 1;
qca_soc_reg_read_set(QCA_DDR_TAP_CTRL_3_REG, (1 << 8));
#else
qca_soc_reg_read_set(QCA_DDR_CTRL_CFG_REG,
- QCA_DDR_CTRL_CFG_CPU_DDR_SYNC_MASK);
+ QCA_DDR_CTRL_CFG_CPU_DDR_SYNC_MASK);
#endif
} else {
#if (SOC_TYPE & QCA_AR933X_SOC)
qca_soc_reg_read_clear(QCA_DDR_TAP_CTRL_3_REG, (1 << 8));
#else
qca_soc_reg_read_clear(QCA_DDR_CTRL_CFG_REG,
- QCA_DDR_CTRL_CFG_CPU_DDR_SYNC_MASK);
+ QCA_DDR_CTRL_CFG_CPU_DDR_SYNC_MASK);
#endif
}
qca_dram_set_ddr2_cfg(cas_lat, tmp_clk);
#else
qca_soc_reg_write(QCA_DDR_CTRL_CFG_REG,
- QCA_DDR_CTRL_CFG_PAD_DDR2_SEL_MASK);
+ QCA_DDR_CTRL_CFG_PAD_DDR2_SEL_MASK);
qca_dram_set_ddr2_cfg(cas_lat, tmp_clk);
#endif
/* OCD calibration, target EMR (nDQS disable, weak strength) */
qca_dram_set_emr(
- _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_DEFAULT_VAL, 1, 0, 0));
+ _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_DEFAULT_VAL,
+ 1, 0, 0));
qca_dram_set_emr(
- _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_EXIT_VAL, 1, 0, 0));
+ _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_EXIT_VAL,
+ 1, 0, 0));
} else {
/* Setup target MR */
qca_dram_set_mr(_ddr_sdram_mr_val(0, cas_lat, 0, 0));
* Prepare DDR SDRAM mode register value
 * For now, always use burst length == 8
*/
-#define DDR_SDRAM_MR_BURST_LEN_SHIFT 0
-#define DDR_SDRAM_MR_BURST_LEN_MASK BITS(DDR_SDRAM_MR_BURST_LEN_SHIFT, 3)
-#define DDR_SDRAM_MR_BURST_INTERLEAVE_SHIFT 3
-#define DDR_SDRAM_MR_BURST_INTERLEAVE_MASK (1 << DDR_SDRAM_MR_BURST_INTERLEAVE_SHIFT)
-#define DDR_SDRAM_MR_CAS_LAT_SHIFT 4
-#define DDR_SDRAM_MR_CAS_LAT_MASK BITS(DDR_SDRAM_MR_CAS_LAT_SHIFT, 3)
-#define DDR_SDRAM_MR_DLL_RESET_SHIFT 8
-#define DDR_SDRAM_MR_DLL_RESET_MASK (1 << DDR_SDRAM_MR_DLL_RESET_SHIFT)
-#define DDR_SDRAM_MR_WR_RECOVERY_SHIFT 9
-#define DDR_SDRAM_MR_WR_RECOVERY_MASK BITS(DDR_SDRAM_MR_WR_RECOVERY_SHIFT, 3)
+#define DDR_SDRAM_MR_BURST_LEN_SHIFT 0
+#define DDR_SDRAM_MR_BURST_LEN_MASK BITS(DDR_SDRAM_MR_BURST_LEN_SHIFT, 3)
+#define DDR_SDRAM_MR_BURST_INTERLEAVE_SHIFT 3
+#define DDR_SDRAM_MR_BURST_INTERLEAVE_MASK (1 << DDR_SDRAM_MR_BURST_INTERLEAVE_SHIFT)
+#define DDR_SDRAM_MR_CAS_LAT_SHIFT 4
+#define DDR_SDRAM_MR_CAS_LAT_MASK BITS(DDR_SDRAM_MR_CAS_LAT_SHIFT, 3)
+#define DDR_SDRAM_MR_DLL_RESET_SHIFT 8
+#define DDR_SDRAM_MR_DLL_RESET_MASK (1 << DDR_SDRAM_MR_DLL_RESET_SHIFT)
+#define DDR_SDRAM_MR_WR_RECOVERY_SHIFT 9
+#define DDR_SDRAM_MR_WR_RECOVERY_MASK BITS(DDR_SDRAM_MR_WR_RECOVERY_SHIFT, 3)
#define _ddr_sdram_mr_val(_burst_i, \
- _cas_lat, \
- _dll_res, \
- _wr_rcov) \
- \
+ _cas_lat, \
+ _dll_res, \
+ _wr_rcov) \
+ \
((0x3 << DDR_SDRAM_MR_BURST_LEN_SHIFT) & DDR_SDRAM_MR_BURST_LEN_MASK) |\
((_cas_lat << DDR_SDRAM_MR_CAS_LAT_SHIFT) & DDR_SDRAM_MR_CAS_LAT_MASK) |\
((_dll_res << DDR_SDRAM_MR_DLL_RESET_SHIFT) & DDR_SDRAM_MR_DLL_RESET_MASK) |\
((_burst_i << DDR_SDRAM_MR_BURST_INTERLEAVE_SHIFT) & DDR_SDRAM_MR_BURST_INTERLEAVE_MASK)
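/*
 * Expanding the macro by hand for a typical call,
 * _ddr_sdram_mr_val(0, 3, 1, 0): burst length field = 0x3 (BL8),
 * CAS latency = 3 << 4 = 0x30, DLL reset = 1 << 8 = 0x100, giving a
 * mode register value of 0x133. Note that _wr_rcov is accepted but
 * not referenced in the lines shown here.
 */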
/* Prepare DDR SDRAM extended mode register value */
-#define DDR_SDRAM_EMR_DLL_EN_SHIFT 0
-#define DDR_SDRAM_EMR_DLL_EN_MASK (1 << DDR_SDRAM_EMR_DLL_EN_SHIFT)
-#define DDR_SDRAM_EMR_WEAK_STRENGTH_SHIFT 1
-#define DDR_SDRAM_EMR_WEAK_STRENGTH_MASK (1 << DDR_SDRAM_EMR_WEAK_STRENGTH_SHIFT)
-#define DDR_SDRAM_EMR_OCD_PRG_SHIFT 7
-#define DDR_SDRAM_EMR_OCD_PRG_MASK BITS(DDR_SDRAM_EMR_OCD_PRG_SHIFT, 3)
-#define DDR_SDRAM_EMR_OCD_EXIT_VAL 0
-#define DDR_SDRAM_EMR_OCD_DEFAULT_VAL 7
-#define DDR_SDRAM_EMR_NDQS_DIS_SHIFT 10
-#define DDR_SDRAM_EMR_NDQS_DIS_MASK (1 << DDR_SDRAM_EMR_NDQS_DIS_SHIFT)
-#define DDR_SDRAM_EMR_RDQS_EN_SHIFT 11
-#define DDR_SDRAM_EMR_RDQS_EN_MASK (1 << DDR_SDRAM_EMR_RDQS_EN_SHIFT)
-#define DDR_SDRAM_EMR_OBUF_DIS_SHIFT 12
-#define DDR_SDRAM_EMR_OBUF_DIS_MASK (1 << DDR_SDRAM_EMR_OBUF_DIS_SHIFT)
+#define DDR_SDRAM_EMR_DLL_EN_SHIFT 0
+#define DDR_SDRAM_EMR_DLL_EN_MASK (1 << DDR_SDRAM_EMR_DLL_EN_SHIFT)
+#define DDR_SDRAM_EMR_WEAK_STRENGTH_SHIFT 1
+#define DDR_SDRAM_EMR_WEAK_STRENGTH_MASK (1 << DDR_SDRAM_EMR_WEAK_STRENGTH_SHIFT)
+#define DDR_SDRAM_EMR_OCD_PRG_SHIFT 7
+#define DDR_SDRAM_EMR_OCD_PRG_MASK BITS(DDR_SDRAM_EMR_OCD_PRG_SHIFT, 3)
+#define DDR_SDRAM_EMR_OCD_EXIT_VAL 0
+#define DDR_SDRAM_EMR_OCD_DEFAULT_VAL 7
+#define DDR_SDRAM_EMR_NDQS_DIS_SHIFT 10
+#define DDR_SDRAM_EMR_NDQS_DIS_MASK (1 << DDR_SDRAM_EMR_NDQS_DIS_SHIFT)
+#define DDR_SDRAM_EMR_RDQS_EN_SHIFT 11
+#define DDR_SDRAM_EMR_RDQS_EN_MASK (1 << DDR_SDRAM_EMR_RDQS_EN_SHIFT)
+#define DDR_SDRAM_EMR_OBUF_DIS_SHIFT 12
+#define DDR_SDRAM_EMR_OBUF_DIS_MASK (1 << DDR_SDRAM_EMR_OBUF_DIS_SHIFT)
#define _ddr_sdram_emr_val(_dll_dis, \
- _drv_weak, \
- _ocd_prg, \
- _ndqs_dis, \
- _rdqs_en, \
- _obuf_dis) \
- \
+ _drv_weak, \
+ _ocd_prg, \
+ _ndqs_dis, \
+ _rdqs_en, \
+ _obuf_dis) \
+ \
((_dll_dis << DDR_SDRAM_EMR_DLL_EN_SHIFT) & DDR_SDRAM_EMR_DLL_EN_MASK) |\
((_ocd_prg << DDR_SDRAM_EMR_OCD_PRG_SHIFT) & DDR_SDRAM_EMR_OCD_PRG_MASK) |\
((_ndqs_dis << DDR_SDRAM_EMR_NDQS_DIS_SHIFT) & DDR_SDRAM_EMR_NDQS_DIS_MASK) |\
((_drv_weak << DDR_SDRAM_EMR_WEAK_STRENGTH_SHIFT) & DDR_SDRAM_EMR_WEAK_STRENGTH_MASK)
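/*
 * Hand expansion of the OCD-default call used earlier,
 * _ddr_sdram_emr_val(0, 1, DDR_SDRAM_EMR_OCD_DEFAULT_VAL, 1, 0, 0):
 * weak drive strength = 1 << 1 = 0x2, OCD program = 7 << 7 = 0x380,
 * nDQS disable = 1 << 10 = 0x400, for an EMR value of 0x782. The
 * _rdqs_en and _obuf_dis arguments are not referenced in the lines
 * shown here.
 */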
/* Prepare DDR SDRAM extended mode register 2 value */
-#define DDR_SDRAM_EMR2_PASR_SHIFT 0
-#define DDR_SDRAM_EMR2_PASR_MASK BITS(DDR_SDRAM_EMR2_PASR_SHIFT, 3)
-#define DDR_SDRAM_EMR2_DCC_EN_SHIFT 3
-#define DDR_SDRAM_EMR2_DCC_EN_MASK (1 << DDR_SDRAM_EMR2_DCC_EN_SHIFT)
-#define DDR_SDRAM_EMR2_SRF_EN_SHIFT 7
-#define DDR_SDRAM_EMR2_SRF_EN_MASK (1 << DDR_SDRAM_EMR2_SRF_EN_SHIFT)
+#define DDR_SDRAM_EMR2_PASR_SHIFT 0
+#define DDR_SDRAM_EMR2_PASR_MASK BITS(DDR_SDRAM_EMR2_PASR_SHIFT, 3)
+#define DDR_SDRAM_EMR2_DCC_EN_SHIFT 3
+#define DDR_SDRAM_EMR2_DCC_EN_MASK (1 << DDR_SDRAM_EMR2_DCC_EN_SHIFT)
+#define DDR_SDRAM_EMR2_SRF_EN_SHIFT 7
+#define DDR_SDRAM_EMR2_SRF_EN_MASK (1 << DDR_SDRAM_EMR2_SRF_EN_SHIFT)
#define _ddr_sdram_emr2_val(_pasr, \
- _dcc_en, \
- _srf_en) \
- \
+ _dcc_en, \
+ _srf_en) \
+ \
((_pasr << DDR_SDRAM_EMR2_PASR_SHIFT) & DDR_SDRAM_EMR2_PASR_MASK) |\
((_dcc_en << DDR_SDRAM_EMR2_DCC_EN_SHIFT) & DDR_SDRAM_EMR2_DCC_EN_MASK) |\
((_srf_en << DDR_SDRAM_EMR2_SRF_EN_SHIFT) & DDR_SDRAM_EMR2_SRF_EN_MASK)
/* DDR_CONFIG */
#define _qca_ddr_cfg_reg_val(_tras, \
- _trcd, \
- _trp, \
- _trrd, \
- _trfc, \
- _tmrd, \
- _cas, \
- _opage) \
- \
+ _trcd, \
+ _trp, \
+ _trrd, \
+ _trfc, \
+ _tmrd, \
+ _cas, \
+ _opage) \
+ \
((_tras << QCA_DDR_CFG_TRAS_SHIFT) & QCA_DDR_CFG_TRAS_MASK) |\
((_trcd << QCA_DDR_CFG_TRCD_SHIFT) & QCA_DDR_CFG_TRCD_MASK) |\
((_trp << QCA_DDR_CFG_TRP_SHIFT) & QCA_DDR_CFG_TRP_MASK) |\
/* DDR_CONFIG2 */
#define _qca_ddr_cfg2_reg_val(_burst_type, \
- _ctrl_oe_en, \
- _phase_sel, \
- _cke, \
- _twr, \
- _trtw, \
- _trtp, \
- _twtr, \
- _gate_lat, \
- _half_width) \
- \
+ _ctrl_oe_en, \
+ _phase_sel, \
+ _cke, \
+ _twr, \
+ _trtw, \
+ _trtp, \
+ _twtr, \
+ _gate_lat, \
+ _half_width) \
+ \
(0x8 << QCA_DDR_CFG2_BURST_LEN_SHIFT) |\
((_burst_type << QCA_DDR_CFG2_BURST_TYPE_SHIFT) & QCA_DDR_CFG2_BURST_TYPE_MASK) |\
((_ctrl_oe_en << QCA_DDR_CFG2_CTRL_OE_EN_SHIFT) & QCA_DDR_CFG2_CTRL_OE_EN_MASK) |\
/* DDR_DDR2_CONFIG */
#define _qca_ddr_ddr2_cfg_reg_val(_ddr2_en, \
- _tfaw, \
- _twl) \
- \
+ _tfaw, \
+ _twl) \
+ \
((_ddr2_en << QCA_DDR_DDR2_CFG_DDR2_EN_SHIFT) & QCA_DDR_DDR2_CFG_DDR2_EN_MASK) |\
((_tfaw << QCA_DDR_DDR2_CFG_DDR2_TFAW_SHIFT) & QCA_DDR_DDR2_CFG_DDR2_TFAW_MASK) |\
((_twl << QCA_DDR_DDR2_CFG_DDR2_TWL_SHIFT) & QCA_DDR_DDR2_CFG_DDR2_TWL_MASK)
static inline void qca_dram_force_mrs(void)
{
qca_soc_reg_write(QCA_DDR_CTRL_REG,
- QCA_DDR_CTRL_FORCE_MRS_MASK);
+ QCA_DDR_CTRL_FORCE_MRS_MASK);
}
/* Force EMRS (extended mode register set) */
static inline void qca_dram_force_emrs(void)
{
qca_soc_reg_write(QCA_DDR_CTRL_REG,
- QCA_DDR_CTRL_FORCE_EMRS_MASK);
+ QCA_DDR_CTRL_FORCE_EMRS_MASK);
}
/* Force EMR2S (extended mode register 2 set) */
static inline void qca_dram_force_emr2s(void)
{
qca_soc_reg_write(QCA_DDR_CTRL_REG,
- QCA_DDR_CTRL_FORCE_EMR2S_MASK);
+ QCA_DDR_CTRL_FORCE_EMR2S_MASK);
}
/* Force EMR3S (extended mode register 3 set) */
static inline void qca_dram_force_emr3s(void)
{
qca_soc_reg_write(QCA_DDR_CTRL_REG,
- QCA_DDR_CTRL_FORCE_EMR3S_MASK);
+ QCA_DDR_CTRL_FORCE_EMR3S_MASK);
}
/* Force auto refresh */
static inline void qca_dram_force_aref(void)
{
qca_soc_reg_write(QCA_DDR_CTRL_REG,
- QCA_DDR_CTRL_FORCE_AUTO_REFRESH_MASK);
+ QCA_DDR_CTRL_FORCE_AUTO_REFRESH_MASK);
}
/* Force precharge all */
static inline void qca_dram_force_preall(void)
{
qca_soc_reg_write(QCA_DDR_CTRL_REG,
- QCA_DDR_CTRL_FORCE_PRECHARGE_ALL_MASK);
+ QCA_DDR_CTRL_FORCE_PRECHARGE_ALL_MASK);
}
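/*
 * These one-shot triggers let the init path step through the JEDEC
 * DDR2 power-up sequence by hand. A rough sketch of the canonical
 * order (the exact sequence lives in the caller, outside this hunk):
 *
 *	qca_dram_force_preall();	precharge all banks
 *	qca_dram_force_emr2s();		load EMR2
 *	qca_dram_force_emr3s();		load EMR3
 *	qca_dram_force_emrs();		load EMR, enable DLL
 *	qca_dram_force_mrs();		load MR, reset DLL
 *	qca_dram_force_preall();
 *	qca_dram_force_aref();		two auto-refresh cycles
 *	qca_dram_force_aref();
 *	qca_dram_force_mrs();		final MR, clear DLL reset
 */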
/*