1 // SPDX-License-Identifier: GPL-2.0
3 /* Copyright (C) Marvell International Ltd. and its affiliates */
7 #include "mv_ddr_common.h"
8 #include "mv_ddr_training_db.h"
9 #include "mv_ddr_regs.h"
12 #define GET_CS_FROM_MASK(mask) (cs_mask2_num[mask])
13 #define CS_CBE_VALUE(cs_num) (cs_cbe_reg[cs_num])
15 u32 window_mem_addr = 0;
19 u32 phy_reg3_val = PARAM_UNDEFINED;
20 enum mv_ddr_freq low_freq = MV_DDR_FREQ_LOW_FREQ;
21 enum mv_ddr_freq medium_freq;
23 u32 odt_additional = 1;
24 u32 *dq_map_table = NULL;
26 /* In case of DDR4, do not run the ddr3_tip_write_additional_odt_setting function - MC ODT is always 'on';
27 * in the DDR4 case the terminations are RTT_WR and RTT_PARK, and the ODT must always be 'on': 0x1498 = 0xf
34 u32 is_pll_before_init = 0, is_adll_calib_before_init = 1, is_dfs_in_init = 0;
37 u32 g_rtt_nom_cs0, g_rtt_nom_cs1;
38 u8 calibration_update_control; /* 2 - external only, 1 - internal only */
40 enum hws_result training_result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
41 enum auto_tune_stage training_stage = INIT_CONTROLLER;
42 u32 finger_test = 0, p_finger_start = 11, p_finger_end = 64,
43 n_finger_start = 11, n_finger_end = 64,
44 p_finger_step = 3, n_finger_step = 3;
45 u32 clamp_tbl[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };
47 /* Initialized to 0xff; this variable is defined by the user in debug mode */
49 u32 xsb_validate_type = 0;
50 u32 xsb_validation_base_address = 0xf000;
51 u32 first_active_if = 0;
52 u32 dfs_low_phy1 = 0x1f;
54 int use_broadcast = 0;
55 struct hws_tip_freq_config_info *freq_info_table = NULL;
56 u8 is_cbe_required = 0;
59 int rl_mid_freq_wa = 0;
63 u32 vref_init_val = 0x4;
64 u32 ck_delay = PARAM_UNDEFINED;
66 /* Design guidelines parameters */
67 u32 g_zpri_data = PARAM_UNDEFINED; /* controller data - P drive strength */
68 u32 g_znri_data = PARAM_UNDEFINED; /* controller data - N drive strength */
69 u32 g_zpri_ctrl = PARAM_UNDEFINED; /* controller C/A - P drive strength */
70 u32 g_znri_ctrl = PARAM_UNDEFINED; /* controller C/A - N drive strength */
72 u32 g_zpodt_data = PARAM_UNDEFINED; /* controller data - P ODT */
73 u32 g_znodt_data = PARAM_UNDEFINED; /* controller data - N ODT */
74 u32 g_zpodt_ctrl = PARAM_UNDEFINED; /* controller data - P ODT */
75 u32 g_znodt_ctrl = PARAM_UNDEFINED; /* controller data - N ODT */
77 u32 g_odt_config = PARAM_UNDEFINED;
78 u32 g_rtt_nom = PARAM_UNDEFINED;
79 u32 g_rtt_wr = PARAM_UNDEFINED;
80 u32 g_dic = PARAM_UNDEFINED;
81 u32 g_rtt_park = PARAM_UNDEFINED;
83 u32 mask_tune_func = (SET_MEDIUM_FREQ_MASK_BIT |
84 WRITE_LEVELING_MASK_BIT |
85 LOAD_PATTERN_2_MASK_BIT |
86 READ_LEVELING_MASK_BIT |
87 SET_TARGET_FREQ_MASK_BIT |
88 WRITE_LEVELING_TF_MASK_BIT |
89 READ_LEVELING_TF_MASK_BIT |
90 CENTRALIZATION_RX_MASK_BIT |
91 CENTRALIZATION_TX_MASK_BIT);
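/*
 * Note (hedged): each bit in mask_tune_func enables one stage of the
 * auto-tune flow (see ddr3_tip_ddr3_auto_tune()); clearing a bit,
 * e.g. via the debug interface, skips that stage.
 */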
93 static int ddr3_tip_ddr3_training_main_flow(u32 dev_num);
94 static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
95 u32 if_id, u32 cl_value, u32 cwl_value);
96 static int ddr3_tip_ddr3_auto_tune(u32 dev_num);
98 #ifdef ODT_TEST_SUPPORT
99 static int odt_test(u32 dev_num, enum hws_algo_type algo_type);
102 int adll_calibration(u32 dev_num, enum hws_access_type access_type,
103 u32 if_id, enum mv_ddr_freq frequency);
104 static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
105 u32 if_id, enum mv_ddr_freq frequency);
107 static u8 mem_size_config[MV_DDR_DIE_CAP_LAST] = {
113 0x0, /* TODO: placeholder for 16-Gbit die capacity */
114 0x0, /* TODO: placeholder for 32-Gbit die capacity */
115 0x0, /* TODO: placeholder for 12-Gbit die capacity */
116 0x0 /* TODO: placeholder for 24-Gbit die capacity */
119 static u8 cs_mask2_num[] = { 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3 };
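/*
 * Illustration: the table maps a CS bitmask to the index of its
 * highest-order set CS, e.g. GET_CS_FROM_MASK(0x4) -> cs_mask2_num[4] = 2,
 * i.e. a mask with only bit 2 set selects CS2.
 */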
121 static struct reg_data odpg_default_value[] = {
122 {0x1034, 0x38000, MASK_ALL_BITS},
123 {0x1038, 0x0, MASK_ALL_BITS},
124 {0x10b0, 0x0, MASK_ALL_BITS},
125 {0x10b8, 0x0, MASK_ALL_BITS},
126 {0x10c0, 0x0, MASK_ALL_BITS},
127 {0x10f0, 0x0, MASK_ALL_BITS},
128 {0x10f4, 0x0, MASK_ALL_BITS},
129 {0x10f8, 0xff, MASK_ALL_BITS},
130 {0x10fc, 0xffff, MASK_ALL_BITS},
131 {0x1130, 0x0, MASK_ALL_BITS},
132 {0x1830, 0x2000000, MASK_ALL_BITS},
133 {0x14d0, 0x0, MASK_ALL_BITS},
134 {0x14d4, 0x0, MASK_ALL_BITS},
135 {0x14d8, 0x0, MASK_ALL_BITS},
136 {0x14dc, 0x0, MASK_ALL_BITS},
137 {0x1454, 0x0, MASK_ALL_BITS},
138 {0x1594, 0x0, MASK_ALL_BITS},
139 {0x1598, 0x0, MASK_ALL_BITS},
140 {0x159c, 0x0, MASK_ALL_BITS},
141 {0x15a0, 0x0, MASK_ALL_BITS},
142 {0x15a4, 0x0, MASK_ALL_BITS},
143 {0x15a8, 0x0, MASK_ALL_BITS},
144 {0x15ac, 0x0, MASK_ALL_BITS},
145 {0x1604, 0x0, MASK_ALL_BITS},
146 {0x1608, 0x0, MASK_ALL_BITS},
147 {0x160c, 0x0, MASK_ALL_BITS},
148 {0x1610, 0x0, MASK_ALL_BITS},
149 {0x1614, 0x0, MASK_ALL_BITS},
150 {0x1618, 0x0, MASK_ALL_BITS},
151 {0x1624, 0x0, MASK_ALL_BITS},
152 {0x1690, 0x0, MASK_ALL_BITS},
153 {0x1694, 0x0, MASK_ALL_BITS},
154 {0x1698, 0x0, MASK_ALL_BITS},
155 {0x169c, 0x0, MASK_ALL_BITS},
156 {0x14b8, 0x6f67, MASK_ALL_BITS},
157 {0x1630, 0x0, MASK_ALL_BITS},
158 {0x1634, 0x0, MASK_ALL_BITS},
159 {0x1638, 0x0, MASK_ALL_BITS},
160 {0x163c, 0x0, MASK_ALL_BITS},
161 {0x16b0, 0x0, MASK_ALL_BITS},
162 {0x16b4, 0x0, MASK_ALL_BITS},
163 {0x16b8, 0x0, MASK_ALL_BITS},
164 {0x16bc, 0x0, MASK_ALL_BITS},
165 {0x16c0, 0x0, MASK_ALL_BITS},
166 {0x16c4, 0x0, MASK_ALL_BITS},
167 {0x16c8, 0x0, MASK_ALL_BITS},
168 {0x16cc, 0x1, MASK_ALL_BITS},
169 {0x16f0, 0x1, MASK_ALL_BITS},
170 {0x16f4, 0x0, MASK_ALL_BITS},
171 {0x16f8, 0x0, MASK_ALL_BITS},
172 {0x16fc, 0x0, MASK_ALL_BITS}
175 /* MR cmd and addr definitions */
176 struct mv_ddr_mr_data mr_data[] = {
184 static int ddr3_tip_pad_inv(void)
187 u32 sphy_max = ddr3_tip_dev_attr_get(0, MV_ATTR_OCTET_PER_INTERFACE);
188 u32 ck_swap_ctrl_sphy;
189 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
191 for (sphy = 0; sphy < sphy_max; sphy++) {
192 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sphy);
193 if (tm->interface_params[0].
194 as_bus_params[sphy].is_dqs_swap == 1) {
195 data = (INVERT_PAD << INV_PAD4_OFFS |
196 INVERT_PAD << INV_PAD5_OFFS);
198 ddr3_tip_bus_read_modify_write(0, ACCESS_TYPE_UNICAST,
205 if (tm->interface_params[0].as_bus_params[sphy].
206 is_ck_swap == 1 && sphy == 0) {
207 /* TODO: move this code to per-platform code */
208 #if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X)
209 /* clock swap for both cs0 and cs1 */
210 data = (INVERT_PAD << INV_PAD2_OFFS |
211 INVERT_PAD << INV_PAD6_OFFS |
212 INVERT_PAD << INV_PAD4_OFFS |
213 INVERT_PAD << INV_PAD5_OFFS);
214 ck_swap_ctrl_sphy = CK_SWAP_CTRL_PHY_NUM;
215 ddr3_tip_bus_read_modify_write(0, ACCESS_TYPE_UNICAST,
216 0, ck_swap_ctrl_sphy,
220 #else /* !CONFIG_ARMADA_38X && !CONFIG_ARMADA_39X && !A70X0 && !A80X0 && !A3900 */
221 #pragma message "unknown platform to configure ddr clock swap"
229 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id);
232 * Update global training parameters with data supplied by the user
234 int ddr3_tip_tune_training_params(u32 dev_num,
235 struct tune_train_params *params)
237 if (params->ck_delay != PARAM_UNDEFINED)
238 ck_delay = params->ck_delay;
239 if (params->phy_reg3_val != PARAM_UNDEFINED)
240 phy_reg3_val = params->phy_reg3_val;
241 if (params->g_rtt_nom != PARAM_UNDEFINED)
242 g_rtt_nom = params->g_rtt_nom;
243 if (params->g_rtt_wr != PARAM_UNDEFINED)
244 g_rtt_wr = params->g_rtt_wr;
245 if (params->g_dic != PARAM_UNDEFINED)
246 g_dic = params->g_dic;
247 if (params->g_odt_config != PARAM_UNDEFINED)
248 g_odt_config = params->g_odt_config;
249 if (params->g_zpri_data != PARAM_UNDEFINED)
250 g_zpri_data = params->g_zpri_data;
251 if (params->g_znri_data != PARAM_UNDEFINED)
252 g_znri_data = params->g_znri_data;
253 if (params->g_zpri_ctrl != PARAM_UNDEFINED)
254 g_zpri_ctrl = params->g_zpri_ctrl;
255 if (params->g_znri_ctrl != PARAM_UNDEFINED)
256 g_znri_ctrl = params->g_znri_ctrl;
257 if (params->g_zpodt_data != PARAM_UNDEFINED)
258 g_zpodt_data = params->g_zpodt_data;
259 if (params->g_znodt_data != PARAM_UNDEFINED)
260 g_znodt_data = params->g_znodt_data;
261 if (params->g_zpodt_ctrl != PARAM_UNDEFINED)
262 g_zpodt_ctrl = params->g_zpodt_ctrl;
263 if (params->g_znodt_ctrl != PARAM_UNDEFINED)
264 g_znodt_ctrl = params->g_znodt_ctrl;
265 if (params->g_rtt_park != PARAM_UNDEFINED)
266 g_rtt_park = params->g_rtt_park;
269 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
270 ("DGL parameters: 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X\n",
271 g_zpri_data, g_znri_data, g_zpri_ctrl, g_znri_ctrl, g_zpodt_data, g_znodt_data,
272 g_zpodt_ctrl, g_znodt_ctrl, g_rtt_nom, g_dic, g_odt_config, g_rtt_wr));
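/*
 * Usage sketch (hypothetical caller, not part of this file; assumes
 * PARAM_UNDEFINED == 0xffffffff, so a memset of 0xff marks every field
 * as "keep the current default"):
 *
 *	struct tune_train_params params;
 *
 *	memset(&params, 0xff, sizeof(params));
 *	params.ck_delay = 160;		(example value)
 *	params.g_rtt_nom = 0x44;	(example MR1 RTT_NOM encoding)
 *	ddr3_tip_tune_training_params(0, &params);
 */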
280 int ddr3_tip_configure_cs(u32 dev_num, u32 if_id, u32 cs_num, u32 enable)
282 u32 data, addr_hi, data_high;
285 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
287 if (tm->clk_enable & (1 << cs_num))
293 data = (tm->interface_params[if_id].bus_width ==
294 MV_DDR_DEV_WIDTH_8BIT) ? 0 : 1;
295 CHECK_STATUS(ddr3_tip_if_write
296 (dev_num, ACCESS_TYPE_UNICAST, if_id,
297 SDRAM_ADDR_CTRL_REG, (data << (cs_num * 4)),
298 0x3 << (cs_num * 4)));
299 mem_index = tm->interface_params[if_id].memory_size;
301 addr_hi = mem_size_config[mem_index] & 0x3;
302 CHECK_STATUS(ddr3_tip_if_write
303 (dev_num, ACCESS_TYPE_UNICAST, if_id,
305 (addr_hi << (2 + cs_num * 4)),
306 0x3 << (2 + cs_num * 4)));
308 data_high = (mem_size_config[mem_index] & 0x4) >> 2;
309 CHECK_STATUS(ddr3_tip_if_write
310 (dev_num, ACCESS_TYPE_UNICAST, if_id,
312 data_high << (20 + cs_num), 1 << (20 + cs_num)));
314 /* Enable Address Select Mode */
315 CHECK_STATUS(ddr3_tip_if_write
316 (dev_num, ACCESS_TYPE_UNICAST, if_id,
317 SDRAM_ADDR_CTRL_REG, 1 << (16 + cs_num),
318 1 << (16 + cs_num)));
324 CHECK_STATUS(ddr3_tip_if_write
325 (dev_num, ACCESS_TYPE_UNICAST, if_id,
326 DUNIT_CTRL_LOW_REG, (clk_enable << (cs_num + 11)),
327 1 << (cs_num + 11)));
330 CHECK_STATUS(ddr3_tip_if_write
331 (dev_num, ACCESS_TYPE_UNICAST, if_id,
332 DUNIT_CTRL_LOW_REG, (clk_enable << 15), 1 << 15));
340 * Init Controller Flow
342 int hws_ddr3_tip_init_controller(u32 dev_num, struct init_cntr_param *init_cntr_prm)
346 u32 t_ckclk = 0, t_wr = 0, t2t = 0;
347 u32 data_value = 0, cs_cnt = 0,
348 mem_mask = 0, bus_index = 0;
349 enum mv_ddr_speed_bin speed_bin_index = SPEED_BIN_DDR_2133N;
351 u32 cl_value = 0, cwl_val = 0;
352 u32 bus_cnt = 0, adll_tap = 0;
353 enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
354 u32 data_read[MAX_INTERFACE_NUM];
355 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
356 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
357 enum mv_ddr_timing timing;
358 enum mv_ddr_freq freq = tm->interface_params[0].memory_freq;
360 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
361 ("Init_controller, do_mrs_phy=%d, is_ctrl64_bit=%d\n",
362 init_cntr_prm->do_mrs_phy,
363 init_cntr_prm->is_ctrl64_bit));
365 if (init_cntr_prm->init_phy == 1) {
366 CHECK_STATUS(ddr3_tip_configure_phy(dev_num));
369 if (generic_init_controller == 1) {
370 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
371 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
372 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
373 ("active IF %d\n", if_id));
376 bus_index < octets_per_if_num;
378 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
380 tm->interface_params[if_id].
381 as_bus_params[bus_index].mirror_enable_bitmask;
385 CHECK_STATUS(ddr3_tip_if_write
386 (dev_num, ACCESS_TYPE_MULTICAST,
387 if_id, DUAL_DUNIT_CFG_REG, 0,
392 tm->interface_params[if_id].
395 /* t_ckclk is the external clock period [ps] */
396 t_ckclk = (MEGA / mv_ddr_freq_get(freq));
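/*
 * Worked example (illustrative): at freq = 800 MHz,
 * t_ckclk = MEGA / 800 = 1250, i.e. a 1.25 ns clock period in ps.
 */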
398 if (MV_DDR_IS_HALF_BUS_DRAM_MODE(tm->bus_act_mask, octets_per_if_num))
399 data_value = (0x4000 | 0 | 0x1000000) & ~(1 << 26);
401 data_value = (0x4000 | 0x8000 | 0x1000000) & ~(1 << 26);
403 /* Interface Bus Width */
405 CHECK_STATUS(ddr3_tip_if_write
406 (dev_num, access_type, if_id,
407 SDRAM_CFG_REG, data_value,
410 /* Interleave first command pre-charge enable (TBD) */
411 CHECK_STATUS(ddr3_tip_if_write
412 (dev_num, access_type, if_id,
413 SDRAM_OPEN_PAGES_CTRL_REG, (1 << 10),
416 /* Reset divider_b assert -> de-assert */
417 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
419 0x0 << PUP_RST_DIVIDER_OFFS,
420 PUP_RST_DIVIDER_MASK << PUP_RST_DIVIDER_OFFS));
422 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
424 0x1 << PUP_RST_DIVIDER_OFFS,
425 PUP_RST_DIVIDER_MASK << PUP_RST_DIVIDER_OFFS));
427 /* PHY configuration */
429 * Postamble length = 1.5cc, address/cntl-to-clk skew
430 * = 1/2 cc, preamble length normal, parallel ADLL enable
432 CHECK_STATUS(ddr3_tip_if_write
433 (dev_num, access_type, if_id,
434 DRAM_PHY_CFG_REG, 0x28, 0x3e));
435 if (init_cntr_prm->is_ctrl64_bit) {
437 CHECK_STATUS(ddr3_tip_if_write
438 (dev_num, access_type, if_id,
439 DRAM_PHY_CFG_REG, 0x0,
443 /* calibration block disable */
444 /* Xbar Read buffer select (for Internal access) */
445 CHECK_STATUS(ddr3_tip_if_write
446 (dev_num, access_type, if_id,
447 MAIN_PADS_CAL_MACH_CTRL_REG, 0x1200c,
449 CHECK_STATUS(ddr3_tip_if_write
450 (dev_num, access_type, if_id,
451 MAIN_PADS_CAL_MACH_CTRL_REG,
452 calibration_update_control << 3, 0x3 << 3));
454 /* Pad calibration control - enable */
455 CHECK_STATUS(ddr3_tip_if_write
456 (dev_num, access_type, if_id,
457 MAIN_PADS_CAL_MACH_CTRL_REG, 0x1, 0x1));
458 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) {
459 /* DDR3 rank ctrl - part of the generic code */
460 /* CS1 mirroring enable + w/a for JIRA DUNIT-14581 */
461 CHECK_STATUS(ddr3_tip_if_write
462 (dev_num, access_type, if_id,
463 DDR3_RANK_CTRL_REG, 0x27, MASK_ALL_BITS));
469 * Address ctrl - part of the generic code
470 * The next configuration is done:
475 * Per Dunit get from the Map_topology the parameters:
480 (tm->interface_params[if_id].
481 bus_width == MV_DDR_DEV_WIDTH_8BIT) ? 0 : 1;
483 /* create merged cs mask for all cs available in dunit */
485 bus_cnt < octets_per_if_num;
487 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
489 tm->interface_params[if_id].
490 as_bus_params[bus_cnt].cs_bitmask;
492 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
493 ("Init_controller IF %d cs_mask %d\n",
496 * Configure the following per the topology map - if the
497 * Dunit is CS0, configure CS0; if it is multi-CS,
498 * configure them both. The bus_width is the
499 * memory bus width - x8 or x16
501 for (cs_cnt = 0; cs_cnt < MAX_CS_NUM; cs_cnt++) {
502 ddr3_tip_configure_cs(dev_num, if_id, cs_cnt,
503 ((cs_mask & (1 << cs_cnt)) ? 1
507 if (init_cntr_prm->do_mrs_phy) {
509 * MR0 - part of the generic code
510 * The next configuration is done:
513 * get for each dunit its speed bin &
514 * target frequency; from these two parameters
515 * derive the appropriate CAS latency from the CL table
518 tm->interface_params[if_id].
521 tm->interface_params[if_id].
523 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
524 ("cl_value 0x%x cwl_val 0x%x\n",
527 t_wr = time_to_nclk(mv_ddr_speed_bin_timing_get
529 SPEED_BIN_TWR), t_ckclk);
532 ((cl_mask_table[cl_value] & 0x1) << 2) |
533 ((cl_mask_table[cl_value] & 0xe) << 3);
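/*
 * Note (per the JEDEC DDR3 MR0 layout): the CL code's LSB goes to
 * MR0 bit A2 and its upper three bits to A6:A4, e.g. a CL code of
 * 0x6 yields (0 << 2) | (0x6 << 3) = 0x30.
 */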
534 CHECK_STATUS(ddr3_tip_if_write
535 (dev_num, access_type, if_id,
537 (0x7 << 4) | (1 << 2)));
538 CHECK_STATUS(ddr3_tip_if_write
539 (dev_num, access_type, if_id,
540 MR0_REG, twr_mask_table[t_wr] << 9,
544 * MR1: Set RTT and DIC Design GL values
547 CHECK_STATUS(ddr3_tip_if_write
548 (dev_num, ACCESS_TYPE_MULTICAST,
549 PARAM_NOT_CARE, MR1_REG,
550 g_dic | g_rtt_nom, 0x266));
552 /* MR2 - Part of the Generic code */
554 * The next configuration is done:
556 * 2) CAS Write Latency
558 data_value = (cwl_mask_table[cwl_val] << 3);
560 ((tm->interface_params[if_id].
562 MV_DDR_TEMP_HIGH) ? (1 << 7) : 0);
563 data_value |= g_rtt_wr;
564 CHECK_STATUS(ddr3_tip_if_write
565 (dev_num, access_type, if_id,
567 (0x7 << 3) | (0x1 << 7) | (0x3 <<
571 ddr3_tip_write_odt(dev_num, access_type, if_id,
573 ddr3_tip_set_timing(dev_num, access_type, if_id, freq);
575 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) {
576 CHECK_STATUS(ddr3_tip_if_write
577 (dev_num, access_type, if_id,
578 DUNIT_CTRL_HIGH_REG, 0x1000119,
581 CHECK_STATUS(ddr3_tip_if_write
582 (dev_num, access_type, if_id,
583 DUNIT_CTRL_HIGH_REG, 0x600177 |
584 (init_cntr_prm->is_ctrl64_bit ?
585 CPU_INTERJECTION_ENA_SPLIT_ENA << CPU_INTERJECTION_ENA_OFFS :
586 CPU_INTERJECTION_ENA_SPLIT_DIS << CPU_INTERJECTION_ENA_OFFS),
587 0x1600177 | CPU_INTERJECTION_ENA_MASK <<
588 CPU_INTERJECTION_ENA_OFFS));
592 CHECK_STATUS(ddr3_tip_if_write
593 (dev_num, access_type, if_id,
595 (init_cntr_prm->msys_init << 7), (1 << 7)));
597 timing = tm->interface_params[if_id].timing;
599 if (mode_2t != 0xff) {
601 } else if (timing != MV_DDR_TIM_DEFAULT) {
602 t2t = (timing == MV_DDR_TIM_2T) ? 1 : 0;
604 /* calculate number of CS (per interface) */
605 cs_num = mv_ddr_cs_num_get();
606 t2t = (cs_num == 1) ? 0 : 1;
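/*
 * Rationale (hedged): a single rank presents a light C/A load, so 1T
 * command timing (t2t = 0) is used; with two or more ranks the code
 * falls back to 2T (t2t = 1) to relax command/address setup.
 */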
609 CHECK_STATUS(ddr3_tip_if_write
610 (dev_num, access_type, if_id,
611 DUNIT_CTRL_LOW_REG, t2t << 3,
613 CHECK_STATUS(ddr3_tip_if_write
614 (dev_num, access_type, if_id,
615 DDR_TIMING_REG, 0x28 << 9, 0x3f << 9));
616 CHECK_STATUS(ddr3_tip_if_write
617 (dev_num, access_type, if_id,
618 DDR_TIMING_REG, 0xa << 21, 0xff << 21));
620 /* move the block to ddr3_tip_set_timing - end */
621 /* AUTO_ZQC_TIMING */
622 CHECK_STATUS(ddr3_tip_if_write
623 (dev_num, access_type, if_id,
624 ZQC_CFG_REG, (AUTO_ZQC_TIMING | (2 << 20)),
626 CHECK_STATUS(ddr3_tip_if_read
627 (dev_num, access_type, if_id,
628 DRAM_PHY_CFG_REG, data_read, 0x30));
630 (data_read[if_id] == 0) ? (1 << 11) : 0;
631 CHECK_STATUS(ddr3_tip_if_write
632 (dev_num, access_type, if_id,
633 DUNIT_CTRL_HIGH_REG, data_value,
636 /* Set Active control for ODT write transactions */
637 CHECK_STATUS(ddr3_tip_if_write
638 (dev_num, ACCESS_TYPE_MULTICAST,
639 PARAM_NOT_CARE, 0x1494, g_odt_config,
642 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) == MV_TIP_REV_3) {
643 CHECK_STATUS(ddr3_tip_if_write
644 (dev_num, access_type, if_id,
645 0x14a8, 0x900, 0x900));
646 /* w/a: keep the control sub-phy outputs from floating during self-refresh */
647 CHECK_STATUS(ddr3_tip_if_write
648 (dev_num, access_type, if_id,
654 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
655 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
656 CHECK_STATUS(ddr3_tip_rank_control(dev_num, if_id));
658 if (init_cntr_prm->do_mrs_phy)
661 /* Pad calibration control - disable */
662 CHECK_STATUS(ddr3_tip_if_write
663 (dev_num, access_type, if_id,
664 MAIN_PADS_CAL_MACH_CTRL_REG, 0x0, 0x1));
665 CHECK_STATUS(ddr3_tip_if_write
666 (dev_num, access_type, if_id,
667 MAIN_PADS_CAL_MACH_CTRL_REG,
668 calibration_update_control << 3, 0x3 << 3));
672 if (delay_enable != 0) {
673 adll_tap = MEGA / (mv_ddr_freq_get(freq) * 64);
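/*
 * Worked example (illustrative): the ADLL divides one clock period
 * into 64 taps, so at 800 MHz one tap is MEGA / (800 * 64) ~= 19 ps.
 */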
674 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
683 static int ddr3_tip_rev2_rank_control(u32 dev_num, u32 if_id)
685 u32 data_value = 0, bus_cnt = 0;
686 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
687 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
689 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
690 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
691 data_value |= tm->interface_params[if_id].as_bus_params[bus_cnt].
694 if (tm->interface_params[if_id].as_bus_params[bus_cnt].
695 mirror_enable_bitmask == 1) {
697 * Check mirror_enable_bitmask:
698 * if mirroring is enabled for this CS, set bit (CS + 4) of the word to '1'
700 if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
701 cs_bitmask & 0x1) != 0) {
702 data_value |= tm->interface_params[if_id].
703 as_bus_params[bus_cnt].
704 mirror_enable_bitmask << 4;
707 if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
708 cs_bitmask & 0x2) != 0) {
709 data_value |= tm->interface_params[if_id].
710 as_bus_params[bus_cnt].
711 mirror_enable_bitmask << 5;
714 if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
715 cs_bitmask & 0x4) != 0) {
716 data_value |= tm->interface_params[if_id].
717 as_bus_params[bus_cnt].
718 mirror_enable_bitmask << 6;
721 if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
722 cs_bitmask & 0x8) != 0) {
723 data_value |= tm->interface_params[if_id].
724 as_bus_params[bus_cnt].
725 mirror_enable_bitmask << 7;
730 CHECK_STATUS(ddr3_tip_if_write
731 (dev_num, ACCESS_TYPE_UNICAST, if_id, DDR3_RANK_CTRL_REG,
737 static int ddr3_tip_rev3_rank_control(u32 dev_num, u32 if_id)
739 u32 data_value = 0, bus_cnt;
740 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
741 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
743 for (bus_cnt = 1; bus_cnt < octets_per_if_num; bus_cnt++) {
744 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
745 if ((tm->interface_params[if_id].
746 as_bus_params[0].cs_bitmask !=
747 tm->interface_params[if_id].
748 as_bus_params[bus_cnt].cs_bitmask) ||
749 (tm->interface_params[if_id].
750 as_bus_params[0].mirror_enable_bitmask !=
751 tm->interface_params[if_id].
752 as_bus_params[bus_cnt].mirror_enable_bitmask))
753 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
754 ("WARNING:Wrong configuration for pup #%d CS mask and CS mirroring for all pups should be the same\n",
758 data_value |= tm->interface_params[if_id].
759 as_bus_params[0].cs_bitmask;
760 data_value |= tm->interface_params[if_id].
761 as_bus_params[0].mirror_enable_bitmask << 4;
763 CHECK_STATUS(ddr3_tip_if_write
764 (dev_num, ACCESS_TYPE_UNICAST, if_id, DDR3_RANK_CTRL_REG,
770 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id)
772 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) == MV_TIP_REV_2)
773 return ddr3_tip_rev2_rank_control(dev_num, if_id);
775 return ddr3_tip_rev3_rank_control(dev_num, if_id);
779 * Algorithm Parameters Validation
781 int ddr3_tip_validate_algo_var(u32 value, u32 fail_value, char *var_name)
783 if (value == fail_value) {
784 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
785 ("Error: %s is not initialized (Algo Components Validation)\n",
793 int ddr3_tip_validate_algo_ptr(void *ptr, void *fail_value, char *ptr_name)
795 if (ptr == fail_value) {
796 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
797 ("Error: %s is not initialized (Algo Components Validation)\n",
805 int ddr3_tip_validate_algo_components(u8 dev_num)
809 /* Check DGL parameters */
810 status &= ddr3_tip_validate_algo_var(ck_delay, PARAM_UNDEFINED, "ck_delay");
811 status &= ddr3_tip_validate_algo_var(phy_reg3_val, PARAM_UNDEFINED, "phy_reg3_val");
812 status &= ddr3_tip_validate_algo_var(g_rtt_nom, PARAM_UNDEFINED, "g_rtt_nom");
813 status &= ddr3_tip_validate_algo_var(g_dic, PARAM_UNDEFINED, "g_dic");
814 status &= ddr3_tip_validate_algo_var(g_odt_config, PARAM_UNDEFINED, "g_odt_config");
815 status &= ddr3_tip_validate_algo_var(g_zpri_data, PARAM_UNDEFINED, "g_zpri_data");
816 status &= ddr3_tip_validate_algo_var(g_znri_data, PARAM_UNDEFINED, "g_znri_data");
817 status &= ddr3_tip_validate_algo_var(g_zpri_ctrl, PARAM_UNDEFINED, "g_zpri_ctrl");
818 status &= ddr3_tip_validate_algo_var(g_znri_ctrl, PARAM_UNDEFINED, "g_znri_ctrl");
819 status &= ddr3_tip_validate_algo_var(g_zpodt_data, PARAM_UNDEFINED, "g_zpodt_data");
820 status &= ddr3_tip_validate_algo_var(g_znodt_data, PARAM_UNDEFINED, "g_znodt_data");
821 status &= ddr3_tip_validate_algo_var(g_zpodt_ctrl, PARAM_UNDEFINED, "g_zpodt_ctrl");
822 status &= ddr3_tip_validate_algo_var(g_znodt_ctrl, PARAM_UNDEFINED, "g_znodt_ctrl");
824 /* Check function pointers */
825 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_dunit_mux_select_func,
826 NULL, "tip_dunit_mux_select_func");
827 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_dunit_write,
828 NULL, "mv_ddr_dunit_write");
829 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_dunit_read,
830 NULL, "mv_ddr_dunit_read");
831 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_phy_write,
832 NULL, "mv_ddr_phy_write");
833 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_phy_read,
834 NULL, "mv_ddr_phy_read");
835 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_get_freq_config_info_func,
836 NULL, "tip_get_freq_config_info_func");
837 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_set_freq_divider_func,
838 NULL, "tip_set_freq_divider_func");
839 status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_get_clock_ratio,
840 NULL, "tip_get_clock_ratio");
842 status &= ddr3_tip_validate_algo_ptr(dq_map_table, NULL, "dq_map_table");
843 status &= ddr3_tip_validate_algo_var(dfs_low_freq, 0, "dfs_low_freq");
845 return (status == 1) ? MV_OK : MV_NOT_INITIALIZED;
849 int ddr3_pre_algo_config(void)
851 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
853 /* Set Bus3 ECC training mode */
854 if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) {
855 /* Set Bus3 ECC MUX */
856 CHECK_STATUS(ddr3_tip_if_write
857 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
858 DRAM_PINS_MUX_REG, 0x100, 0x100));
861 /* Set regular ECC training mode (bus4 and bus3) */
862 if ((DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask)) ||
863 (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) ||
864 (DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask))) {
865 /* Enable ECC Write MUX */
866 CHECK_STATUS(ddr3_tip_if_write
867 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
868 TRAINING_SW_2_REG, 0x100, 0x100));
869 /* General ECC enable */
870 CHECK_STATUS(ddr3_tip_if_write
871 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
872 SDRAM_CFG_REG, 0x40000, 0x40000));
873 /* Disable Read Data ECC MUX */
874 CHECK_STATUS(ddr3_tip_if_write
875 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
876 TRAINING_SW_2_REG, 0x0, 0x2));
882 int ddr3_post_algo_config(void)
884 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
887 status = ddr3_post_run_alg();
888 if (MV_OK != status) {
889 printf("DDR3 Post Run Alg - FAILED 0x%x\n", status);
893 /* Un_set ECC training mode */
894 if ((DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask)) ||
895 (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) ||
896 (DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask))) {
897 /* Disable ECC Write MUX */
898 CHECK_STATUS(ddr3_tip_if_write
899 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
900 TRAINING_SW_2_REG, 0x0, 0x100));
901 /* General ECC and Bus3 ECC MUX remain enabled */
910 int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type)
914 status = ddr3_pre_algo_config();
915 if (MV_OK != status) {
916 printf("DDR3 Pre Algo Config - FAILED 0x%x\n", status);
920 #ifdef ODT_TEST_SUPPORT
921 if (finger_test == 1)
922 return odt_test(dev_num, algo_type);
925 if (algo_type == ALGO_TYPE_DYNAMIC) {
926 status = ddr3_tip_ddr3_auto_tune(dev_num);
929 if (status != MV_OK) {
930 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
931 ("******** DRAM initialization Failed (res 0x%x) ********\n",
936 status = ddr3_post_algo_config();
937 if (MV_OK != status) {
938 printf("DDR3 Post Algo Config - FAILED 0x%x\n", status);
945 #ifdef ODT_TEST_SUPPORT
949 static int odt_test(u32 dev_num, enum hws_algo_type algo_type)
951 int ret = MV_OK, ret_tune = MV_OK;
952 int pfinger_val = 0, nfinger_val;
954 for (pfinger_val = p_finger_start; pfinger_val <= p_finger_end;
955 pfinger_val += p_finger_step) {
956 for (nfinger_val = n_finger_start; nfinger_val <= n_finger_end;
957 nfinger_val += n_finger_step) {
958 if (finger_test != 0) {
959 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
960 ("pfinger_val %d nfinger_val %d\n",
961 pfinger_val, nfinger_val));
963 * TODO: need to check the correctness
964 * of the following two lines.
966 g_zpodt_data = pfinger_val;
967 g_znodt_data = nfinger_val;
970 if (algo_type == ALGO_TYPE_DYNAMIC) {
971 ret = ddr3_tip_ddr3_auto_tune(dev_num);
976 if (ret_tune != MV_OK) {
977 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
978 ("Run_alg: tuning failed %d\n", ret_tune));
979 ret = (ret == MV_OK) ? ret_tune : ret;
989 int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable)
991 return config_func_info[dev_num].
992 tip_dunit_mux_select_func((u8)dev_num, enable);
996 * Dunit Register Write
998 int ddr3_tip_if_write(u32 dev_num, enum hws_access_type interface_access,
999 u32 if_id, u32 reg_addr, u32 data_value, u32 mask)
1001 config_func_info[dev_num].mv_ddr_dunit_write(reg_addr, mask, data_value);
1007 * Dunit Register Read
1009 int ddr3_tip_if_read(u32 dev_num, enum hws_access_type interface_access,
1010 u32 if_id, u32 reg_addr, u32 *data, u32 mask)
1012 config_func_info[dev_num].mv_ddr_dunit_read(reg_addr, mask, data);
1018 * Dunit Register Polling
1020 int ddr3_tip_if_polling(u32 dev_num, enum hws_access_type access_type,
1021 u32 if_id, u32 exp_value, u32 mask, u32 offset,
1024 u32 poll_cnt = 0, interface_num = 0, start_if, end_if;
1025 u32 read_data[MAX_INTERFACE_NUM];
1027 int is_fail = 0, is_if_fail;
1028 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1030 if (access_type == ACCESS_TYPE_MULTICAST) {
1032 end_if = MAX_INTERFACE_NUM - 1;
1038 for (interface_num = start_if; interface_num <= end_if; interface_num++) {
1039 /* polling bit 3 for n times */
1040 VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num);
1043 for (poll_cnt = 0; poll_cnt < poll_tries; poll_cnt++) {
1045 ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST,
1046 interface_num, offset, read_data,
1051 if (read_data[interface_num] == exp_value)
1055 if (poll_cnt >= poll_tries) {
1056 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1057 ("max poll IF #%d\n", interface_num));
1062 training_result[training_stage][interface_num] =
1063 (is_if_fail == 1) ? TEST_FAILED : TEST_SUCCESS;
1066 return (is_fail == 0) ? MV_OK : MV_FAIL;
1072 int ddr3_tip_bus_read(u32 dev_num, u32 if_id,
1073 enum hws_access_type phy_access, u32 phy_id,
1074 enum hws_ddr_phy phy_type, u32 reg_addr, u32 *data)
1076 return config_func_info[dev_num].
1077 mv_ddr_phy_read(phy_access, phy_id, phy_type, reg_addr, data);
1083 int ddr3_tip_bus_write(u32 dev_num, enum hws_access_type interface_access,
1084 u32 if_id, enum hws_access_type phy_access,
1085 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
1088 return config_func_info[dev_num].
1089 mv_ddr_phy_write(phy_access, phy_id, phy_type, reg_addr, data_value, OPERATION_WRITE);
1094 * Phy read-modify-write
1096 int ddr3_tip_bus_read_modify_write(u32 dev_num, enum hws_access_type access_type,
1097 u32 interface_id, u32 phy_id,
1098 enum hws_ddr_phy phy_type, u32 reg_addr,
1099 u32 data_value, u32 reg_mask)
1101 u32 data_val = 0, if_id, start_if, end_if;
1102 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1104 if (access_type == ACCESS_TYPE_MULTICAST) {
1106 end_if = MAX_INTERFACE_NUM - 1;
1108 start_if = interface_id;
1109 end_if = interface_id;
1112 for (if_id = start_if; if_id <= end_if; if_id++) {
1113 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1114 CHECK_STATUS(ddr3_tip_bus_read
1115 (dev_num, if_id, ACCESS_TYPE_UNICAST, phy_id,
1116 phy_type, reg_addr, &data_val));
1117 data_value = (data_val & (~reg_mask)) | (data_value & reg_mask);
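/*
 * Worked example: data_val = 0x35, data_value = 0x0f, reg_mask = 0x0f
 * gives (0x35 & ~0x0f) | (0x0f & 0x0f) = 0x3f - only the masked bits
 * are replaced, the rest keep their read-back values.
 */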
1118 CHECK_STATUS(ddr3_tip_bus_write
1119 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1120 ACCESS_TYPE_UNICAST, phy_id, phy_type, reg_addr,
1130 int adll_calibration(u32 dev_num, enum hws_access_type access_type,
1131 u32 if_id, enum mv_ddr_freq frequency)
1133 struct hws_tip_freq_config_info freq_config_info;
1135 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1136 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1138 /* Reset divider_b assert -> de-assert */
1139 CHECK_STATUS(ddr3_tip_if_write
1140 (dev_num, access_type, if_id, SDRAM_CFG_REG,
1143 CHECK_STATUS(ddr3_tip_if_write
1144 (dev_num, access_type, if_id, SDRAM_CFG_REG,
1145 0x10000000, 0x10000000));
1147 CHECK_STATUS(config_func_info[dev_num].
1148 tip_get_freq_config_info_func((u8)dev_num, frequency,
1149 &freq_config_info));
1151 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
1152 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
1153 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1154 (dev_num, access_type, if_id, bus_cnt,
1155 DDR_PHY_DATA, ADLL_CFG0_PHY_REG,
1156 freq_config_info.bw_per_freq << 8, 0x700));
1157 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1158 (dev_num, access_type, if_id, bus_cnt,
1159 DDR_PHY_DATA, ADLL_CFG2_PHY_REG,
1160 freq_config_info.rate_per_freq, 0x7));
1163 for (bus_cnt = 0; bus_cnt < DDR_IF_CTRL_SUBPHYS_NUM; bus_cnt++) {
1164 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1165 (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_cnt,
1166 DDR_PHY_CONTROL, ADLL_CFG0_PHY_REG,
1167 freq_config_info.bw_per_freq << 8, 0x700));
1168 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1169 (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_cnt,
1170 DDR_PHY_CONTROL, ADLL_CFG2_PHY_REG,
1171 freq_config_info.rate_per_freq, 0x7));
1174 /* DUnit to PHY drive post edge, ADLL reset assert -> de-assert */
1175 CHECK_STATUS(ddr3_tip_if_write
1176 (dev_num, access_type, if_id, DRAM_PHY_CFG_REG,
1177 0, (0x80000000 | 0x40000000)));
1178 mdelay(100 / (mv_ddr_freq_get(frequency) / mv_ddr_freq_get(MV_DDR_FREQ_LOW_FREQ)));
1179 CHECK_STATUS(ddr3_tip_if_write
1180 (dev_num, access_type, if_id, DRAM_PHY_CFG_REG,
1181 (0x80000000 | 0x40000000), (0x80000000 | 0x40000000)));
1183 /* polling for ADLL Done */
1184 if (ddr3_tip_if_polling(dev_num, access_type, if_id,
1185 0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG,
1186 MAX_POLLING_ITERATIONS) != MV_OK) {
1187 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1188 ("Freq_set: DDR3 poll failed(1)"));
1191 /* pup data_pup reset assert -> de-assert */
1192 CHECK_STATUS(ddr3_tip_if_write
1193 (dev_num, access_type, if_id, SDRAM_CFG_REG,
1196 CHECK_STATUS(ddr3_tip_if_write
1197 (dev_num, access_type, if_id, SDRAM_CFG_REG,
1198 0x60000000, 0x60000000));
1203 int ddr3_tip_freq_set(u32 dev_num, enum hws_access_type access_type,
1204 u32 if_id, enum mv_ddr_freq frequency)
1206 u32 cl_value = 0, cwl_value = 0, mem_mask = 0, val = 0,
1207 bus_cnt = 0, t_wr = 0, t_ckclk = 0,
1209 u32 end_if, start_if;
1212 enum mv_ddr_speed_bin speed_bin_index = 0;
1213 struct hws_tip_freq_config_info freq_config_info;
1214 enum hws_result *flow_result = training_result[training_stage];
1218 u32 cs_mask[MAX_INTERFACE_NUM];
1219 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1220 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1222 enum mv_ddr_timing timing = tm->interface_params[if_id].timing;
1223 u32 freq = mv_ddr_freq_get(frequency);
1225 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1226 ("dev %d access %d IF %d freq %d\n", dev_num,
1227 access_type, if_id, frequency));
1229 if (frequency == MV_DDR_FREQ_LOW_FREQ)
1231 if (access_type == ACCESS_TYPE_MULTICAST) {
1233 end_if = MAX_INTERFACE_NUM - 1;
1239 /* calculate interface cs mask - Oferb 4/11 */
1240 /* speed bin can be different for each interface */
1241 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1242 /* cs enable is active low */
1243 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1244 cs_mask[if_id] = CS_BIT_MASK;
1245 training_result[training_stage][if_id] = TEST_SUCCESS;
1246 ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
1250 /* speed bin can be different for each interface */
1252 * moti b - need to remove the loop for multicast access functions
1253 * and loop the unicast access functions
1255 for (if_id = start_if; if_id <= end_if; if_id++) {
1256 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1258 flow_result[if_id] = TEST_SUCCESS;
1260 tm->interface_params[if_id].speed_bin_index;
1261 if (tm->interface_params[if_id].memory_freq ==
1264 tm->interface_params[if_id].cas_l;
1266 tm->interface_params[if_id].cas_wl;
1267 } else if (tm->cfg_src == MV_DDR_CFG_SPD) {
1268 tclk = 1000000 / freq;
1269 cl_value = mv_ddr_cl_calc(tm->timing_data[MV_DDR_TAA_MIN], tclk);
1270 if (cl_value == 0) {
1271 printf("mv_ddr: unsupported cas latency value found\n");
1274 cwl_value = mv_ddr_cwl_calc(tclk);
1275 if (cwl_value == 0) {
1276 printf("mv_ddr: unsupported cas write latency value found\n");
1280 cl_value = mv_ddr_cl_val_get(speed_bin_index, frequency);
1281 cwl_value = mv_ddr_cwl_val_get(speed_bin_index, frequency);
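/*
 * Worked example for the SPD path above (hedged - assumes
 * mv_ddr_cl_calc() rounds tAA(min) up to whole clocks): at 800 MHz,
 * tclk = 1000000 / 800 = 1250 ps, so tAA(min) = 13750 ps gives
 * CL = 13750 / 1250 = 11.
 */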
1284 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1285 ("Freq_set dev 0x%x access 0x%x if 0x%x freq 0x%x speed %d:\n\t",
1286 dev_num, access_type, if_id,
1287 frequency, speed_bin_index));
1289 for (cnt_id = 0; cnt_id < MV_DDR_FREQ_LAST; cnt_id++) {
1290 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1291 ("%d ", mv_ddr_cl_val_get(speed_bin_index, cnt_id)));
1294 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, ("\n"));
1296 for (bus_index = 0; bus_index < octets_per_if_num;
1298 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
1300 tm->interface_params[if_id].
1301 as_bus_params[bus_index].mirror_enable_bitmask;
1304 if (mem_mask != 0) {
1305 /* motib: redundant in KW28 */
1306 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1308 DUAL_DUNIT_CFG_REG, 0, 0x8));
1311 /* dll state after exiting SR */
1312 if (is_dll_off == 1) {
1313 CHECK_STATUS(ddr3_tip_if_write
1314 (dev_num, access_type, if_id,
1315 DFS_REG, 0x1, 0x1));
1317 CHECK_STATUS(ddr3_tip_if_write
1318 (dev_num, access_type, if_id,
1322 CHECK_STATUS(ddr3_tip_if_write
1323 (dev_num, access_type, if_id,
1324 DUNIT_MMASK_REG, 0, 0x1));
1325 /* DFS - block transactions */
1326 CHECK_STATUS(ddr3_tip_if_write
1327 (dev_num, access_type, if_id,
1328 DFS_REG, 0x2, 0x2));
1330 /* disable ODT in case of dll off */
1331 if (is_dll_off == 1) {
1332 CHECK_STATUS(ddr3_tip_if_write
1333 (dev_num, access_type, if_id,
1335 CHECK_STATUS(ddr3_tip_if_write
1336 (dev_num, access_type, if_id,
1338 CHECK_STATUS(ddr3_tip_if_write
1339 (dev_num, access_type, if_id,
1341 CHECK_STATUS(ddr3_tip_if_write
1342 (dev_num, access_type, if_id,
1346 /* DFS - Enter Self-Refresh */
1347 CHECK_STATUS(ddr3_tip_if_write
1348 (dev_num, access_type, if_id, DFS_REG, 0x4,
1350 /* polling on self refresh entry */
1351 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST,
1352 if_id, 0x8, 0x8, DFS_REG,
1353 MAX_POLLING_ITERATIONS) != MV_OK) {
1354 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1355 ("Freq_set: DDR3 poll failed on SR entry\n"));
1358 /* Calculate 2T mode */
1359 if (mode_2t != 0xff) {
1361 } else if (timing != MV_DDR_TIM_DEFAULT) {
1362 t2t = (timing == MV_DDR_TIM_2T) ? 1 : 0;
1364 /* Calculate number of CS per interface */
1365 cs_num = mv_ddr_cs_num_get();
1366 t2t = (cs_num == 1) ? 0 : 1;
1370 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_INTERLEAVE_WA) == 1) {
1371 /* Use 1T mode if 1:1 ratio configured */
1372 if (config_func_info[dev_num].tip_get_clock_ratio(frequency) == 1) {
1374 CHECK_STATUS(ddr3_tip_if_write
1375 (dev_num, access_type, if_id,
1376 SDRAM_OPEN_PAGES_CTRL_REG, 0x0, 0x3C0));
1379 /* Middle or target freq */
1380 CHECK_STATUS(ddr3_tip_if_write
1381 (dev_num, access_type, if_id,
1382 SDRAM_OPEN_PAGES_CTRL_REG, 0x3C0, 0x3C0));
1385 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1386 DUNIT_CTRL_LOW_REG, t2t << 3, 0x3 << 3));
1388 /* PLL configuration */
1389 config_func_info[dev_num].tip_set_freq_divider_func(dev_num, if_id,
1392 /* DFS - CL/CWL/WR parameters after exiting SR */
1393 CHECK_STATUS(ddr3_tip_if_write
1394 (dev_num, access_type, if_id, DFS_REG,
1395 (cl_mask_table[cl_value] << 8), 0xf00));
1396 CHECK_STATUS(ddr3_tip_if_write
1397 (dev_num, access_type, if_id, DFS_REG,
1398 (cwl_mask_table[cwl_value] << 12), 0x7000));
1400 t_ckclk = (MEGA / freq);
1401 t_wr = time_to_nclk(mv_ddr_speed_bin_timing_get
1403 SPEED_BIN_TWR), t_ckclk);
1405 CHECK_STATUS(ddr3_tip_if_write
1406 (dev_num, access_type, if_id, DFS_REG,
1407 (twr_mask_table[t_wr] << 16), 0x70000));
1409 /* Restore original RTT values if returning from DLL OFF mode */
1410 if (is_dll_off == 1) {
1411 CHECK_STATUS(ddr3_tip_if_write
1412 (dev_num, access_type, if_id, 0x1874,
1413 g_dic | g_rtt_nom, 0x266));
1414 CHECK_STATUS(ddr3_tip_if_write
1415 (dev_num, access_type, if_id, 0x1884,
1416 g_dic | g_rtt_nom, 0x266));
1417 CHECK_STATUS(ddr3_tip_if_write
1418 (dev_num, access_type, if_id, 0x1894,
1419 g_dic | g_rtt_nom, 0x266));
1420 CHECK_STATUS(ddr3_tip_if_write
1421 (dev_num, access_type, if_id, 0x18a4,
1422 g_dic | g_rtt_nom, 0x266));
1425 /* Reset divider_b assert -> de-assert */
1426 CHECK_STATUS(ddr3_tip_if_write
1427 (dev_num, access_type, if_id,
1428 SDRAM_CFG_REG, 0, 0x10000000));
1430 CHECK_STATUS(ddr3_tip_if_write
1431 (dev_num, access_type, if_id,
1432 SDRAM_CFG_REG, 0x10000000, 0x10000000));
1434 /* ADLL configuration - a function of process and frequency */
1435 CHECK_STATUS(config_func_info[dev_num].
1436 tip_get_freq_config_info_func(dev_num, frequency,
1437 &freq_config_info));
1439 /* TBD check milo5 using device ID ? */
1440 for (bus_cnt = 0; bus_cnt < octets_per_if_num;
1442 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
1443 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1444 (dev_num, ACCESS_TYPE_UNICAST,
1445 if_id, bus_cnt, DDR_PHY_DATA,
1449 /*freq_mask[dev_num][frequency] << 8 */
1451 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1452 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1453 bus_cnt, DDR_PHY_DATA, 0x94,
1454 freq_config_info.rate_per_freq, 0x7));
1457 /* Dunit to PHY drive post edge, ADLL reset assert -> de-assert */
1458 CHECK_STATUS(ddr3_tip_if_write
1459 (dev_num, access_type, if_id,
1460 DRAM_PHY_CFG_REG, 0,
1461 (0x80000000 | 0x40000000)));
1462 mdelay(100 / (freq / mv_ddr_freq_get(MV_DDR_FREQ_LOW_FREQ)));
1463 CHECK_STATUS(ddr3_tip_if_write
1464 (dev_num, access_type, if_id,
1465 DRAM_PHY_CFG_REG, (0x80000000 | 0x40000000),
1466 (0x80000000 | 0x40000000)));
1468 /* polling for ADLL Done */
1469 if (ddr3_tip_if_polling
1470 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ff03ff,
1471 0x3ff03ff, PHY_LOCK_STATUS_REG,
1472 MAX_POLLING_ITERATIONS) != MV_OK) {
1473 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1474 ("Freq_set: DDR3 poll failed(1)\n"));
1477 /* pup data_pup reset assert -> de-assert */
1478 CHECK_STATUS(ddr3_tip_if_write
1479 (dev_num, access_type, if_id,
1480 SDRAM_CFG_REG, 0, 0x60000000));
1482 CHECK_STATUS(ddr3_tip_if_write
1483 (dev_num, access_type, if_id,
1484 SDRAM_CFG_REG, 0x60000000, 0x60000000));
1486 /* Set proper timing params before exiting self-refresh */
1487 ddr3_tip_set_timing(dev_num, access_type, if_id, frequency);
1488 if (delay_enable != 0) {
1489 adll_tap = (is_dll_off == 1) ? 1000 : (MEGA / (freq * 64));
1490 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
1494 CHECK_STATUS(ddr3_tip_if_write
1495 (dev_num, access_type, if_id, DFS_REG, 0,
1497 if (ddr3_tip_if_polling
1498 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x8, DFS_REG,
1499 MAX_POLLING_ITERATIONS) != MV_OK) {
1500 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1501 ("Freq_set: DDR3 poll failed(2)"));
1504 /* Refresh Command */
1505 CHECK_STATUS(ddr3_tip_if_write
1506 (dev_num, access_type, if_id,
1507 SDRAM_OP_REG, 0x2, 0xf1f));
1508 if (ddr3_tip_if_polling
1509 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
1510 SDRAM_OP_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
1511 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1512 ("Freq_set: DDR3 poll failed(3)"));
1515 /* Release DFS Block */
1516 CHECK_STATUS(ddr3_tip_if_write
1517 (dev_num, access_type, if_id, DFS_REG, 0,
1519 /* Controller to MBUS Retry - normal */
1520 CHECK_STATUS(ddr3_tip_if_write
1521 (dev_num, access_type, if_id, DUNIT_MMASK_REG,
1524 /* MR0: Burst Length 8, CL, Auto_precharge 0x16cc */
1526 ((cl_mask_table[cl_value] & 0x1) << 2) |
1527 ((cl_mask_table[cl_value] & 0xe) << 3);
1528 CHECK_STATUS(ddr3_tip_if_write
1529 (dev_num, access_type, if_id, MR0_REG,
1530 val, (0x7 << 4) | (1 << 2)));
1531 /* MR2: CWL = 10, Auto Self-Refresh - disable */
1532 val = (cwl_mask_table[cwl_value] << 3) | g_rtt_wr;
1534 * nklein 24.10.13 - should not be here - leave value as set in
1535 * the init configuration val |= (1 << 9);
1536 * val |= ((tm->interface_params[if_id].
1537 * interface_temp == MV_DDR_TEMP_HIGH) ? (1 << 7) : 0);
1539 /* nklein 24.10.13 - see above comment */
1540 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1542 val, (0x7 << 3) | (0x3 << 9)));
1545 val = ((cl_value - cwl_value + 1) << 4) |
1546 ((cl_value - cwl_value + 6) << 8) |
1547 ((cl_value - 1) << 12) | ((cl_value + 6) << 16);
1548 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1549 if_id, DDR_ODT_TIMING_LOW_REG,
1551 val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
1552 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1553 if_id, DDR_ODT_TIMING_HIGH_REG,
1556 /* In case of DDR4 the receiver ODT must be set always 'on' (odt_config = '0');
1557 * in case of DDR3 the ODT is configured through the timing registers
1559 if (odt_config != 0) {
1560 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, DUNIT_ODT_CTRL_REG, 0xf, 0xf));
1563 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, DUNIT_ODT_CTRL_REG,
1568 val = ((cl_mask_table[cl_value] & 0x1) << 2) |
1569 ((cl_mask_table[cl_value] & 0xe) << 3);
1571 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD0,
1572 val, (0x7 << 4) | (0x1 << 2)));
1575 val = (cwl_mask_table[cwl_value] << 3) | g_rtt_wr;
1576 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD2,
1577 val, (0x7 << 3) | (0x3 << 9)));
1579 if (mem_mask != 0) {
1580 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1593 static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
1594 u32 if_id, u32 cl_value, u32 cwl_value)
1597 u32 val = (cl_value - cwl_value + 6);
1599 val = ((cl_value - cwl_value + 1) << 4) | ((val & 0xf) << 8) |
1600 (((cl_value - 1) & 0xf) << 12) |
1601 (((cl_value + 6) & 0xf) << 16) | (((val & 0x10) >> 4) << 21);
1602 val |= (((cl_value - 1) >> 4) << 22) | (((cl_value + 6) >> 4) << 23);
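/*
 * Worked example (illustrative): CL = 11, CWL = 8 gives
 * cl - cwl + 1 = 4 -> bits [7:4], cl - cwl + 6 = 9 -> bits [11:8],
 * cl - 1 = 10 -> bits [15:12], cl + 6 = 17 -> low nibble in bits
 * [19:16] with its carry placed at bit 23, i.e. val = 0x81a940.
 */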
1604 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1605 DDR_ODT_TIMING_LOW_REG, val, 0xffff0));
1606 val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
1607 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1608 DDR_ODT_TIMING_HIGH_REG, val, 0xffff));
1609 if (odt_additional == 1) {
1610 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1612 SDRAM_ODT_CTRL_HIGH_REG,
1617 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1618 DUNIT_ODT_CTRL_REG, 0xf, 0xf));
1624 * Set Timing values for training
1626 static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
1627 u32 if_id, enum mv_ddr_freq frequency)
1629 u32 t_ckclk = 0, t_ras = 0;
1630 u32 t_rcd = 0, t_rp = 0, t_wr = 0, t_wtr = 0, t_rrd = 0, t_rtp = 0,
1631 t_rfc = 0, t_mod = 0, t_r2r = 0x3, t_r2r_high = 0,
1632 t_r2w_w2r = 0x3, t_r2w_w2r_high = 0x1, t_w2w = 0x3;
1633 u32 refresh_interval_cnt, t_hclk, t_refi, t_faw, t_pd, t_xpdll;
1634 u32 val = 0, page_size = 0, mask = 0;
1635 enum mv_ddr_speed_bin speed_bin_index;
1636 enum mv_ddr_die_capacity memory_size = MV_DDR_DIE_CAP_2GBIT;
1637 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1638 u32 freq = mv_ddr_freq_get(frequency);
1640 speed_bin_index = tm->interface_params[if_id].speed_bin_index;
1641 memory_size = tm->interface_params[if_id].memory_size;
1642 page_size = mv_ddr_page_size_get(tm->interface_params[if_id].bus_width, memory_size);
1643 t_ckclk = (MEGA / freq);
1645 t_hclk = MEGA / (freq / config_func_info[dev_num].tip_get_clock_ratio(frequency));
1647 t_refi = (tm->interface_params[if_id].interface_temp == MV_DDR_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
1648 t_refi *= 1000; /* psec */
1649 refresh_interval_cnt = t_refi / t_hclk; /* no units */
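/*
 * Worked example (hedged - assumes TREFI_LOW = 7800 ns and a 1:1
 * clock ratio at 800 MHz): t_refi = 7800 * 1000 = 7800000 ps,
 * t_hclk = 1250 ps, so refresh_interval_cnt = 7800000 / 1250 = 6240.
 */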
1651 if (page_size == 1) {
1652 t_faw = mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TFAW1K);
1653 t_faw = time_to_nclk(t_faw, t_ckclk);
1654 t_faw = GET_MAX_VALUE(20, t_faw);
1655 } else { /* page size = 2; page size 0.5K is not supported */
1656 t_faw = mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TFAW2K);
1657 t_faw = time_to_nclk(t_faw, t_ckclk);
1658 t_faw = GET_MAX_VALUE(28, t_faw);
1661 t_pd = GET_MAX_VALUE(t_ckclk * 3, mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TPD));
1662 t_pd = time_to_nclk(t_pd, t_ckclk);
1664 t_xpdll = GET_MAX_VALUE(t_ckclk * 10, mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TXPDLL));
1665 t_xpdll = time_to_nclk(t_xpdll, t_ckclk);
1667 t_rrd = (page_size == 1) ? mv_ddr_speed_bin_timing_get(speed_bin_index,
1669 mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TRRD2K);
1670 t_rrd = GET_MAX_VALUE(t_ckclk * 4, t_rrd);
1671 t_rtp = GET_MAX_VALUE(t_ckclk * 4, mv_ddr_speed_bin_timing_get(speed_bin_index,
1673 t_mod = GET_MAX_VALUE(t_ckclk * 12, 15000);
1674 t_wtr = GET_MAX_VALUE(t_ckclk * 4, mv_ddr_speed_bin_timing_get(speed_bin_index,
1676 t_ras = time_to_nclk(mv_ddr_speed_bin_timing_get(speed_bin_index,
1679 t_rcd = time_to_nclk(mv_ddr_speed_bin_timing_get(speed_bin_index,
1682 t_rp = time_to_nclk(mv_ddr_speed_bin_timing_get(speed_bin_index,
1685 t_wr = time_to_nclk(mv_ddr_speed_bin_timing_get(speed_bin_index,
1688 t_wtr = time_to_nclk(t_wtr, t_ckclk);
1689 t_rrd = time_to_nclk(t_rrd, t_ckclk);
1690 t_rtp = time_to_nclk(t_rtp, t_ckclk);
1691 t_rfc = time_to_nclk(mv_ddr_rfc_get(memory_size) * 1000, t_ckclk);
1692 t_mod = time_to_nclk(t_mod, t_ckclk);
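/*
 * Worked example (hedged - tRFC per JEDEC DDR3 is 260 ns for a 4 Gbit
 * die): at 800 MHz (t_ckclk = 1250 ps),
 * t_rfc = time_to_nclk(260 * 1000, 1250) = 208 clocks; likewise
 * t_mod = max(12 * 1250, 15000) = 15000 ps -> 12 clocks.
 */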
1694 /* SDRAM Timing Low */
1695 val = (((t_ras - 1) & SDRAM_TIMING_LOW_TRAS_MASK) << SDRAM_TIMING_LOW_TRAS_OFFS) |
1696 (((t_rcd - 1) & SDRAM_TIMING_LOW_TRCD_MASK) << SDRAM_TIMING_LOW_TRCD_OFFS) |
1697 (((t_rcd - 1) >> SDRAM_TIMING_LOW_TRCD_OFFS & SDRAM_TIMING_HIGH_TRCD_MASK)
1698 << SDRAM_TIMING_HIGH_TRCD_OFFS) |
1699 (((t_rp - 1) & SDRAM_TIMING_LOW_TRP_MASK) << SDRAM_TIMING_LOW_TRP_OFFS) |
1700 (((t_rp - 1) >> SDRAM_TIMING_LOW_TRP_MASK & SDRAM_TIMING_HIGH_TRP_MASK)
1701 << SDRAM_TIMING_HIGH_TRP_OFFS) |
1702 (((t_wr - 1) & SDRAM_TIMING_LOW_TWR_MASK) << SDRAM_TIMING_LOW_TWR_OFFS) |
1703 (((t_wtr - 1) & SDRAM_TIMING_LOW_TWTR_MASK) << SDRAM_TIMING_LOW_TWTR_OFFS) |
1704 ((((t_ras - 1) >> 4) & SDRAM_TIMING_LOW_TRAS_HIGH_MASK) << SDRAM_TIMING_LOW_TRAS_HIGH_OFFS) |
1705 (((t_rrd - 1) & SDRAM_TIMING_LOW_TRRD_MASK) << SDRAM_TIMING_LOW_TRRD_OFFS) |
1706 (((t_rtp - 1) & SDRAM_TIMING_LOW_TRTP_MASK) << SDRAM_TIMING_LOW_TRTP_OFFS);
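/*
 * Note: fields wider than their register slot are split - the low
 * bits stay in the *_LOW field and the overflow bits go to the
 * matching *_HIGH field (tRCD, tRP, tRAS above; tRFC and tMOD in the
 * high-timing register below).
 */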
1708 mask = (SDRAM_TIMING_LOW_TRAS_MASK << SDRAM_TIMING_LOW_TRAS_OFFS) |
1709 (SDRAM_TIMING_LOW_TRCD_MASK << SDRAM_TIMING_LOW_TRCD_OFFS) |
1710 (SDRAM_TIMING_HIGH_TRCD_MASK << SDRAM_TIMING_HIGH_TRCD_OFFS) |
1711 (SDRAM_TIMING_LOW_TRP_MASK << SDRAM_TIMING_LOW_TRP_OFFS) |
1712 (SDRAM_TIMING_HIGH_TRP_MASK << SDRAM_TIMING_HIGH_TRP_OFFS) |
1713 (SDRAM_TIMING_LOW_TWR_MASK << SDRAM_TIMING_LOW_TWR_OFFS) |
1714 (SDRAM_TIMING_LOW_TWTR_MASK << SDRAM_TIMING_LOW_TWTR_OFFS) |
1715 (SDRAM_TIMING_LOW_TRAS_HIGH_MASK << SDRAM_TIMING_LOW_TRAS_HIGH_OFFS) |
1716 (SDRAM_TIMING_LOW_TRRD_MASK << SDRAM_TIMING_LOW_TRRD_OFFS) |
1717 (SDRAM_TIMING_LOW_TRTP_MASK << SDRAM_TIMING_LOW_TRTP_OFFS);
1719 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1720 SDRAM_TIMING_LOW_REG, val, mask));
1722 /* SDRAM Timing High */
1726 val = (((t_rfc - 1) & SDRAM_TIMING_HIGH_TRFC_MASK) << SDRAM_TIMING_HIGH_TRFC_OFFS) |
1727 ((t_r2r & SDRAM_TIMING_HIGH_TR2R_MASK) << SDRAM_TIMING_HIGH_TR2R_OFFS) |
1728 ((t_r2w_w2r & SDRAM_TIMING_HIGH_TR2W_W2R_MASK) << SDRAM_TIMING_HIGH_TR2W_W2R_OFFS) |
1729 ((t_w2w & SDRAM_TIMING_HIGH_TW2W_MASK) << SDRAM_TIMING_HIGH_TW2W_OFFS) |
1730 ((((t_rfc - 1) >> 7) & SDRAM_TIMING_HIGH_TRFC_HIGH_MASK) << SDRAM_TIMING_HIGH_TRFC_HIGH_OFFS) |
1731 ((t_r2r_high & SDRAM_TIMING_HIGH_TR2R_HIGH_MASK) << SDRAM_TIMING_HIGH_TR2R_HIGH_OFFS) |
1732 ((t_r2w_w2r_high & SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_MASK) << SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_OFFS) |
1733 (((t_mod - 1) & SDRAM_TIMING_HIGH_TMOD_MASK) << SDRAM_TIMING_HIGH_TMOD_OFFS) |
1734 ((((t_mod - 1) >> 4) & SDRAM_TIMING_HIGH_TMOD_HIGH_MASK) << SDRAM_TIMING_HIGH_TMOD_HIGH_OFFS);
1736 mask = (SDRAM_TIMING_HIGH_TRFC_MASK << SDRAM_TIMING_HIGH_TRFC_OFFS) |
1737 (SDRAM_TIMING_HIGH_TR2R_MASK << SDRAM_TIMING_HIGH_TR2R_OFFS) |
1738 (SDRAM_TIMING_HIGH_TR2W_W2R_MASK << SDRAM_TIMING_HIGH_TR2W_W2R_OFFS) |
1739 (SDRAM_TIMING_HIGH_TW2W_MASK << SDRAM_TIMING_HIGH_TW2W_OFFS) |
1740 (SDRAM_TIMING_HIGH_TRFC_HIGH_MASK << SDRAM_TIMING_HIGH_TRFC_HIGH_OFFS) |
1741 (SDRAM_TIMING_HIGH_TR2R_HIGH_MASK << SDRAM_TIMING_HIGH_TR2R_HIGH_OFFS) |
1742 (SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_MASK << SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_OFFS) |
1743 (SDRAM_TIMING_HIGH_TMOD_MASK << SDRAM_TIMING_HIGH_TMOD_OFFS) |
1744 (SDRAM_TIMING_HIGH_TMOD_HIGH_MASK << SDRAM_TIMING_HIGH_TMOD_HIGH_OFFS);
1746 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1747 SDRAM_TIMING_HIGH_REG, val, mask));
1749 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1751 refresh_interval_cnt << REFRESH_OFFS,
1752 REFRESH_MASK << REFRESH_OFFS));
1753 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1754 SDRAM_ADDR_CTRL_REG, (t_faw - 1) << T_FAW_OFFS,
1755 T_FAW_MASK << T_FAW_OFFS));
1757 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, DDR_TIMING_REG,
1758 (t_pd - 1) << DDR_TIMING_TPD_OFFS |
1759 (t_xpdll - 1) << DDR_TIMING_TXPDLL_OFFS,
1760 DDR_TIMING_TPD_MASK << DDR_TIMING_TPD_OFFS |
1761 DDR_TIMING_TXPDLL_MASK << DDR_TIMING_TXPDLL_OFFS));
1771 int ddr3_tip_write_cs_result(u32 dev_num, u32 offset)
1773 u32 if_id, bus_num, cs_bitmask, data_val, cs_num;
1774 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1775 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1777 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1778 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1779 for (bus_num = 0; bus_num < octets_per_if_num;
1781 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num);
1783 tm->interface_params[if_id].
1784 as_bus_params[bus_num].cs_bitmask;
1785 if (cs_bitmask != effective_cs) {
1786 cs_num = GET_CS_FROM_MASK(cs_bitmask);
1787 ddr3_tip_bus_read(dev_num, if_id,
1788 ACCESS_TYPE_UNICAST, bus_num,
1791 (effective_cs * 0x4),
1793 ddr3_tip_bus_write(dev_num,
1794 ACCESS_TYPE_UNICAST,
1796 ACCESS_TYPE_UNICAST,
1797 bus_num, DDR_PHY_DATA,
1811 int ddr3_tip_write_mrs_cmd(u32 dev_num, u32 *cs_mask_arr, enum mr_number mr_num, u32 data, u32 mask)
1814 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1816 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1817 PARAM_NOT_CARE, mr_data[mr_num].reg_addr, data, mask));
1818 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1819 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1820 CHECK_STATUS(ddr3_tip_if_write
1821 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1823 (cs_mask_arr[if_id] << 8) | mr_data[mr_num].cmd, 0xf1f));
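/*
 * Note (hedged): SDRAM_OP_REG takes the per-interface CS mask in bits
 * [11:8] and the MR command code in bits [4:0] (hence the 0xf1f mask
 * above); the polling loop below waits for the command field to
 * self-clear before the next MR write is issued.
 */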
1826 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1827 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1828 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
1830 MAX_POLLING_ITERATIONS) != MV_OK) {
1831 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1832 ("write_mrs_cmd: Poll cmd fail"));
1840 * Reset XSB Read FIFO
1842 int ddr3_tip_reset_fifo_ptr(u32 dev_num)
1846 /* Configure PHY reset value to 0 in order to "clean" the FIFO */
1847 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1848 if_id, 0x15c8, 0, 0xff000000));
1850 * Move PHY to RL mode (only in RL mode the PHY overrides FIFO values
1851 * during FIFO reset)
1853 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1854 if_id, TRAINING_SW_2_REG,
1856 /* Ensure the above configuration takes effect in the PHY */
1857 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1859 0x80000000, 0x80000000));
1860 /* Assert read FIFO reset */
1861 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1862 if_id, 0x1400, 0, 0x40000000));
1863 /* De-assert read FIFO reset */
1864 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1866 0x40000000, 0x40000000));
1867 /* Move PHY back to functional mode */
1868 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1869 if_id, TRAINING_SW_2_REG,
1871 /* Stop training machine */
1872 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1873 if_id, 0x15b4, 0x10000, 0x10000));
/*
 * Reset PHY registers
 */
int ddr3_tip_ddr3_reset_phy_regs(u32 dev_num)
{
	u32 if_id, phy_id, cs;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (phy_id = 0; phy_id < octets_per_if_num;
		     phy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, ACCESS_TYPE_UNICAST,
				      phy_id, DDR_PHY_DATA,
				      WL_PHY_REG(effective_cs),
				      phy_reg0_val));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      RL_PHY_REG(effective_cs),
				      phy_reg2_val));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      CRX_PHY_REG(effective_cs), phy_reg3_val));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      CTX_PHY_REG(effective_cs), phy_reg1_val));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      PBS_TX_BCAST_PHY_REG(effective_cs), 0x0));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      PBS_RX_BCAST_PHY_REG(effective_cs), 0));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      PBS_TX_PHY_REG(effective_cs, DQSP_PAD), 0));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      PBS_RX_PHY_REG(effective_cs, DQSP_PAD), 0));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      PBS_TX_PHY_REG(effective_cs, DQSN_PAD), 0));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      PBS_RX_PHY_REG(effective_cs, DQSN_PAD), 0));
		}
	}

	/* Set Receiver Calibration value */
	for (cs = 0; cs < MAX_CS_NUM; cs++) {
		/* PHY register 0xdb bits[5:0] - configure to 63 */
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      DDR_PHY_DATA, VREF_BCAST_PHY_REG(cs), 63));
	}

	return MV_OK;
}
/*
 * Restore Dunit registers
 */
int ddr3_tip_restore_dunit_regs(u32 dev_num)
{
	u32 index_cnt;

	mv_ddr_set_calib_controller();

	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
				       0x1, 0x1));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
				       calibration_update_control << 3,
				       0x3 << 3));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE,
				       ODPG_WR_RD_MODE_ENA_REG,
				       0xffff, MASK_ALL_BITS));

	for (index_cnt = 0; index_cnt < ARRAY_SIZE(odpg_default_value);
	     index_cnt++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      odpg_default_value[index_cnt].reg_addr,
			      odpg_default_value[index_cnt].reg_data,
			      odpg_default_value[index_cnt].reg_mask));
	}

	return MV_OK;
}
int ddr3_tip_adll_regs_bypass(u32 dev_num, u32 reg_val1, u32 reg_val2)
{
	u32 if_id, phy_id;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (phy_id = 0; phy_id < octets_per_if_num; phy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      CTX_PHY_REG(effective_cs), reg_val1));
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
				      PBS_TX_BCAST_PHY_REG(effective_cs), reg_val2));
		}
	}

	return MV_OK;
}
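/*
 * Illustrative sketch of how the main flow below pairs
 * ddr3_tip_adll_regs_bypass() calls around the low-frequency switch:
 * enter bypass with (0, 0x1f) per CS before lowering the frequency, then
 * restore with (phy_reg1_val, 0) after the patterns are loaded. The guard
 * macro and helper name are hypothetical.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static void example_adll_bypass_pair(u32 dev_num, unsigned int max_cs)
{
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++)
		ddr3_tip_adll_regs_bypass(dev_num, 0, 0x1f); /* enter bypass */

	/* ... frequency change and pattern load happen here ... */

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++)
		ddr3_tip_adll_regs_bypass(dev_num, phy_reg1_val, 0); /* restore */

	effective_cs = 0; /* avoid a stale CS index leaking onward */
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */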
/*
 * Auto tune main flow
 */
static int ddr3_tip_ddr3_training_main_flow(u32 dev_num)
{
/* TODO: enable this functionality for other platforms */
#if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X)
	struct init_cntr_param init_cntr_prm;
#endif
	int ret = MV_OK;
	int adll_bypass_flag = 0;
	u32 if_id;
	unsigned int max_cs = mv_ddr_cs_num_get();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	enum mv_ddr_freq freq = tm->interface_params[0].memory_freq;
	unsigned int *freq_tbl = mv_ddr_freq_tbl_get();

#ifdef DDR_VIEWER_TOOL
	if (debug_training == DEBUG_LEVEL_TRACE) {
		CHECK_STATUS(print_device_info((u8)dev_num));
	}
#endif

	ddr3_tip_validate_algo_components(dev_num);

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		CHECK_STATUS(ddr3_tip_ddr3_reset_phy_regs(dev_num));
	}
	/* Set to 0 after each loop to avoid an illegal value being reused */
	effective_cs = 0;

	freq_tbl[MV_DDR_FREQ_LOW_FREQ] = dfs_low_freq;

	if (is_pll_before_init != 0) {
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			config_func_info[dev_num].tip_set_freq_divider_func(
				(u8)dev_num, if_id, freq);
		}
	}

/* TODO: enable this functionality for other platforms */
#if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X)
	if (is_adll_calib_before_init != 0) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("with adll calib before init\n"));
		adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
	}

	if (is_reg_dump != 0) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("Dump before init controller\n"));
		ddr3_tip_reg_dump(dev_num);
	}

	if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
		training_stage = INIT_CONTROLLER;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("INIT_CONTROLLER_MASK_BIT\n"));
		init_cntr_prm.do_mrs_phy = 1;
		init_cntr_prm.is_ctrl64_bit = 0;
		init_cntr_prm.init_phy = 1;
		init_cntr_prm.msys_init = 0;
		ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("hws_ddr3_tip_init_controller failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}
#endif
	ret = adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
	if (ret != MV_OK) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("adll_calibration failure\n"));
		if (debug_mode == 0)
			return MV_FAIL;
	}

	if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
		training_stage = SET_LOW_FREQ;

		for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
			ddr3_tip_adll_regs_bypass(dev_num, 0, 0x1f);
			adll_bypass_flag = 1;
		}
		effective_cs = 0;

		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("SET_LOW_FREQ_MASK_BIT %d\n",
				   freq_tbl[low_freq]));
		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
					PARAM_NOT_CARE, low_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_freq_set failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_LF_MASK_BIT) {
		training_stage = WRITE_LEVELING_LF;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_LF_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_write_leveling(dev_num, 1);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling LF failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			training_stage = LOAD_PATTERN;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("LOAD_PATTERN_MASK_BIT #%d\n",
					   effective_cs));
			ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}

	if (adll_bypass_flag == 1) {
		for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
			ddr3_tip_adll_regs_bypass(dev_num, phy_reg1_val, 0);
			adll_bypass_flag = 0;
		}
	}

	/* Set to 0 after each loop to avoid an illegal value being reused */
	effective_cs = 0;
	if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
		training_stage = SET_MEDIUM_FREQ;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("SET_MEDIUM_FREQ_MASK_BIT %d\n",
				   freq_tbl[medium_freq]));
		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
					PARAM_NOT_CARE, medium_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_freq_set failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
		training_stage = WRITE_LEVELING;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_MASK_BIT\n"));
		if ((rl_mid_freq_wa == 0) || (freq_tbl[medium_freq] == 533)) {
			ret = ddr3_tip_dynamic_write_leveling(dev_num, 0);
		} else {
			/* use legacy write leveling on the mid-frequency WA path */
			ret = ddr3_tip_legacy_dynamic_write_leveling(dev_num);
		}

		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			training_stage = LOAD_PATTERN_2;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("LOAD_PATTERN_2_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being reused */
	effective_cs = 0;

	if (mask_tune_func & READ_LEVELING_MASK_BIT) {
		training_stage = READ_LEVELING;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("READ_LEVELING_MASK_BIT\n"));
		if ((rl_mid_freq_wa == 0) || (freq_tbl[medium_freq] == 533)) {
			ret = ddr3_tip_dynamic_read_leveling(dev_num, medium_freq);
		} else {
			/* use legacy read leveling on the mid-frequency WA path */
			ret = ddr3_tip_legacy_dynamic_read_leveling(dev_num);
		}

		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_read_leveling failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
		training_stage = WRITE_LEVELING_SUPP;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_SUPP_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling_supp failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			training_stage = PBS_RX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("PBS_RX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_pbs_rx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_pbs_rx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			training_stage = PBS_TX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("PBS_TX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_pbs_tx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_pbs_tx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being reused */
	effective_cs = 0;
	if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
		training_stage = SET_TARGET_FREQ;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("SET_TARGET_FREQ_MASK_BIT %d\n",
				   freq_tbl[tm->
					    interface_params[first_active_if].
					    memory_freq]));
		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
					PARAM_NOT_CARE,
					tm->interface_params[first_active_if].
					memory_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_freq_set failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
		training_stage = WRITE_LEVELING_TF;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_TF_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_write_leveling(dev_num, 0);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling TF failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & LOAD_PATTERN_HIGH_MASK_BIT) {
		training_stage = LOAD_PATTERN_HIGH;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("LOAD_PATTERN_HIGH\n"));
		ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_load_all_pattern_to_mem failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}
	if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
		training_stage = READ_LEVELING_TF;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("READ_LEVELING_TF_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_read_leveling(dev_num, tm->
						     interface_params[first_active_if].
						     memory_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_read_leveling TF failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & RL_DQS_BURST_MASK_BIT) {
		training_stage = READ_LEVELING_TF;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("RL_DQS_BURST_MASK_BIT\n"));
		ret = mv_ddr_rl_dqs_burst(0, 0, tm->interface_params[0].memory_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("mv_ddr_rl_dqs_burst TF failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & DM_PBS_TX_MASK_BIT) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_PBS_TX_MASK_BIT\n"));
	}
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			training_stage = VREF_CALIBRATION;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("VREF\n"));
			ret = ddr3_tip_vref(dev_num);
			if (is_reg_dump != 0) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("VREF Dump\n"));
				ddr3_tip_reg_dump(dev_num);
			}
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_vref failure\n"));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being reused */
	effective_cs = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			training_stage = CENTRALIZATION_RX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("CENTRALIZATION_RX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_centralization_rx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_centralization_rx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being reused */
	effective_cs = 0;
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			training_stage = WRITE_LEVELING_SUPP_TF;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("WRITE_LEVELING_SUPP_TF_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_dynamic_write_leveling_supp TF failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being reused */
	effective_cs = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			training_stage = CENTRALIZATION_TX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("CENTRALIZATION_TX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_centralization_tx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_centralization_tx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being reused */
	effective_cs = 0;

	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("restore registers to default\n"));
	/* restore register values */
	CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));

	if (is_reg_dump != 0)
		ddr3_tip_reg_dump(dev_num);

	return MV_OK;
}
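/*
 * Illustrative sketch (hypothetical guard macro and helper): mask_tune_func
 * is a plain bitmask, so individual stages of the main flow above can be
 * skipped by clearing their bits before training runs.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static void example_skip_vref_stage(void)
{
	/* drop VREF calibration from the run; all other stages remain */
	mask_tune_func &= ~VREF_CALIBRATION_MASK_BIT;
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */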
/*
 * DDR3 Dynamic training flow
 */
static int ddr3_tip_ddr3_auto_tune(u32 dev_num)
{
	int status;
	u32 if_id, stage;
	int is_if_fail = 0, is_auto_tune_fail = 0;

	training_stage = INIT_CONTROLLER;

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++)
			training_result[stage][if_id] = NO_TEST_DONE;
	}

	status = ddr3_tip_ddr3_training_main_flow(dev_num);

	/* activate XSB test */
	if (xsb_validate_type != 0) {
		run_xsb_test(dev_num, xsb_validation_base_address, 1, 1,
			     0x1024);
	}

	if (is_reg_dump != 0)
		ddr3_tip_reg_dump(dev_num);

	/* print log */
	CHECK_STATUS(ddr3_tip_print_log(dev_num, window_mem_addr));

#ifndef EXCLUDE_DEBUG_PRINTS
	if (status != MV_OK) {
		CHECK_STATUS(ddr3_tip_print_stability_log(dev_num));
	}
#endif /* EXCLUDE_DEBUG_PRINTS */

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		is_if_fail = 0;
		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) {
			if (training_result[stage][if_id] == TEST_FAILED)
				is_if_fail = 1;
		}
		if (is_if_fail == 1) {
			is_auto_tune_fail = 1;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("Auto Tune failed for IF %d\n",
					   if_id));
		}
	}

	if (((status == MV_FAIL) && (is_auto_tune_fail == 0)) ||
	    ((status == MV_OK) && (is_auto_tune_fail == 1))) {
		/*
		 * The main flow result and the training result DB are out of
		 * sync; warn (this happens when the training result DB is not
		 * updated on a failure)
		 */
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("Warning: Algorithm return value and result DB "
				   "are not synced (status 0x%x, result DB %d)\n",
				   status, is_auto_tune_fail));
	}

	if ((status != MV_OK) || (is_auto_tune_fail == 1))
		return MV_FAIL;
	else
		return MV_OK;
}
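/*
 * Illustrative sketch (hypothetical guard macro and helper): the same
 * training_result DB that ddr3_tip_ddr3_auto_tune() checks can be scanned
 * afterwards to report which stage failed on which interface.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static void example_dump_failed_stages(void)
{
	u32 if_id, stage;

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) {
			if (training_result[stage][if_id] == TEST_FAILED)
				printf("stage %d failed on IF %d\n",
				       (int)stage, (int)if_id);
		}
	}
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */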
/*
 * Enable init sequence
 */
int ddr3_tip_enable_init_sequence(u32 dev_num)
{
	int is_fail = 0;
	u32 if_id = 0, mem_mask = 0, bus_index = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* Enable init sequence */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 0,
				       SDRAM_INIT_CTRL_REG, 0x1, 0x1));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1,
		     SDRAM_INIT_CTRL_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("polling failed IF %d\n",
					   if_id));
			is_fail = 1;
			continue;
		}

		mem_mask = 0;
		for (bus_index = 0; bus_index < octets_per_if_num;
		     bus_index++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
			mem_mask |=
				tm->interface_params[if_id].
				as_bus_params[bus_index].mirror_enable_bitmask;
		}

		if (mem_mask != 0) {
			/* Disable multi-CS */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_MULTICAST,
				      if_id, DUAL_DUNIT_CFG_REG, 1 << 3,
				      1 << 3));
		}
	}

	return (is_fail == 0) ? MV_OK : MV_FAIL;
}
int ddr3_tip_register_dq_table(u32 dev_num, u32 *table)
{
	dq_map_table = table;

	return MV_OK;
}
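/*
 * Illustrative sketch (the guard macro, helper name, and table contents
 * are hypothetical): a board registers its DQ-to-pad mapping once at init
 * time. Only the pointer is stored, so the table must remain valid for
 * the entire training run.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static u32 example_board_dq_map[] = { 0, 1, 2, 3, 4, 5, 6, 7 };

static void example_register_dq_map(u32 dev_num)
{
	ddr3_tip_register_dq_table(dev_num, example_board_dq_map);
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */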
/*
 * Check if pup search is locked
 */
int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode)
{
	u32 bit_start = 0, bit_end = 0, bit_id;

	if (read_mode == RESULT_PER_BIT) {
		bit_start = 0;
		bit_end = BUS_WIDTH_IN_BITS - 1;
	} else {
		bit_start = 0;
		bit_end = 0;
	}

	for (bit_id = bit_start; bit_id <= bit_end; bit_id++) {
		if (GET_LOCK_RESULT(pup_buf[bit_id]) == 0)
			return 0;
	}

	return 1;
}
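/*
 * Illustrative sketch (hypothetical guard macro and helper): with
 * RESULT_PER_BIT, a result buffer counts as locked only if every bit
 * carries a lock indication; one unlocked bit fails the whole pup.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static int example_check_pup(u32 *result_buf)
{
	if (ddr3_tip_is_pup_lock(result_buf, RESULT_PER_BIT) == 0) {
		printf("pup not locked\n");
		return MV_FAIL;
	}

	return MV_OK;
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */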
/*
 * Get minimum buffer value
 */
u8 ddr3_tip_get_buf_min(u8 *buf_ptr)
{
	u8 min_val = 0xff;
	u8 cnt = 0;

	for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
		if (buf_ptr[cnt] < min_val)
			min_val = buf_ptr[cnt];
	}

	return min_val;
}

/*
 * Get maximum buffer value
 */
u8 ddr3_tip_get_buf_max(u8 *buf_ptr)
{
	u8 max_val = 0;
	u8 cnt = 0;

	for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
		if (buf_ptr[cnt] > max_val)
			max_val = buf_ptr[cnt];
	}

	return max_val;
}
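/*
 * Illustrative sketch (hypothetical guard macro and helper): the min/max
 * helpers are typically combined to derive the usable window that is
 * common to all bits of an octet, e.g. when centering a sampling point.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static u8 example_common_window(u8 *low_edges, u8 *high_edges)
{
	u8 lo = ddr3_tip_get_buf_max(low_edges);  /* worst-case low edge */
	u8 hi = ddr3_tip_get_buf_min(high_edges); /* worst-case high edge */

	return (hi > lo) ? (hi - lo) : 0;
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */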
/*
 * The following functions return memory parameters:
 * bus and device width, device size
 */

u32 hws_ddr3_get_bus_width(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	return (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 1) ? 16 : 32;
}

u32 hws_ddr3_get_device_width(u32 if_id)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	return (tm->interface_params[if_id].bus_width ==
		MV_DDR_DEV_WIDTH_8BIT) ? 8 : 16;
}
u32 hws_ddr3_get_device_size(u32 if_id)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (tm->interface_params[if_id].memory_size >=
	    MV_DDR_DIE_CAP_LAST) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Error: Wrong device size of Cs: %d",
				   tm->interface_params[if_id].memory_size));
		return 0;
	}

	return 1 << tm->interface_params[if_id].memory_size;
}
int hws_ddr3_calc_mem_cs_size(u32 if_id, u32 cs, u32 *cs_size)
{
	u32 cs_mem_size, dev_size;

	dev_size = hws_ddr3_get_device_size(if_id);
	if (dev_size == 0)
		return MV_FAIL;

	cs_mem_size = ((hws_ddr3_get_bus_width() /
			hws_ddr3_get_device_width(if_id)) * dev_size);

	/* cs_mem_size is expressed in 1/16th-GByte units to avoid floating point */
	if (cs_mem_size == 2) {
		*cs_size = _128M;
	} else if (cs_mem_size == 4) {
		*cs_size = _256M;
	} else if (cs_mem_size == 8) {
		*cs_size = _512M;
	} else if (cs_mem_size == 16) {
		*cs_size = _1G;
	} else if (cs_mem_size == 32) {
		*cs_size = _2G;
	} else {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Error: Wrong memory size of Cs: %d", cs));
		return MV_FAIL;
	}

	return MV_OK;
}
int hws_ddr3_cs_base_adr_calc(u32 if_id, u32 cs, u32 *cs_base_addr)
{
	u32 cs_mem_size = 0;
#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	u32 physical_mem_size;
	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
#endif

	if (hws_ddr3_calc_mem_cs_size(if_id, cs, &cs_mem_size) != MV_OK)
		return MV_FAIL;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	/*
	 * If the number of address pins is too small for the memory size
	 * defined in the topology, the size is capped by
	 * DEVICE_MAX_DRAM_ADDRESS_SIZE
	 */
	physical_mem_size = mem_size[tm->interface_params[0].memory_size];

	if (hws_ddr3_get_device_width(cs) == 16) {
		/*
		 * A 16-bit mem device can be twice as large - the least
		 * significant address pin is not needed
		 */
		max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
	}

	if (physical_mem_size > max_mem_size) {
		cs_mem_size = max_mem_size *
			(hws_ddr3_get_bus_width() /
			 hws_ddr3_get_device_width(if_id));
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Updated physical mem size from 0x%x to 0x%x\n",
				   physical_mem_size,
				   DEVICE_MAX_DRAM_ADDRESS_SIZE));
	}
#endif

	/* calculate CS base addr */
	*cs_base_addr = ((cs_mem_size) * cs) & 0xffff0000;

	return MV_OK;
}
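/*
 * Illustrative sketch (hypothetical guard macro and helper): CS windows
 * are stacked back to back, so with a 1-GByte CS size code CS1 starts
 * right above CS0; this simply prints the computed bases.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static void example_print_cs_bases(u32 if_id)
{
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 cs, base = 0;

	for (cs = 0; cs < max_cs; cs++) {
		if (hws_ddr3_cs_base_adr_calc(if_id, cs, &base) == MV_OK)
			printf("CS %d base: 0x%x\n", (int)cs, base);
	}
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */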
/* TODO: consider moving to the misl phy driver */
enum {
	MISL_PHY_DRV_OHM_30 = 0xf,
	MISL_PHY_DRV_OHM_48 = 0xa,
	MISL_PHY_DRV_OHM_80 = 0x6,
	MISL_PHY_DRV_OHM_120 = 0x4
};

enum {
	MISL_PHY_ODT_OHM_60 = 0x8,
	MISL_PHY_ODT_OHM_80 = 0x6,
	MISL_PHY_ODT_OHM_120 = 0x4,
	MISL_PHY_ODT_OHM_240 = 0x2
};

static unsigned int mv_ddr_misl_phy_drv_calc(unsigned int cfg)
{
	unsigned int val;

	switch (cfg) {
	case MV_DDR_OHM_30:
		val = MISL_PHY_DRV_OHM_30;
		break;
	case MV_DDR_OHM_48:
		val = MISL_PHY_DRV_OHM_48;
		break;
	case MV_DDR_OHM_80:
		val = MISL_PHY_DRV_OHM_80;
		break;
	case MV_DDR_OHM_120:
		val = MISL_PHY_DRV_OHM_120;
		break;
	default:
		val = PARAM_UNDEFINED;
	}

	return val;
}

static unsigned int mv_ddr_misl_phy_odt_calc(unsigned int cfg)
{
	unsigned int val;

	switch (cfg) {
	case MV_DDR_OHM_60:
		val = MISL_PHY_ODT_OHM_60;
		break;
	case MV_DDR_OHM_80:
		val = MISL_PHY_ODT_OHM_80;
		break;
	case MV_DDR_OHM_120:
		val = MISL_PHY_ODT_OHM_120;
		break;
	case MV_DDR_OHM_240:
		val = MISL_PHY_ODT_OHM_240;
		break;
	default:
		val = PARAM_UNDEFINED;
	}

	return val;
}
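/*
 * Illustrative sketch (hypothetical guard macro and helper): both calc
 * helpers map a board-level ohm setting to the 4-bit code the MISL PHY
 * expects and return PARAM_UNDEFINED for unsupported values.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static void example_misl_codes(void)
{
	/* 48-ohm drive maps to code 0xa; 240-ohm ODT maps to code 0x2 */
	unsigned int drv = mv_ddr_misl_phy_drv_calc(MV_DDR_OHM_48);
	unsigned int odt = mv_ddr_misl_phy_odt_calc(MV_DDR_OHM_240);

	if (drv == PARAM_UNDEFINED || odt == PARAM_UNDEFINED)
		printf("unsupported ohm setting\n");
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */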
unsigned int mv_ddr_misl_phy_drv_data_p_get(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	unsigned int drv_data_p = mv_ddr_misl_phy_drv_calc(tm->edata.phy_edata.drv_data_p);

	if (drv_data_p == PARAM_UNDEFINED)
		printf("error: %s: unsupported drv_data_p parameter found\n", __func__);

	return drv_data_p;
}

unsigned int mv_ddr_misl_phy_drv_data_n_get(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	unsigned int drv_data_n = mv_ddr_misl_phy_drv_calc(tm->edata.phy_edata.drv_data_n);

	if (drv_data_n == PARAM_UNDEFINED)
		printf("error: %s: unsupported drv_data_n parameter found\n", __func__);

	return drv_data_n;
}

unsigned int mv_ddr_misl_phy_drv_ctrl_p_get(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	unsigned int drv_ctrl_p = mv_ddr_misl_phy_drv_calc(tm->edata.phy_edata.drv_ctrl_p);

	if (drv_ctrl_p == PARAM_UNDEFINED)
		printf("error: %s: unsupported drv_ctrl_p parameter found\n", __func__);

	return drv_ctrl_p;
}

unsigned int mv_ddr_misl_phy_drv_ctrl_n_get(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	unsigned int drv_ctrl_n = mv_ddr_misl_phy_drv_calc(tm->edata.phy_edata.drv_ctrl_n);

	if (drv_ctrl_n == PARAM_UNDEFINED)
		printf("error: %s: unsupported drv_ctrl_n parameter found\n", __func__);

	return drv_ctrl_n;
}
unsigned int mv_ddr_misl_phy_odt_p_get(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	unsigned int cs_num = mv_ddr_cs_num_get();
	unsigned int odt_p = PARAM_UNDEFINED;

	if (cs_num > 0 && cs_num <= MAX_CS_NUM)
		odt_p = mv_ddr_misl_phy_odt_calc(tm->edata.phy_edata.odt_p[cs_num - 1]);

	if (odt_p == PARAM_UNDEFINED)
		printf("error: %s: unsupported odt_p parameter found\n", __func__);

	return odt_p;
}

unsigned int mv_ddr_misl_phy_odt_n_get(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	unsigned int cs_num = mv_ddr_cs_num_get();
	unsigned int odt_n = PARAM_UNDEFINED;

	if (cs_num > 0 && cs_num <= MAX_CS_NUM)
		odt_n = mv_ddr_misl_phy_odt_calc(tm->edata.phy_edata.odt_n[cs_num - 1]);

	if (odt_n == PARAM_UNDEFINED)
		printf("error: %s: unsupported odt_n parameter found\n", __func__);

	return odt_n;
}
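/*
 * Illustrative sketch (hypothetical guard macro and helper): the per-pad
 * getters above are typically consumed together when programming the PHY
 * drive strength and termination; this simply fetches and reports them.
 */
#ifdef MV_DDR_EXAMPLE_SKETCHES
static void example_report_misl_phy_settings(void)
{
	printf("drv data p/n: 0x%x/0x%x, drv ctrl p/n: 0x%x/0x%x\n",
	       mv_ddr_misl_phy_drv_data_p_get(),
	       mv_ddr_misl_phy_drv_data_n_get(),
	       mv_ddr_misl_phy_drv_ctrl_p_get(),
	       mv_ddr_misl_phy_drv_ctrl_n_get());
	printf("odt p/n: 0x%x/0x%x\n",
	       mv_ddr_misl_phy_odt_p_get(),
	       mv_ddr_misl_phy_odt_n_get());
}
#endif /* MV_DDR_EXAMPLE_SKETCHES */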