1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) Marvell International Ltd. and its affiliates
7 #include "mv_ddr_training_db.h"
8 #include "mv_ddr_regs.h"
/* PBS (per-bit skew) debug verbosity; errors only by default. */
u8 debug_pbs = DEBUG_LEVEL_ERROR;
/*
 * API to change flags outside of the lib
 */
#if defined(SILENT_LIB)
/* NOTE(review): lossy capture — the SILENT_LIB stub body is not visible here. */
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
#else /* SILENT_LIB */
/* Debug flags for other Training modules (one verbosity knob per block) */
u8 debug_training_static = DEBUG_LEVEL_ERROR;
u8 debug_training = DEBUG_LEVEL_ERROR;
u8 debug_leveling = DEBUG_LEVEL_ERROR;
u8 debug_centralization = DEBUG_LEVEL_ERROR;
u8 debug_training_ip = DEBUG_LEVEL_ERROR;
u8 debug_training_bist = DEBUG_LEVEL_ERROR;
u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
u8 debug_training_access = DEBUG_LEVEL_ERROR;
u8 debug_training_device = DEBUG_LEVEL_ERROR;
/*
 * Apply the topology map's debug_level to the given debug block.
 * NOTE(review): lossy capture — the function's braces are missing here.
 */
void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	ddr3_hws_set_log_level(block, tm->debug_level);
/*
 * Set the log verbosity for one training debug block, or (default path at
 * the bottom) for all of them at once.
 * NOTE(review): lossy capture — the switch header, break statements and some
 * case labels (e.g. HW-alg's own label) are missing from this view;
 * comments-only edit, code kept exactly as captured.
 */
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		debug_training_hw_alg = level;
	case DEBUG_BLOCK_DEVICE:
		debug_training_device = level;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
	case DEBUG_STAGES_REG_DUMP:
		if (level == DEBUG_LEVEL_TRACE)
		/* default branch: raise every module's verbosity together */
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_device = level;
#endif /* SILENT_LIB */
#if defined(DDR_VIEWER_TOOL)
static char *convert_freq(enum mv_ddr_freq freq);
#if defined(EXCLUDE_SWITCH_DEBUG)
/*
 * Sweep-result scratch buffers for the viewer tool. When switch debug is not
 * excluded, equivalent definitions live in the !EXCLUDE_SWITCH_DEBUG section
 * further down — confirm against the full source that only one set is built.
 */
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
#endif /* EXCLUDE_SWITCH_DEBUG */
#endif /* DDR_VIEWER_TOOL */
/* Per-device register-access callback table, filled by ddr3_tip_init_config_func() */
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u8 is_default_centralization = 0;
u8 is_tune_result = 0;
/* Sweep-test enables, toggled by hws_ddr3_tip_sweep_test() */
u8 is_validate_window_per_if = 0;
u8 is_validate_window_per_pup = 0;
u32 is_bist_reset_bit = 1;
u8 is_run_leveling_sweep_tests;

/* Per-device XSB info registered via ddr3_tip_register_xsb_info() */
static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];
/*
 * Dump Dunit & Phy registers.
 * Dunit: 0x1400..0x19ec in steps of 4, one column per active interface.
 * Phy: registers 0x00..0xff, first DDR_PHY_DATA then DDR_PHY_CONTROL space.
 * NOTE(review): lossy capture — braces, some read arguments and closing
 * lines are missing from this view; comments-only edit.
 */
int ddr3_tip_reg_dump(u32 dev_num)
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	printf("-- dunit registers --\n");
	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, reg_addr, read_data,
			printf("0x%x ", read_data[if_id]);

	printf("-- Phy registers --\n");
	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* data PHY space */
				bus_id < octets_per_if_num;
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     ACCESS_TYPE_UNICAST, bus_id,
					     DDR_PHY_DATA, reg_addr,
				printf("0x%x ", data_value);
			/* control PHY space */
				bus_id < octets_per_if_num;
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     ACCESS_TYPE_UNICAST, bus_id,
					     DDR_PHY_CONTROL, reg_addr,
				printf("0x%x ", data_value);
/*
 * Register access func registration — copy the caller's callback table into
 * the per-device config_func_info slot. Rejects a NULL table.
 * NOTE(review): lossy capture — braces and return statements are missing.
 */
int ddr3_tip_init_config_func(u32 dev_num,
			      struct hws_tip_config_func_db *config_func)
	if (config_func == NULL)

	memcpy(&config_func_info[dev_num], config_func,
	       sizeof(struct hws_tip_config_func_db));
/*
 * Get training result info pointer — per-stage row of the global
 * training_result table (declared elsewhere in this module).
 */
enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
	return training_result[stage];
/*
 * Fetch device info through the registered per-device callback, if any.
 * NOTE(review): lossy capture — braces and the no-callback return path are
 * missing from this view.
 */
int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
		return config_func_info[dev_num].
			tip_get_device_info_func((u8) dev_num, info_ptr);
#if defined(DDR_VIEWER_TOOL)
/*
 * Convert freq to character string.
 * NOTE(review): lossy capture — the switch header and most per-case return
 * lines are missing from this view; comments-only edit.
 */
static char *convert_freq(enum mv_ddr_freq freq)
	case MV_DDR_FREQ_LOW_FREQ:
		return "MV_DDR_FREQ_LOW_FREQ";
	case MV_DDR_FREQ_400:
	case MV_DDR_FREQ_533:
	case MV_DDR_FREQ_667:
	case MV_DDR_FREQ_800:
	case MV_DDR_FREQ_933:
	case MV_DDR_FREQ_1066:
	case MV_DDR_FREQ_311:
	case MV_DDR_FREQ_333:
	case MV_DDR_FREQ_467:
	case MV_DDR_FREQ_850:
	case MV_DDR_FREQ_900:
	case MV_DDR_FREQ_360:
		return "MV_DDR_FREQ_360";
	case MV_DDR_FREQ_1000:
		return "MV_DDR_FREQ_1000";
	/* fallback for unmapped enum values */
	return "Unknown Frequency";
/*
 * Convert device ID to character string.
 * NOTE(review): lossy capture — the ID-to-name mapping body is missing from
 * this view; only the fallback is visible.
 */
static char *convert_dev_id(u32 dev_id)
	return "Unknown Device";
/*
 * Convert memory size to character string.
 * (Header comment fixed: it was a copy-paste of "Convert device ID".)
 * NOTE(review): lossy capture — the size-to-name mapping body is missing
 * from this view; only the fallback is visible.
 */
static char *convert_mem_size(u32 dev_id)
	return "wrong mem size";
/*
 * Print a short DDR setup banner (device ID, CK delay) via the registered
 * device-info callback.
 * NOTE(review): lossy capture — braces and return are missing from this view.
 */
int print_device_info(u8 dev_num)
	struct ddr3_device_info info_ptr;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
	printf("=== DDR setup END===\n");
/*
 * Enable/disable the validate-window sweep logging (per-IF and per-pup) and
 * raise the main training verbosity while enabled.
 * NOTE(review): lossy capture — the if/else structure around the two
 * assignment groups is missing from this view.
 */
void hws_ddr3_tip_sweep_test(int enable)
		is_validate_window_per_if = 1;
		is_validate_window_per_pup = 1;
		debug_training = DEBUG_LEVEL_TRACE;
		is_validate_window_per_if = 0;
		is_validate_window_per_pup = 0;
#endif /* DDR_VIEWER_TOOL */
/*
 * Map an hws_result training outcome to a printable string.
 * NOTE(review): lossy capture — other case labels (e.g. pass/fail strings)
 * are missing from this view; only "NOT COMPLETED" is visible.
 */
char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
	switch (tune_result) {
		return "NOT COMPLETED";
/*
 * Print the training-stage result log per active interface; with the DDR
 * viewer tool enabled, optionally run sweep/leveling validation tests first.
 * NOTE(review): lossy capture — braces, local declarations (if_id,
 * is_pup_log, first_active_if, sweep_cnt) and many closing lines are missing
 * from this view; comments-only edit, code kept exactly as captured.
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

#if defined(DDR_VIEWER_TOOL)
	/* sweep/validation path, enabled via hws_ddr3_tip_sweep_test() */
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		enum mv_ddr_freq freq;
		freq = tm->interface_params[first_active_if].memory_freq;
		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s ======\n", convert_freq(freq));
		/* print sweep windows */
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
#if defined(EXCLUDE_SWITCH_DEBUG)
		if (is_run_leveling_sweep_tests == 1) {
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
#endif /* EXCLUDE_SWITCH_DEBUG */
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);
#endif /* DDR_VIEWER_TOOL */

	/* one status line per training stage whose mask bit is set */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));
		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[WRITE_LEVELING]
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[READ_LEVELING]
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[WRITE_LEVELING_SUPP]
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[PBS_RX]
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[PBS_TX]
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[WRITE_LEVELING_TF]
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[READ_LEVELING_TF]
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   [WRITE_LEVELING_SUPP_TF]
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[CENTRALIZATION_RX]
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ddr3_tip_convert_tune_result
					  (training_result[CENTRALIZATION_TX]
#if !defined(EXCLUDE_DEBUG_PRINTS)
/*
 * Print stability log info — CSV-style output: one title pass with column
 * headers, then one data pass per active interface reading calibration,
 * leveling, centralization, vref and PBS values.
 * NOTE(review): lossy capture — braces, the reg_data declaration and many
 * register/argument lines are missing from this view; comments-only edit.
 */
int ddr3_tip_print_stability_log(u32 dev_num)
	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	unsigned int max_cs = mv_ddr_cs_num_get();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* title row */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
			/* 11 PBS pads per direction */
			for (idx = 0; idx < 11; idx++)
				printf("PBSTx-Pad%d,", idx);
			for (idx = 0; idx < 11; idx++)
				printf("PBSRx-Pad%d,", idx);

	/* data rows */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* Tj via registered temperature callback, 0 if none */
		printf("Data: %d,%d,", if_id,
		       (config_func_info[dev_num].tip_get_temperature != NULL)
		       ? (config_func_info[dev_num].
			  tip_get_temperature(dev_num)) : (0));
		/* calibration n/p pairs from dunit regs 0x14c8/0x17c8/0x1dc8 */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
		       ((read_data[if_id] & 0xfc00000) >> 22));

		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				/* write leveling: ADLL in [4:0], phase in higher bits */
				ddr3_tip_bus_read(dev_num, if_id,
						  bus_id, DDR_PHY_DATA,
				printf("%d,%d,", (reg_data & 0x1f),
				       ((reg_data & 0x3e0) >> 5));
				/* read leveling ADLL/phase */
				ddr3_tip_bus_read(dev_num, if_id,
						  bus_id, DDR_PHY_DATA,
				       ((reg_data & 0x1c0) >> 6) * 32,
				       (reg_data & 0x1c0) >> 6);
				/* read-sample delay per CS */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      RD_DATA_SMPL_DLYS_REG,
					      read_data, MASK_ALL_BITS));
					     (0x1f << (8 * csindex))) >>
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
				printf("%d,%d,%d,%d,",
				       ((reg_data & 0x1c0) >> 6) * 32 +
				       read_data[if_id] * 64,
				       ((reg_data & 0x1c0) >> 6),
				/* centralization tx then rx */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  CTX_PHY_REG(csindex),
				printf("%d,", (reg_data & 0x3f));
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  CRX_PHY_REG(csindex),
				printf("%d,", (reg_data & 0x1f));
				/* vref */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
				printf("%d,", (reg_data & 0x7));
				/* Need to add the Read Function from device */
				/* PBS tx pads, then rx pads */
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  bus_id, DDR_PHY_DATA,
					printf("%d,", (reg_data & 0x3f));
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  bus_id, DDR_PHY_DATA,
					printf("%d,", (reg_data & 0x3f));
#endif /* EXCLUDE_DEBUG_PRINTS */
/*
 * Register XSB information — copy the caller's table into the per-device
 * xsb_info slot. NOTE(review): lossy capture — braces/return missing.
 */
int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
/*
 * Read the (masked) ADLL value of every active interface/octet into
 * pup_values[if * octets + bus].
 * NOTE(review): lossy capture — braces and some read arguments are missing.
 */
int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			     u32 reg_addr, u32 mask)
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       DDR_PHY_DATA, reg_addr,
				     octets_per_if_num + bus_id] =
/*
 * Write pup_values[if * octets + bus] back to the PHY for every active
 * interface/octet.
 * NOTE(review): lossy capture — braces, reg_addr parameter line and some
 * write arguments are missing from this view.
 */
int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							bus_id, DDR_PHY_DATA,
/*
 * Read the (masked) leveling phase value per active interface/octet into
 * pup_values[if * octets + bus].
 * NOTE(review): lossy capture — braces and some read arguments are missing.
 */
int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		     int reg_addr, u32 mask)
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       DDR_PHY_DATA, reg_addr,
			pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
/*
 * Write Leveling Value — program the sum of the saved ADLL and phase values
 * back to the PHY for every active interface/octet.
 * NOTE(review): lossy capture — braces and the bus-write tail arguments are
 * missing from this view.
 */
int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0 ; bus_id < octets_per_if_num ; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id * octets_per_if_num + bus_id] +
			       pup_ph_values[if_id * octets_per_if_num + bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
#if !defined(EXCLUDE_SWITCH_DEBUG)
/*
 * NOTE(review): config_func_info also appears defined near the top of this
 * file — confirm against the full source whether this is a duplicate
 * (tentative) definition or a capture artifact.
 */
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u32 start_xsb_offset = 0;
u8 is_dfs_disabled = 0;
u32 default_centrlization_value = 0x12;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;

/* sweep-result scratch buffers (viewer-tool variant lives under DDR_VIEWER_TOOL) */
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];

/* 8-beat data patterns written/read/compared by run_xsb_test() */
u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}
/*
 * Trace-dump data-PHY registers 0x1..0x3 (ADLL settings) for every active
 * interface/octet.
 * NOTE(review): lossy capture — braces and part of the first read's argument
 * list are missing from this view.
 */
int ddr3_tip_print_adll(void)
	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < octets_per_if_num;
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
			CHECK_STATUS(ddr3_tip_bus_read
				     ACCESS_TYPE_UNICAST, bus_cnt,
				     DDR_PHY_DATA, 0x1, &data_p1));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
					   if_id, bus_cnt, data_p1, data_p2,
#endif /* EXCLUDE_SWITCH_DEBUG */
#if defined(DDR_VIEWER_TOOL)
/*
 * Print one ADLL value per active octet across all interfaces (CSV cells).
 * NOTE(review): lossy capture — braces and the i/j declarations are missing.
 */
int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (j = 0; j < octets_per_if_num; j++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++)
			printf("%d ,", adll[i * octets_per_if_num + j]);
/*
 * Print the leveling phase component (value >> 6) per active octet across
 * all interfaces (CSV cells).
 * NOTE(review): lossy capture — braces and the i/j declarations are missing.
 */
int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (j = 0; j < octets_per_if_num; j++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++)
			printf("%d ,", adll[i * octets_per_if_num + j] >> 6);
#endif /* DDR_VIEWER_TOOL */
#if !defined(EXCLUDE_SWITCH_DEBUG)
/* byte_index - only byte 0, 1, 2, or 3; 0xff - test all bytes (typo fixed: was "oxff") */
static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
	u32 burst_cnt = 0, addr_offset, i_id;

	/* build the byte mask; 0xff selects all four bytes of the word */
	0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
		if ((p_src[burst_cnt] & addr_offset) !=
		    (p_dst[if_id] & addr_offset))

	/* on mismatch, log expected vs. received for all interfaces */
	if (b_is_fail == 1) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("IF %d exp: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("0x%8x ", p_src[i_id]));
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("\n_i_f %d rcv: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("(0x%8x ", p_dst[i_id]));
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
#endif /* EXCLUDE_SWITCH_DEBUG */
#if defined(DDR_VIEWER_TOOL)
/*
 * ADLL sweep test — sweeps the TX (CTX) or RX (CRX) centralization ADLL over
 * all interfaces/pups, runs BIST at each step and prints a result table.
 * direction 0 = TX (CTX reg), otherwise RX (CRX reg).
 * NOTE(review): lossy capture — braces, several locals (if_id, cs, reg,
 * adll_value) and many argument lines are missing; comments-only edit.
 */
int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
	u32 pup = 0, start_pup = 0, end_pup = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	enum hws_access_type pup_access;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* per-pup mode sweeps each octet separately; else multicast all octets */
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
		pup_access = ACCESS_TYPE_MULTICAST;

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
		/* clear the result matrix */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
				if_id <= MAX_INTERFACE_NUM - 1;
				for (pup = start_pup; pup <= end_pup; pup++) {
					ctrl_sweepres[adll][if_id][pup] =
		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)

		/* Save DQS value(after algorithm run) */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll,

		/*
		 * Sweep ADLL from 0:31 on all I/F on all Pup and perform
		 * BIST on each stage.
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
						(direction == 0) ? (adll * 2) : adll;
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_MULTICAST, 0,
							      pup_access, pup, DDR_PHY_DATA,
						hws_ddr3_run_bist(dev_num, sweep_pattern, res,
					/* ddr3_tip_reset_fifo_ptr(dev_num); */
						if_id < MAX_INTERFACE_NUM;
						ctrl_sweepres[adll][if_id][pup]
					ACCESS_TYPE_UNICAST,
					ACCESS_TYPE_UNICAST,

		/* result table header */
		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
		       ((direction == 0) ? "TX" : "RX"));
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				printf("I/F%d , ", if_id);

		/* one row per swept ADLL value */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? (adll * 2) : adll;
			printf("Final,%s, Sweep, Result, %d ,",
			       ((direction == 0) ? "TX" : "RX"), adll_value);
				if_id <= MAX_INTERFACE_NUM - 1;
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					ctrl_sweepres[adll][if_id]

		/*
		 * Write back to the phy the Rx DQS value, we store in
		 */
		ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
	ddr3_tip_reset_fifo_ptr(dev_num);
#if defined(EXCLUDE_SWITCH_DEBUG)
/*
 * Leveling sweep test — sweeps the write/read leveling ADLL (WL_PHY_REG /
 * RL_PHY_REG per CS) around the trained value, runs BIST per step, then
 * restores the trained values and prints the result table.
 * direction 0 = TX/WL, otherwise RX/RL; mode 1 = per-pup.
 * NOTE(review): lossy capture — braces, several locals (cs, reg, adll_value)
 * and many argument lines are missing; comments-only edit.
 */
int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
				     u32 direction, u32 mode)
	u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0, gap = 0;
	enum hws_access_type pup_access;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (mode == 1) { /* per pup */
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
		pup_access = ACCESS_TYPE_MULTICAST;

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
		/* clear the result matrix */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					ctrl_sweepres[adll][if_id][pup] = 0;

		for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
			ctrl_adll[adll] = 0;
			ctrl_level_phase[adll] = 0;
			ctrl_adll1[adll] = 0;

		/* save leveling value after running algorithm */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
		read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);
			ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
						 CTX_PHY_REG(cs), MASK_ALL_BITS);

		/* Sweep ADLL from 0 to 31 on all interfaces, all pups,
		 * and perform BIST on each stage
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
					for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
						/* center the sweep on the trained ADLL+phase */
						start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
							     (ctrl_level_phase[if_id * cs *
						start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
						start_adll = (start_adll > 48) ? (start_adll - 48) : 0;
						adll_value += start_adll;
						gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
						      ctrl_adll[if_id * cs * octets_per_if_num + pup];
						gap = (((adll_value % 32) + gap) % 64);
						/* re-encode as ADLL [4:0] + phase [8:6] */
						adll_value = ((adll_value % 32) +
							      (((adll_value - (adll_value % 32)) / 32) << 6));
						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
										ACCESS_TYPE_UNICAST,
						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
										ACCESS_TYPE_UNICAST,
					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
						hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
						ddr3_tip_reset_fifo_ptr(dev_num);
						for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
							VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
							if (pup != 4) { /* TODO: remove literal */
								ctrl_sweepres[adll][if_id][pup] += res[if_id];
								CHECK_STATUS(ddr3_tip_if_read(dev_num,
											      ACCESS_TYPE_UNICAST,
								ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,

			/* restore the trained leveling value for this pup */
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
					     ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
				CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
								DDR_PHY_DATA, reg, start_adll));
					CHECK_STATUS(ddr3_tip_bus_write(dev_num,
									ACCESS_TYPE_UNICAST,

		/* result table header */
		printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				printf("I/F%d , ", if_id);

		/* one row per swept ADLL value (offset-centered) */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
			printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);

		/* write back to the phy the Rx DQS value, we store in the beginning */
		write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
			ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));

		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
		print_ph(dev_num, ctrl_level_phase);

	ddr3_tip_reset_fifo_ptr(dev_num);
#endif /* EXCLUDE_SWITCH_DEBUG */
/*
 * Pretty-print the DDR topology map: global masks, then per-interface
 * frequency/speed-bin/width/size/CAS/temperature and per-bus CS/mirror/swap
 * parameters.
 * NOTE(review): lossy capture — braces, the ui/uj/dev_num declarations and
 * parts of the swap-print expressions are missing; comments-only edit.
 */
void print_topology(struct mv_ddr_topology_map *topology_db)
	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
	printf("\tNumber of buses: 0x%x\n",
	       ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);

	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
		VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
		printf("\n\tInterface ID: %d\n", ui);
		printf("\t\tDDR Frequency: %s\n",
		       convert_freq(topology_db->
				    interface_params[ui].memory_freq));
		printf("\t\tSpeed_bin: %d\n",
		       topology_db->interface_params[ui].speed_bin_index);
		printf("\t\tBus_width: %d\n",
		       (4 << topology_db->interface_params[ui].bus_width));
		printf("\t\tMem_size: %s\n",
		       convert_mem_size(topology_db->
					interface_params[ui].memory_size));
		printf("\t\tCAS-WL: %d\n",
		       topology_db->interface_params[ui].cas_wl);
		printf("\t\tCAS-L: %d\n",
		       topology_db->interface_params[ui].cas_l);
		printf("\t\tTemperature: %d\n",
		       topology_db->interface_params[ui].interface_temp);

		for (uj = 0; uj < 4; uj++) {
			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
			       topology_db->interface_params[ui].
			       as_bus_params[uj].cs_bitmask);
			printf("Mirror: 0x%x\t",
			       topology_db->interface_params[ui].
			       as_bus_params[uj].mirror_enable_bitmask);
			printf("DQS Swap is %s \t",
			       interface_params[ui].as_bus_params[uj].
			       is_dqs_swap == 1) ? "enabled" : "disabled");
			printf("Ck Swap:%s\t",
			       interface_params[ui].as_bus_params[uj].
			       is_ck_swap == 1) ? "enabled" : "disabled");
#endif /* DDR_VIEWER_TOOL */
#if !defined(EXCLUDE_SWITCH_DEBUG)
/*
 * Execute XSB Test transaction (rd/wr/both) — walk mem_addr in external-
 * access bursts, writing and/or reading xsb_test_table patterns and (for
 * rd+wr) comparing via ddr3_tip_compare(); returns the first non-OK status.
 * NOTE(review): lossy capture — braces, addr initialization, the ret_tmp
 * assignment line and the read buffer argument are missing from this view.
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
	u32 seq = 0, if_id = 0, addr, cnt;
	int ret = MV_OK, ret_tmp;
	u32 data_read[MAX_INTERFACE_NUM];
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (cnt = 0; cnt <= burst_length; cnt++) {
			seq = (seq + 1) % 8;	/* cycle through the 8 patterns */
			if (write_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_write
					     (dev_num, if_id, addr, 1,
					      xsb_test_table[seq]));
			if (read_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_read
					     (dev_num, if_id, addr, 1,
			if ((read_type != 0) && (write_type != 0)) {
					ddr3_tip_compare(if_id,
							 xsb_test_table[seq],
			addr += (EXT_ACCESS_BURST_LENGTH * 4);
			/* keep the first failure, but continue the sweep */
			ret = (ret != MV_OK) ? ret : ret_tmp;
#else /*EXCLUDE_SWITCH_DEBUG */
u32 start_xsb_offset = 0;

/* Stub when switch debug is excluded; body not visible in this capture. */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
#endif /* EXCLUDE_SWITCH_DEBUG */