1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) Marvell International Ltd. and its affiliates
/* Debug verbosity for the PBS (per-bit skew) module; defaults to error-only. */
9 u8 debug_pbs = DEBUG_LEVEL_ERROR;
12 * API to change flags outside of the lib
14 #if defined(SILENT_LIB)
/*
 * SILENT_LIB build: the log-level setter is compiled to an empty stub
 * so all debug output stays disabled (body elided in this view).
 */
15 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
19 #else /* SILENT_LIB */
20 /* Debug flags for other Training modules */
/* Per-module debug verbosity; each defaults to printing errors only. */
21 u8 debug_training_static = DEBUG_LEVEL_ERROR;
22 u8 debug_training = DEBUG_LEVEL_ERROR;
23 u8 debug_leveling = DEBUG_LEVEL_ERROR;
24 u8 debug_centralization = DEBUG_LEVEL_ERROR;
25 u8 debug_training_ip = DEBUG_LEVEL_ERROR;
26 u8 debug_training_bist = DEBUG_LEVEL_ERROR;
27 u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
28 u8 debug_training_access = DEBUG_LEVEL_ERROR;
29 u8 debug_training_device = DEBUG_LEVEL_ERROR;
/*
 * Apply the user-configured debug level from the topology map
 * to the given debug block.
 */
32 void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
34 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
35 ddr3_hws_set_log_level(block, tm->debug_level);
/*
 * Set the debug verbosity for one training block; for the
 * DEBUG_STAGES_REG_DUMP/default path all module levels are set at once.
 * NOTE(review): the switch scaffolding (break statements, some case labels)
 * is elided in this view — confirm fall-through behavior against the full file.
 */
38 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
41 case DEBUG_BLOCK_STATIC:
42 debug_training_static = level;
44 case DEBUG_BLOCK_TRAINING_MAIN:
45 debug_training = level;
47 case DEBUG_BLOCK_LEVELING:
48 debug_leveling = level;
50 case DEBUG_BLOCK_CENTRALIZATION:
51 debug_centralization = level;
57 debug_training_hw_alg = level;
59 case DEBUG_BLOCK_DEVICE:
60 debug_training_device = level;
62 case DEBUG_BLOCK_ACCESS:
63 debug_training_access = level;
65 case DEBUG_STAGES_REG_DUMP:
66 if (level == DEBUG_LEVEL_TRACE)
/* fan-out path: set every module's level together */
73 debug_training_static = level;
74 debug_training = level;
75 debug_leveling = level;
76 debug_centralization = level;
78 debug_training_hw_alg = level;
79 debug_training_access = level;
80 debug_training_device = level;
83 #endif /* SILENT_LIB */
85 #if defined(DDR_VIEWER_TOOL)
86 static char *convert_freq(enum hws_ddr_freq freq);
/* Sweep-result and ADLL snapshot buffers used by the viewer-tool sweep tests. */
87 #if defined(EXCLUDE_SWITCH_DEBUG)
88 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
89 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
90 u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
91 u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
92 #endif /* EXCLUDE_SWITCH_DEBUG */
93 #endif /* DDR_VIEWER_TOOL */
/* Per-device register-access callback table (see ddr3_tip_init_config_func). */
95 struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
96 u8 is_default_centralization = 0;
97 u8 is_tune_result = 0;
98 u8 is_validate_window_per_if = 0;
99 u8 is_validate_window_per_pup = 0;
101 u32 is_bist_reset_bit = 1;
102 u8 is_run_leveling_sweep_tests;
/* Per-device XSB info, registered via ddr3_tip_register_xsb_info(). */
104 static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];
107 * Dump Dunit & Phy registers
/*
 * Print Dunit registers 0x1400..0x19ec for every active interface, then
 * PHY data and control registers 0x00..0xff for every active bus.
 * Output goes to the console via printf.
 */
109 int ddr3_tip_reg_dump(u32 dev_num)
111 u32 if_id, reg_addr, data_value, bus_id;
112 u32 read_data[MAX_INTERFACE_NUM];
113 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
114 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
116 printf("-- dunit registers --\n");
117 for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
118 printf("0x%x ", reg_addr);
119 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
120 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
121 CHECK_STATUS(ddr3_tip_if_read
122 (dev_num, ACCESS_TYPE_UNICAST,
123 if_id, reg_addr, read_data,
125 printf("0x%x ", read_data[if_id]);
130 printf("-- Phy registers --\n");
131 for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
132 printf("0x%x ", reg_addr);
133 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
134 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
/* first pass: DDR_PHY_DATA registers per active bus */
136 bus_id < octets_per_if_num;
138 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
139 CHECK_STATUS(ddr3_tip_bus_read
141 ACCESS_TYPE_UNICAST, bus_id,
142 DDR_PHY_DATA, reg_addr,
144 printf("0x%x ", data_value);
/* second pass: DDR_PHY_CONTROL registers per active bus */
147 bus_id < octets_per_if_num;
149 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
150 CHECK_STATUS(ddr3_tip_bus_read
152 ACCESS_TYPE_UNICAST, bus_id,
153 DDR_PHY_CONTROL, reg_addr,
155 printf("0x%x ", data_value);
165 * Register access func registration
/*
 * Copy the caller-supplied register-access callback table into the
 * per-device config_func_info[] slot. Rejects a NULL table.
 */
167 int ddr3_tip_init_config_func(u32 dev_num,
168 struct hws_tip_config_func_db *config_func)
170 if (config_func == NULL)
173 memcpy(&config_func_info[dev_num], config_func,
174 sizeof(struct hws_tip_config_func_db));
180 * Read training result table
/* Copy the internal training_result table into the caller-provided array. */
182 int hws_ddr3_tip_read_training_result(
183 u32 dev_num, enum hws_result result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM])
188 memcpy(result, training_result,
189 sizeof(enum hws_result) *
197 * Get training result info pointer
/* Return a pointer to the per-interface result row for the given stage. */
199 enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
201 return training_result[stage];
/*
 * Fetch device info through the registered per-device callback,
 * if one was installed via ddr3_tip_init_config_func().
 */
207 int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
209 if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
210 return config_func_info[dev_num].
211 tip_get_device_info_func((u8) dev_num, info_ptr);
217 #if defined(DDR_VIEWER_TOOL)
219 * Convert freq to character string
/* Map a hws_ddr_freq enum value to a printable name (most cases elided here). */
221 static char *convert_freq(enum hws_ddr_freq freq)
224 case DDR_FREQ_LOW_FREQ:
225 return "DDR_FREQ_LOW_FREQ";
261 return "DDR_FREQ_360";
264 return "DDR_FREQ_1000";
267 return "Unknown Frequency";
272 * Convert device ID to character string
/* Map a numeric device ID to a printable name (cases elided in this view). */
274 static char *convert_dev_id(u32 dev_id)
287 return "Unknown Device";
292 * Convert device ID to character string
/* Map a memory-size code to a printable string (cases elided in this view). */
294 static char *convert_mem_size(u32 dev_id)
309 return "wrong mem size";
/* Print a human-readable DDR setup summary for the given device. */
313 int print_device_info(u8 dev_num)
315 struct ddr3_device_info info_ptr;
316 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
318 CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
319 printf("=== DDR setup START===\n");
320 printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
321 printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
323 printf("=== DDR setup END===\n");
/*
 * Enable/disable the validate-window sweep tests; enabling also raises
 * the main training debug level to trace.
 */
328 void hws_ddr3_tip_sweep_test(int enable)
331 is_validate_window_per_if = 1;
332 is_validate_window_per_pup = 1;
333 debug_training = DEBUG_LEVEL_TRACE;
335 is_validate_window_per_if = 0;
336 is_validate_window_per_pup = 0;
339 #endif /* DDR_VIEWER_TOOL */
/* Map a hws_result tuning status to a printable string (cases elided here). */
341 char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
343 switch (tune_result) {
349 return "NOT COMPLETED";
/*
 * Print the per-interface training log: under DDR_VIEWER_TOOL (and when the
 * validate-window flags are set) first run TX/RX sweep tests, dump PBS and
 * WL-supp results and the register file; then, for every active interface,
 * print the tune result of each training stage enabled in mask_tune_func.
 */
358 int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
361 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
363 #if defined(DDR_VIEWER_TOOL)
364 if ((is_validate_window_per_if != 0) ||
365 (is_validate_window_per_pup != 0)) {
367 enum hws_ddr_freq freq;
369 freq = tm->interface_params[first_active_if].memory_freq;
371 is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
372 printf("===VALIDATE WINDOW LOG START===\n");
373 printf("DDR Frequency: %s ======\n", convert_freq(freq));
374 /* print sweep windows */
375 ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
376 ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
377 #if defined(EXCLUDE_SWITCH_DEBUG)
378 if (is_run_leveling_sweep_tests == 1) {
379 ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
380 ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
382 #endif /* EXCLUDE_SWITCH_DEBUG */
383 ddr3_tip_print_all_pbs_result(dev_num);
384 ddr3_tip_print_wl_supp_result(dev_num);
385 printf("===VALIDATE WINDOW LOG END ===\n");
386 CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
387 ddr3_tip_reg_dump(dev_num);
389 #endif /* DDR_VIEWER_TOOL */
/* per-stage status report for every active interface */
391 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
392 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
394 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
395 ("IF %d Status:\n", if_id));
397 if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
398 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
399 ("\tInit Controller: %s\n",
400 ddr3_tip_convert_tune_result
401 (training_result[INIT_CONTROLLER]
404 if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
405 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
406 ("\tLow freq Config: %s\n",
407 ddr3_tip_convert_tune_result
408 (training_result[SET_LOW_FREQ]
411 if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
412 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
413 ("\tLoad Pattern: %s\n",
414 ddr3_tip_convert_tune_result
415 (training_result[LOAD_PATTERN]
418 if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
419 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
420 ("\tMedium freq Config: %s\n",
421 ddr3_tip_convert_tune_result
422 (training_result[SET_MEDIUM_FREQ]
425 if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
426 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
428 ddr3_tip_convert_tune_result
429 (training_result[WRITE_LEVELING]
432 if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
433 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
434 ("\tLoad Pattern: %s\n",
435 ddr3_tip_convert_tune_result
436 (training_result[LOAD_PATTERN_2]
439 if (mask_tune_func & READ_LEVELING_MASK_BIT) {
440 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
442 ddr3_tip_convert_tune_result
443 (training_result[READ_LEVELING]
446 if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
447 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
449 ddr3_tip_convert_tune_result
450 (training_result[WRITE_LEVELING_SUPP]
453 if (mask_tune_func & PBS_RX_MASK_BIT) {
454 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
456 ddr3_tip_convert_tune_result
457 (training_result[PBS_RX]
460 if (mask_tune_func & PBS_TX_MASK_BIT) {
461 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
463 ddr3_tip_convert_tune_result
464 (training_result[PBS_TX]
467 if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
468 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
469 ("\tTarget freq Config: %s\n",
470 ddr3_tip_convert_tune_result
471 (training_result[SET_TARGET_FREQ]
474 if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
475 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
477 ddr3_tip_convert_tune_result
478 (training_result[WRITE_LEVELING_TF]
481 if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
482 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
484 ddr3_tip_convert_tune_result
485 (training_result[READ_LEVELING_TF]
488 if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
489 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
490 ("\tWL TF Supp: %s\n",
491 ddr3_tip_convert_tune_result
493 [WRITE_LEVELING_SUPP_TF]
496 if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
497 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
499 ddr3_tip_convert_tune_result
500 (training_result[CENTRALIZATION_RX]
503 if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
504 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
505 ("\tVREF_CALIBRATION: %s\n",
506 ddr3_tip_convert_tune_result
507 (training_result[VREF_CALIBRATION]
510 if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
511 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
513 ddr3_tip_convert_tune_result
514 (training_result[CENTRALIZATION_TX]
522 #if !defined(EXCLUDE_DEBUG_PRINTS)
524 * Print stability log info
/*
 * Emit a CSV-style stability report: first a title row naming all fields
 * (temperature, calibration, per-CS window/leveling/centralization/PBS
 * columns), then one data row per active interface read back from the
 * Dunit and PHY registers.
 */
526 int ddr3_tip_print_stability_log(u32 dev_num)
528 u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
530 u32 read_data[MAX_INTERFACE_NUM];
531 u32 max_cs = ddr3_tip_max_cs_get(dev_num);
532 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
/* title row */
535 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
536 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
537 printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
538 for (csindex = 0; csindex < max_cs; csindex++) {
539 printf("CS%d , ", csindex);
541 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
542 printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
544 for (idx = 0; idx < 11; idx++)
545 printf("PBSTx-Pad%d,", idx);
547 for (idx = 0; idx < 11; idx++)
548 printf("PBSRx-Pad%d,", idx);
/* data rows */
554 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
555 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
557 printf("Data: %d,%d,", if_id,
558 (config_func_info[dev_num].tip_get_temperature != NULL)
559 ? (config_func_info[dev_num].
560 tip_get_temperature(dev_num)) : (0));
/* calibration values from Dunit regs 0x14c8/0x17c8/0x1dc8 */
562 CHECK_STATUS(ddr3_tip_if_read
563 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
564 read_data, MASK_ALL_BITS));
565 printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
566 ((read_data[if_id] & 0xfc00) >> 10));
567 CHECK_STATUS(ddr3_tip_if_read
568 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
569 read_data, MASK_ALL_BITS));
570 printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
571 ((read_data[if_id] & 0xfc00) >> 10));
572 CHECK_STATUS(ddr3_tip_if_read
573 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
574 read_data, MASK_ALL_BITS));
575 printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
576 ((read_data[if_id] & 0xfc00000) >> 22));
578 for (csindex = 0; csindex < max_cs; csindex++) {
579 printf("CS%d , ", csindex);
580 for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
582 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
/* write leveling: ADLL in [4:0], phase in [8:6] */
583 ddr3_tip_bus_read(dev_num, if_id,
585 bus_id, DDR_PHY_DATA,
588 printf("%d,%d,", (reg_data & 0x1f),
589 ((reg_data & 0x3e0) >> 5));
591 ddr3_tip_bus_read(dev_num, if_id,
593 bus_id, DDR_PHY_DATA,
598 ((reg_data & 0x1c0) >> 6) * 32,
600 (reg_data & 0x1c0) >> 6);
/* read leveling: sample delay plus per-CS phase/ADLL */
602 CHECK_STATUS(ddr3_tip_if_read
603 (dev_num, ACCESS_TYPE_UNICAST,
605 RD_DATA_SMPL_DLYS_REG,
606 read_data, MASK_ALL_BITS));
609 (0x1f << (8 * csindex))) >>
611 ddr3_tip_bus_read(dev_num, if_id,
612 ACCESS_TYPE_UNICAST, bus_id,
616 printf("%d,%d,%d,%d,",
618 ((reg_data & 0x1c0) >> 6) * 32 +
619 read_data[if_id] * 64,
621 ((reg_data & 0x1c0) >> 6),
/* centralization TX/RX */
624 ddr3_tip_bus_read(dev_num, if_id,
625 ACCESS_TYPE_UNICAST, bus_id,
627 CTX_PHY_REG(csindex),
629 printf("%d,", (reg_data & 0x3f));
630 ddr3_tip_bus_read(dev_num, if_id,
631 ACCESS_TYPE_UNICAST, bus_id,
633 CRX_PHY_REG(csindex),
635 printf("%d,", (reg_data & 0x1f));
/* vref */
637 ddr3_tip_bus_read(dev_num, if_id,
638 ACCESS_TYPE_UNICAST, bus_id,
642 printf("%d,", (reg_data & 0x7));
644 /* Need to add the Read Function from device */
/* PBS TX then RX, 11 pads each */
647 for (idx = 0; idx < 11; idx++) {
648 ddr3_tip_bus_read(dev_num, if_id,
650 bus_id, DDR_PHY_DATA,
654 printf("%d,", (reg_data & 0x3f));
657 for (idx = 0; idx < 11; idx++) {
658 ddr3_tip_bus_read(dev_num, if_id,
660 bus_id, DDR_PHY_DATA,
664 printf("%d,", (reg_data & 0x3f));
673 #endif /* EXCLUDE_DEBUG_PRINTS */
676 * Register XSB information
/* Store the caller-supplied XSB info table in the per-device xsb_info[] slot. */
678 int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
680 memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
/*
 * Read the masked ADLL value at reg_addr from every active interface/bus
 * PHY into pup_values[if_id * octets_per_if_num + bus_id].
 */
687 int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
688 u32 reg_addr, u32 mask)
691 u32 if_id = 0, bus_id = 0;
692 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
693 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
696 * multi CS support - reg_addr is calucalated in calling function
699 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
700 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
701 for (bus_id = 0; bus_id < octets_per_if_num;
703 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
704 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
707 DDR_PHY_DATA, reg_addr,
710 octets_per_if_num + bus_id] =
/*
 * Write ADLL values from pup_values[] back to the PHY for every active
 * interface/bus (inverse of ddr3_tip_read_adll_value).
 */
721 int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
724 u32 if_id = 0, bus_id = 0;
726 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
727 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
730 * multi CS support - reg_addr is calucalated in calling function
733 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
734 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
735 for (bus_id = 0; bus_id < octets_per_if_num;
737 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
738 data = pup_values[if_id *
741 CHECK_STATUS(ddr3_tip_bus_write(dev_num,
745 bus_id, DDR_PHY_DATA,
/*
 * Read the masked phase field at reg_addr from every active interface/bus
 * PHY into pup_values[] (mask selects the phase bits, e.g. 0x7 << 6).
 */
756 int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
757 int reg_addr, u32 mask)
760 u32 if_id = 0, bus_id = 0;
761 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
762 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
764 /* multi CS support - reg_addr is calucalated in calling function with CS offset */
765 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
766 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
767 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
768 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
769 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
772 DDR_PHY_DATA, reg_addr,
774 pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
782 * Write Leveling Value
/*
 * Write combined leveling values (ADLL + phase) back to the PHY at reg_addr
 * for every active interface/bus.
 */
784 int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
785 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
787 u32 if_id = 0, bus_id = 0;
789 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
790 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
792 /* multi CS support - reg_addr is calucalated in calling function with CS offset */
793 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
794 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
795 for (bus_id = 0 ; bus_id < octets_per_if_num ; bus_id++) {
796 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
797 data = pup_values[if_id * octets_per_if_num + bus_id] +
798 pup_ph_values[if_id * octets_per_if_num + bus_id];
799 CHECK_STATUS(ddr3_tip_bus_write(dev_num,
813 #if !defined(EXCLUDE_SWITCH_DEBUG)
/*
 * NOTE(review): config_func_info appears to be defined earlier in this file
 * as well — confirm against the full source that this is not a duplicate
 * definition (would be a link error if both are compiled in).
 */
814 struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
815 u32 start_xsb_offset = 0;
818 u8 is_dfs_disabled = 0;
819 u32 default_centrlization_value = 0x12;
820 u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
821 rl_test = 0, reset_read_fifo = 0;
823 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
824 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
/* XSB data patterns: walking nibbles, checkerboards and solid words. */
826 u32 xsb_test_table[][8] = {
827 {0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
828 0x66666666, 0x77777777},
829 {0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
830 0xeeeeeeee, 0xffffffff},
831 {0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
832 0x00000000, 0xffffffff},
833 {0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
834 0x00000000, 0xffffffff},
835 {0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
836 0x00000000, 0xffffffff},
837 {0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
838 0x00000000, 0xffffffff},
839 {0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
840 0xffffffff, 0xffffffff},
841 {0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
842 0x00000000, 0x00000000},
843 {0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
844 0xffffffff, 0xffffffff}
847 static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr);
/*
 * Trace PHY registers 0x1..0x3 (phase 1/2 ADLL and reg 3) for every active
 * interface/bus on device 0.
 */
849 int ddr3_tip_print_adll(void)
851 u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
852 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
853 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
855 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
856 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
857 for (bus_cnt = 0; bus_cnt < octets_per_if_num;
859 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
860 CHECK_STATUS(ddr3_tip_bus_read
862 ACCESS_TYPE_UNICAST, bus_cnt,
863 DDR_PHY_DATA, 0x1, &data_p1));
864 CHECK_STATUS(ddr3_tip_bus_read
865 (dev_num, if_id, ACCESS_TYPE_UNICAST,
866 bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
867 CHECK_STATUS(ddr3_tip_bus_read
868 (dev_num, if_id, ACCESS_TYPE_UNICAST,
869 bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
870 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
871 (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
872 if_id, bus_cnt, data_p1, data_p2,
881 * Set attribute value
/*
 * Set a debug attribute by flag_id via ddr3_tip_access_atr(); prints the
 * previous value when the attribute maps to a known variable pointer.
 */
883 int ddr3_tip_set_atr(u32 dev_num, u32 flag_id, u32 value)
886 u32 *ptr_flag = NULL;
888 ret = ddr3_tip_access_atr(dev_num, flag_id, value, &ptr_flag);
889 if (ptr_flag != NULL) {
890 printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x (was 0x%x)\n",
891 flag_id, value, *ptr_flag);
894 printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x\n",
/*
 * Map a debug flag_id to either a pointer to the backing variable (*ptr)
 * or a direct assignment of value; flag_id ranges 0x200+ index into the
 * topology map's per-interface/per-pup parameters.
 */
904 static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr)
906 u32 tmp_val = 0, if_id = 0, pup_id = 0;
907 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
913 *ptr = (u32 *)&(tm->if_act_mask);
917 *ptr = (u32 *)&mask_tune_func;
921 low_freq = (enum hws_ddr_freq)value;
925 medium_freq = (enum hws_ddr_freq)value;
929 *ptr = (u32 *)&generic_init_controller;
933 *ptr = (u32 *)&start_xsb_offset;
937 *ptr = (u32 *)&is_rl_old;
941 *ptr = (u32 *)&is_freq_old;
945 *ptr = (u32 *)&is_dfs_disabled;
949 *ptr = (u32 *)&is_pll_before_init;
953 *ptr = (u32 *)&is_adll_calib_before_init;
956 *ptr = (u32 *)&is_tune_result;
960 *ptr = (u32 *)&is_validate_window_per_if;
964 *ptr = (u32 *)&is_validate_window_per_pup;
968 *ptr = (u32 *)&sweep_cnt;
972 *ptr = (u32 *)&is_bist_reset_bit;
976 *ptr = (u32 *)&is_dfs_in_init;
980 *ptr = (u32 *)&g_zpodt_data;
984 *ptr = (u32 *)&g_znodt_data;
991 *ptr = (u32 *)&(freq_val[DDR_FREQ_LOW_FREQ]);
995 *ptr = (u32 *)&start_pattern;
999 *ptr = (u32 *)&end_pattern;
1003 *ptr = (u32 *)&phy_reg0_val;
1007 *ptr = (u32 *)&phy_reg1_val;
1011 *ptr = (u32 *)&phy_reg2_val;
1015 *ptr = (u32 *)&phy_reg3_val;
1019 sweep_pattern = (enum hws_pattern)value;
1023 *ptr = (u32 *)&g_znri_data;
1027 *ptr = (u32 *)&g_zpri_data;
1031 *ptr = (u32 *)&finger_test;
1035 *ptr = (u32 *)&n_finger_start;
1039 *ptr = (u32 *)&n_finger_end;
1043 *ptr = (u32 *)&p_finger_start;
1047 *ptr = (u32 *)&p_finger_end;
1051 *ptr = (u32 *)&p_finger_step;
1055 *ptr = (u32 *)&n_finger_step;
1059 *ptr = (u32 *)&g_znri_ctrl;
1063 *ptr = (u32 *)&g_zpri_ctrl;
1067 *ptr = (u32 *)&is_reg_dump;
1071 *ptr = (u32 *)&vref_init_val;
1075 *ptr = (u32 *)&mode_2t;
1079 *ptr = (u32 *)&xsb_validate_type;
1083 *ptr = (u32 *)&xsb_validation_base_address;
1087 *ptr = (u32 *)&activate_select_before_run_alg;
1091 *ptr = (u32 *)&activate_deselect_after_run_alg;
1095 *ptr = (u32 *)&odt_additional;
1099 *ptr = (u32 *)&debug_mode;
1103 pbs_pattern = (enum hws_pattern)value;
1107 *ptr = (u32 *)&delay_enable;
1111 *ptr = (u32 *)&ck_delay;
1115 *ptr = (u32 *)&ca_delay;
1119 *ptr = (u32 *)&debug_dunit;
1123 debug_acc = (int)value;
1127 debug_training = (u8)value;
1131 debug_training_bist = (u8)value;
1135 debug_centralization = (u8)value;
1139 debug_training_ip = (u8)value;
1143 debug_leveling = (u8)value;
1147 debug_pbs = (u8)value;
1151 debug_training_static = (u8)value;
1155 debug_training_access = (u8)value;
1160 *ptr = &start_pattern;
1164 *ptr = &end_pattern;
/* ranged flag_ids index into topology-map per-interface parameters */
1168 if ((flag_id >= 0x200) && (flag_id < 0x210)) {
1169 if_id = flag_id - 0x200;
1170 *ptr = (u32 *)&(tm->interface_params
1171 [if_id].memory_freq);
1172 } else if ((flag_id >= 0x210) && (flag_id < 0x220)) {
1173 if_id = flag_id - 0x210;
1174 *ptr = (u32 *)&(tm->interface_params
1175 [if_id].speed_bin_index);
1176 } else if ((flag_id >= 0x220) && (flag_id < 0x230)) {
1177 if_id = flag_id - 0x220;
1178 *ptr = (u32 *)&(tm->interface_params
1180 } else if ((flag_id >= 0x230) && (flag_id < 0x240)) {
1181 if_id = flag_id - 0x230;
1182 *ptr = (u32 *)&(tm->interface_params
1183 [if_id].memory_size);
1184 } else if ((flag_id >= 0x240) && (flag_id < 0x250)) {
1185 if_id = flag_id - 0x240;
1186 *ptr = (u32 *)&(tm->interface_params
1188 } else if ((flag_id >= 0x250) && (flag_id < 0x260)) {
1189 if_id = flag_id - 0x250;
1190 *ptr = (u32 *)&(tm->interface_params
1192 } else if ((flag_id >= 0x270) && (flag_id < 0x2cf)) {
1193 if_id = (flag_id - 0x270) / MAX_BUS_NUM;
1194 pup_id = (flag_id - 0x270) % MAX_BUS_NUM;
1195 *ptr = (u32 *)&(tm->interface_params[if_id].
1196 as_bus_params[pup_id].is_ck_swap);
1197 } else if ((flag_id >= 0x2d0) && (flag_id < 0x32f)) {
1198 if_id = (flag_id - 0x2d0) / MAX_BUS_NUM;
1199 pup_id = (flag_id - 0x2d0) % MAX_BUS_NUM;
1200 *ptr = (u32 *)&(tm->interface_params[if_id].
1201 as_bus_params[pup_id].is_dqs_swap);
1202 } else if ((flag_id >= 0x330) && (flag_id < 0x38f)) {
1203 if_id = (flag_id - 0x330) / MAX_BUS_NUM;
1204 pup_id = (flag_id - 0x330) % MAX_BUS_NUM;
1205 *ptr = (u32 *)&(tm->interface_params[if_id].
1206 as_bus_params[pup_id].cs_bitmask);
1207 } else if ((flag_id >= 0x390) && (flag_id < 0x3ef)) {
1208 if_id = (flag_id - 0x390) / MAX_BUS_NUM;
1209 pup_id = (flag_id - 0x390) % MAX_BUS_NUM;
1210 *ptr = (u32 *)&(tm->interface_params
1211 [if_id].as_bus_params
1212 [pup_id].mirror_enable_bitmask);
1213 } else if ((flag_id >= 0x500) && (flag_id <= 0x50f)) {
/*
 * NOTE(review): flag base is 0x500 but the offset subtracted is 0x320,
 * giving clamp_tbl indices around 0x1e0 — this looks out of range for a
 * 16-entry flag window; confirm the intended base against the full source.
 */
1214 tmp_val = flag_id - 0x320;
1215 *ptr = (u32 *)&(clamp_tbl[tmp_val]);
1217 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1218 ("flag_id out of boundary %d\n",
1220 return MV_BAD_PARAM;
1227 #endif /* EXCLUDE_SWITCH_DEBUG */
1229 #if defined(DDR_VIEWER_TOOL)
/* Print ADLL values, one row per active bus octet, one column per interface. */
1233 int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
1236 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1237 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1239 for (j = 0; j < octets_per_if_num; j++) {
1240 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
1241 for (i = 0; i < MAX_INTERFACE_NUM; i++)
1242 printf("%d ,", adll[i * octets_per_if_num + j]);
/* Print phase values (bits [..:6] of each ADLL entry), same layout as print_adll. */
1249 int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
1252 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1253 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1255 for (j = 0; j < octets_per_if_num; j++) {
1256 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
1257 for (i = 0; i < MAX_INTERFACE_NUM; i++)
1258 printf("%d ,", adll[i * octets_per_if_num + j] >> 6);
1264 #endif /* DDR_VIEWER_TOOL */
1266 #if !defined(EXCLUDE_SWITCH_DEBUG)
1267 /* byte_index - only byte 0, 1, 2, or 3, oxff - test all bytes */
/*
 * Compare a burst of expected words (p_src) against received data (p_dst),
 * masked to one byte lane or all bytes; on mismatch, dump both buffers for
 * every interface at error debug level.
 */
1268 static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
1271 u32 burst_cnt = 0, addr_offset, i_id;
1276 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
1277 for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
1278 if ((p_src[burst_cnt] & addr_offset) !=
1279 (p_dst[if_id] & addr_offset))
1283 if (b_is_fail == 1) {
1284 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1285 ("IF %d exp: ", if_id));
1286 for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1287 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1288 ("0x%8x ", p_src[i_id]));
1290 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1291 ("\n_i_f %d rcv: ", if_id));
1292 for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1293 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1294 ("(0x%8x ", p_dst[i_id]));
1296 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
1301 #endif /* EXCLUDE_SWITCH_DEBUG */
1303 #if defined(DDR_VIEWER_TOOL)
/*
 * Sweep the TX (direction==0) or RX ADLL across its range for each chip
 * select, run BIST with each killer pattern at every step, accumulate
 * failures in ctrl_sweepres[], then restore the saved ADLL values and
 * print the results as CSV.
 */
1307 int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
1310 u32 pup = 0, start_pup = 0, end_pup = 0;
1311 u32 adll = 0, rep = 0, pattern_idx = 0;
1312 u32 res[MAX_INTERFACE_NUM] = { 0 };
1316 enum hws_access_type pup_access;
1318 u32 max_cs = ddr3_tip_max_cs_get(dev_num);
1319 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1320 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
/* mode selects per-pup (unicast) vs all-pup (multicast) sweeping */
1327 end_pup = octets_per_if_num - 1;
1328 pup_access = ACCESS_TYPE_UNICAST;
1332 pup_access = ACCESS_TYPE_MULTICAST;
1335 for (cs = 0; cs < max_cs; cs++) {
1336 reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
1337 for (adll = 0; adll < ADLL_LENGTH; adll++) {
1339 if_id <= MAX_INTERFACE_NUM - 1;
1344 for (pup = start_pup; pup <= end_pup; pup++) {
1345 ctrl_sweepres[adll][if_id][pup] =
1351 for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
1352 ctrl_adll[adll] = 0;
1353 /* Save DQS value(after algorithm run) */
1354 ddr3_tip_read_adll_value(dev_num, ctrl_adll,
1355 reg, MASK_ALL_BITS);
1358 * Sweep ADLL from 0:31 on all I/F on all Pup and perform
1359 * BIST on each stage.
1361 for (pup = start_pup; pup <= end_pup; pup++) {
1362 for (adll = 0; adll < ADLL_LENGTH; adll++) {
1363 for (rep = 0; rep < repeat_num; rep++) {
1364 for (pattern_idx = PATTERN_KILLER_DQ0;
1365 pattern_idx < PATTERN_LAST;
1368 (direction == 0) ? (adll * 2) : adll;
1369 CHECK_STATUS(ddr3_tip_bus_write
1370 (dev_num, ACCESS_TYPE_MULTICAST, 0,
1371 pup_access, pup, DDR_PHY_DATA,
1373 hws_ddr3_run_bist(dev_num, sweep_pattern, res,
1375 /* ddr3_tip_reset_fifo_ptr(dev_num); */
1377 if_id < MAX_INTERFACE_NUM;
1382 ctrl_sweepres[adll][if_id][pup]
1388 ACCESS_TYPE_UNICAST,
1390 ACCESS_TYPE_UNICAST,
/* CSV output: header row, then one row per ADLL step */
1404 printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
1405 ((direction == 0) ? "TX" : "RX"));
1406 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1407 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1409 for (pup = start_pup; pup <= end_pup; pup++) {
1410 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
1411 printf("I/F%d-PHY%d , ", if_id, pup);
1414 printf("I/F%d , ", if_id);
1419 for (adll = 0; adll < ADLL_LENGTH; adll++) {
1420 adll_value = (direction == 0) ? (adll * 2) : adll;
1421 printf("Final,%s, Sweep, Result, %d ,",
1422 ((direction == 0) ? "TX" : "RX"), adll_value);
1425 if_id <= MAX_INTERFACE_NUM - 1;
1427 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1428 for (pup = start_pup; pup <= end_pup; pup++) {
1430 ctrl_sweepres[adll][if_id]
1438 * Write back to the phy the Rx DQS value, we store in
1441 ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
1442 /* print adll results */
1443 ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
1444 printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
1445 print_adll(dev_num, ctrl_adll);
1447 ddr3_tip_reset_fifo_ptr(dev_num);
1452 #if defined(EXCLUDE_SWITCH_DEBUG)
1453 int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
1454 u32 direction, u32 mode)
1456 u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
1457 u32 adll = 0, rep = 0, pattern_idx = 0;
1458 u32 read_data[MAX_INTERFACE_NUM];
1459 u32 res[MAX_INTERFACE_NUM] = { 0 };
1460 int if_id = 0, gap = 0;
1463 enum hws_access_type pup_access;
1465 u32 max_cs = ddr3_tip_max_cs_get(dev_num);
1466 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1467 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1469 if (mode == 1) { /* per pup */
1471 end_pup = octets_per_if_num - 1;
1472 pup_access = ACCESS_TYPE_UNICAST;
1476 pup_access = ACCESS_TYPE_MULTICAST;
1479 for (cs = 0; cs < max_cs; cs++) {
1480 reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
1481 for (adll = 0; adll < ADLL_LENGTH; adll++) {
1482 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
1483 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1484 for (pup = start_pup; pup <= end_pup; pup++)
1485 ctrl_sweepres[adll][if_id][pup] = 0;
1489 for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
1490 ctrl_adll[adll] = 0;
1491 ctrl_level_phase[adll] = 0;
1492 ctrl_adll1[adll] = 0;
1495 /* save leveling value after running algorithm */
1496 ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
1497 read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);
1500 ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
1501 CTX_PHY_REG(cs), MASK_ALL_BITS);
1503 /* Sweep ADLL from 0 to 31 on all interfaces, all pups,
1504 * and perform BIST on each stage
1506 for (pup = start_pup; pup <= end_pup; pup++) {
1507 for (adll = 0; adll < ADLL_LENGTH; adll++) {
1508 for (rep = 0; rep < repeat_num; rep++) {
1509 adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
1510 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1511 start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
1512 (ctrl_level_phase[if_id * cs *
1517 start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
1519 start_adll = (start_adll > 48) ? (start_adll - 48) : 0;
1521 adll_value += start_adll;
1523 gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
1524 ctrl_adll[if_id * cs * octets_per_if_num + pup];
1525 gap = (((adll_value % 32) + gap) % 64);
1527 adll_value = ((adll_value % 32) +
1528 (((adll_value - (adll_value % 32)) / 32) << 6));
1530 CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1531 ACCESS_TYPE_UNICAST,
1539 CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1540 ACCESS_TYPE_UNICAST,
1549 for (pattern_idx = PATTERN_KILLER_DQ0;
1550 pattern_idx < PATTERN_LAST;
1552 hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
1553 ddr3_tip_reset_fifo_ptr(dev_num);
1554 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1555 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1556 if (pup != 4) { /* TODO: remove literal */
1557 ctrl_sweepres[adll][if_id][pup] += res[if_id];
1559 CHECK_STATUS(ddr3_tip_if_read(dev_num,
1560 ACCESS_TYPE_UNICAST,
1565 ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
1566 CHECK_STATUS(ddr3_tip_if_write(dev_num,
1567 ACCESS_TYPE_UNICAST,
1572 CHECK_STATUS(ddr3_tip_if_write(dev_num,
1573 ACCESS_TYPE_UNICAST,
1584 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1585 start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
1586 ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
1587 CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
1588 DDR_PHY_DATA, reg, start_adll));
1590 CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1591 ACCESS_TYPE_UNICAST,
1604 printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));
1606 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1607 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1609 for (pup = start_pup; pup <= end_pup; pup++) {
1610 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
1611 printf("I/F%d-PHY%d , ", if_id, pup);
1614 printf("I/F%d , ", if_id);
1619 for (adll = 0; adll < ADLL_LENGTH; adll++) {
1620 adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
1621 printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);
1623 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1624 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1625 for (pup = start_pup; pup <= end_pup; pup++)
1626 printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);
1631 /* write back to the phy the Rx DQS value, we store in the beginning */
1632 write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
1634 ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));
1636 /* print adll results */
1637 ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
1638 printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
1639 print_adll(dev_num, ctrl_adll);
1640 print_ph(dev_num, ctrl_level_phase);
1642 ddr3_tip_reset_fifo_ptr(dev_num);
1646 #endif /* EXCLUDE_SWITCH_DEBUG */
/*
 * print_topology() - pretty-print a DDR topology map to the console
 * (compiled only under DDR_VIEWER_TOOL).
 *
 * Prints the interface activation mask, octets-per-interface count and bus
 * activation mask, then for each active interface: DDR frequency, speed-bin
 * index, bus width (4 << bus_width), memory size, CAS-WL, CAS-L and
 * temperature, followed by per-bus (4 buses) CS bitmask, mirror bitmask and
 * DQS/CK swap settings.
 *
 * NOTE(review): the declarations of 'ui', 'uj' and 'dev_num', the opening
 * brace, and the closing braces of the loops/function sit on lines elided
 * from this excerpt — confirm against the full source file.
 */
1648 void print_topology(struct mv_ddr_topology_map *topology_db)
1653 printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
1654 printf("\tNumber of buses: 0x%x\n",
1655 ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
1656 printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);
/* Per-interface parameters; inactive interfaces are skipped by the macro. */
1658 for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
1659 VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
1660 printf("\n\tInterface ID: %d\n", ui);
1661 printf("\t\tDDR Frequency: %s\n",
1662 convert_freq(topology_db->
1663 interface_params[ui].memory_freq));
1664 printf("\t\tSpeed_bin: %d\n",
1665 topology_db->interface_params[ui].speed_bin_index);
/* bus_width is an enum/log encoding: actual width in bits = 4 << value */
1666 printf("\t\tBus_width: %d\n",
1667 (4 << topology_db->interface_params[ui].bus_width));
1668 printf("\t\tMem_size: %s\n",
1669 convert_mem_size(topology_db->
1670 interface_params[ui].memory_size));
1671 printf("\t\tCAS-WL: %d\n",
1672 topology_db->interface_params[ui].cas_wl);
1673 printf("\t\tCAS-L: %d\n",
1674 topology_db->interface_params[ui].cas_l);
1675 printf("\t\tTemperature: %d\n",
1676 topology_db->interface_params[ui].interface_temp);
/* Per-bus parameters; 4 is presumably the per-interface bus count here —
 * TODO confirm why MAX_BUS_NUM is not used. */
1678 for (uj = 0; uj < 4; uj++) {
1679 printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
1680 topology_db->interface_params[ui].
1681 as_bus_params[uj].cs_bitmask);
1682 printf("Mirror: 0x%x\t",
1683 topology_db->interface_params[ui].
1684 as_bus_params[uj].mirror_enable_bitmask);
1685 printf("DQS Swap is %s \t",
1687 interface_params[ui].as_bus_params[uj].
1688 is_dqs_swap == 1) ? "enabled" : "disabled");
1689 printf("Ck Swap:%s\t",
1691 interface_params[ui].as_bus_params[uj].
1692 is_ck_swap == 1) ? "enabled" : "disabled")
1699 #if !defined(EXCLUDE_SWITCH_DEBUG)
1701 * Execute XSB Test transaction (rd/wr/both)
/*
 * run_xsb_test() - execute XSB external-access test transactions
 * (read, write, or both) against DRAM.
 * @dev_num:      device number forwarded to the ext read/write primitives
 * @mem_addr:     base DRAM address for the burst accesses
 * @write_type:   non-zero -> write xsb_test_table[] patterns
 * @read_type:    non-zero -> read back into the data buffer
 * @burst_length: number of bursts; the loop executes burst_length + 1 times
 *
 * For every active interface, cycles through the eight xsb_test_table
 * patterns (seq wraps modulo 8); when both read and write are requested the
 * read-back data is compared against the pattern just written, and the
 * FIRST failing status is preserved in 'ret' (later iterations cannot
 * overwrite it).
 *
 * Returns MV_OK on success, otherwise the first comparison/access error.
 *
 * NOTE(review): 'addr' looks uninitialized in this excerpt; its
 * initialization (presumably addr = mem_addr per interface) is on lines
 * elided from this view — confirm against the full source. The function's
 * closing braces/return are likewise elided.
 */
1703 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1704 u32 read_type, u32 burst_length)
1706 u32 seq = 0, if_id = 0, addr, cnt;
1707 int ret = MV_OK, ret_tmp;
1708 u32 data_read[MAX_INTERFACE_NUM];
1709 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1711 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1712 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
/* <= makes this burst_length + 1 iterations */
1714 for (cnt = 0; cnt <= burst_length; cnt++) {
1715 seq = (seq + 1) % 8;
1716 if (write_type != 0) {
1717 CHECK_STATUS(ddr3_tip_ext_write
1718 (dev_num, if_id, addr, 1,
1719 xsb_test_table[seq]));
1721 if (read_type != 0) {
1722 CHECK_STATUS(ddr3_tip_ext_read
1723 (dev_num, if_id, addr, 1,
/* Compare only makes sense when we both wrote and read this burst */
1726 if ((read_type != 0) && (write_type != 0)) {
1728 ddr3_tip_compare(if_id,
1729 xsb_test_table[seq],
/* Advance to the next burst window (4 bytes per beat) */
1732 addr += (EXT_ACCESS_BURST_LENGTH * 4);
/* Keep the first non-OK status; don't let later passes mask it */
1733 ret = (ret != MV_OK) ? ret : ret_tmp;
1741 #else /*EXCLUDE_SWITCH_DEBUG */
/* EXCLUDE_SWITCH_DEBUG build: XSB test support is compiled out. */
1742 u32 start_xsb_offset = 0;
/*
 * run_xsb_test() - stub for EXCLUDE_SWITCH_DEBUG builds; keeps the same
 * prototype as the full implementation so callers link unchanged.
 *
 * NOTE(review): the stub body is on lines elided from this excerpt
 * (presumably prints a "not supported" notice and returns MV_OK) — confirm
 * against the full source.
 */
1744 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1745 u32 read_type, u32 burst_length)
1750 #endif /* EXCLUDE_SWITCH_DEBUG */