// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */
7 #include "mv_ddr_regs.h"
/* Vref search step sizes: coarse first pass, finer second pass */
9 #define VREF_INITIAL_STEP 3
10 #define VREF_SECOND_STEP 1
/* highest index into the vref_map[] lookup used by ddr3_tip_vref() */
11 #define VREF_MAX_INDEX 7
/* sentinel bounds used when searching for min/max phase values */
12 #define MAX_VALUE (1024 - 1)
13 #define MIN_VALUE (-MAX_VALUE)
/* extract the 4-bit read-sample delay of chip-select 'cs' from 'data' */
14 #define GET_RD_SAMPLE_DELAY(data, cs) ((data >> rd_sample_mask[cs]) & 0xf)
/* when non-zero, centralization skips its minimum-window check
 * (toggled around ddr3_tip_centralization_rx() in ddr3_tip_vref())
 */
17 int ddr3_tip_centr_skip_min_win_check = 0;
/* Per-octet (pup) / per-interface Vref search state, indexed [pup][if_id]:
 * current/last Vref index tried and the valid-window size measured there.
 */
18 u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
19 u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
20 u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
21 u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* Vref index at which the backward (second-step) search must stop */
22 u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* per-interface progress counter, incremented as pups converge */
23 u8 interface_state[MAX_INTERFACE_NUM];
/* NOTE(review): indexed [if_id][pup] — the OPPOSITE order of the tables
 * above; easy to get wrong when reading the code below.
 */
24 u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* pups whose window already exceeds this threshold skip Vref tuning */
25 u8 vref_window_size_th = 12;
/* per-pup state machine: VREF_STEP_1 / VREF_STEP_2 / VREF_CONVERGE */
27 static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* Bit offsets of each chip-select's read-sample delay field in
 * RD_DATA_SMPL_DLYS_REG.
 * NOTE(review): the initializer lines are elided in this chunk of the
 * file — the array contents are not visible here.
 */
29 static u32 rd_sample_mask[] = {
38 #define VREF_CONVERGE 2
/*
 * ODT additional timing
 */
/*
 * ddr3_tip_write_additional_odt_setting() - tune ODT assertion window
 *
 * Reads the per-chip-select read-sample delays for @if_id, combines them
 * with the read-leveling phase read back from each active data PHY octet
 * to find the minimum and maximum effective read sample, clamps both to
 * their register field widths, and programs the resulting ODT start/stop
 * points into DDR_ODT_TIMING_LOW_REG.
 *
 * Returns MV_OK on success; CHECK_STATUS() propagates register access
 * failures.
 *
 * NOTE(review): several source lines are elided in this chunk (the first
 * register write's arguments, the inner pup loop header, closing braces
 * and the return statement), so the body reads as incomplete here.
 */
43 int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
/* 0x1f is the widest possible 5-bit read sample, used as initial min */
45 u32 cs_num = 0, max_read_sample = 0, min_read_sample = 0x1f;
46 u32 data_read[MAX_INTERFACE_NUM] = { 0 };
47 u32 read_sample[MAX_CS_NUM];
/* max_phase starts at the negative sentinel so any real phase wins */
50 int max_phase = MIN_VALUE, current_phase;
51 enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
52 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
53 unsigned int max_cs = mv_ddr_cs_num_get();
55 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
/* fetch all chip-selects' read-sample delays in one register read */
58 CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
59 RD_DATA_SMPL_DLYS_REG,
60 data_read, MASK_ALL_BITS));
61 val = data_read[if_id];
63 for (cs_num = 0; cs_num < max_cs; cs_num++) {
64 read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);
66 /* find maximum of read_samples */
67 if (read_sample[cs_num] >= max_read_sample) {
/* equal samples restart the phase search for this delay value */
68 if (read_sample[cs_num] == max_read_sample)
69 max_phase = MIN_VALUE;
71 max_read_sample = read_sample[cs_num];
74 pup_index < octets_per_if_num;
76 CHECK_STATUS(ddr3_tip_bus_read
78 ACCESS_TYPE_UNICAST, pup_index,
/* NOTE(review): mask 0xe0 covers bits [7:5] but the shift is 6, so
 * bit 5 is discarded — looks intentional upstream, but worth
 * confirming against the PHY register layout.
 */
83 current_phase = ((int)val & 0xe0) >> 6;
84 if (current_phase >= max_phase)
85 max_phase = current_phase;
/* track the minimum read sample across chip-selects as well */
90 if (read_sample[cs_num] < min_read_sample)
91 min_read_sample = read_sample[cs_num];
/* widen the window: start one tap early, end past the max phase */
94 min_read_sample = min_read_sample - 1;
95 max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
/* clamp to the 4-bit start and 5-bit stop register fields */
96 if (min_read_sample >= 0xf)
97 min_read_sample = 0xf;
98 if (max_read_sample >= 0x1f)
99 max_read_sample = 0x1f;
101 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
102 DDR_ODT_TIMING_LOW_REG,
103 ((min_read_sample - 1) << 12),
105 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
106 DDR_ODT_TIMING_LOW_REG,
107 (max_read_sample << 16),
/*
 * get_valid_win_rx() - read back the RX valid-window result per octet
 *
 * Reads RESULT_PHY_REG from each of the four data PHYs on @if_id and
 * stores the 5-bit RX result field into res[0..3].
 *
 * NOTE(review): the opening brace, local declarations, closing braces
 * and return are elided in this chunk of the file.
 */
113 int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
115 u32 reg_pup = RESULT_PHY_REG;
/* all four octets are read unconditionally; bus_act_mask is not checked */
125 for (i = 0; i < 4; i++) {
126 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
127 ACCESS_TYPE_UNICAST, i,
128 DDR_PHY_DATA, reg_pup,
/* keep only the 5-bit RX window field */
130 res[i] = (reg_data >> RESULT_PHY_RX_OFFS) & 0x1f;
/*
 * This algorithm searches for the vertical optimum (voltage axis) of the
 * sample signal. Tuning the voltage sample point can improve the eye /
 * window size of each bit. The limitation is that the same Vref is tuned
 * for all DQ lines, so there is no per-bit granularity; the result is
 * more like a centralization step. Because training state-machine support
 * is not available, a smarter search is used to save time.
 */
/*
 * ddr3_tip_vref() - per-octet Vref (reference voltage) search
 *
 * Runs a two-phase search over the PHY Vref setting for every active
 * data octet (pup) on every active interface: a coarse forward sweep
 * (VREF_STEP_1, step VREF_INITIAL_STEP) followed by a finer backward
 * sweep (VREF_STEP_2, step VREF_SECOND_STEP). The RX valid window is
 * re-measured via ddr3_tip_centralization_rx() at each step, and the
 * Vref that maximizes the window is programmed back into the PHY.
 * The centralization start/end patterns are saved, overridden with
 * PATTERN_VREF for the duration of the search, and restored at the end.
 *
 * Returns MV_OK; per-interface flow_result[] is set to TEST_SUCCESS.
 *
 * NOTE(review): a large number of source lines are elided in this chunk
 * (loop headers, else branches, state assignments, closing braces), so
 * the control flow below is incomplete as displayed here.
 */
147 int ddr3_tip_vref(u32 dev_num)
/* The Vref register has a non-linear order; vref_map[] below translates
 * a monotonic search index into the actual register encoding. Revisit
 * for future projects.
 */
154 1, 2, 3, 4, 5, 6, 7, 0
156 /* State and parameter definitions */
157 u32 initial_step = VREF_INITIAL_STEP;
/* second_step is applied subtractively during the backward sweep */
159 u32 second_step = VREF_SECOND_STEP;
/* NOTE(review): 'currrent_vref' (triple 'r') is a long-standing upstream
 * typo for the scratch Vref index; kept as-is since renaming is a code
 * change.
 */
160 u32 algo_run_flag = 0, currrent_vref = 0;
162 u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
165 u32 copy_start_pattern, copy_end_pattern;
166 enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
168 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
169 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
171 CHECK_STATUS(ddr3_tip_special_rx(dev_num));
173 /* save start/end pattern */
174 copy_start_pattern = start_pattern;
175 copy_end_pattern = end_pattern;
177 /* set vref as centralization pattern */
178 start_pattern = PATTERN_VREF;
179 end_pattern = PATTERN_VREF;
/* Phase 0: reset the per-pup search state; pups whose window already
 * exceeds vref_window_size_th are marked converged immediately, the
 * rest start at vref_map[0] in state VREF_STEP_1.
 */
182 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
183 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
185 pup < octets_per_if_num; pup++) {
186 current_vref[pup][if_id] = 0;
187 last_vref[pup][if_id] = 0;
188 lim_vref[pup][if_id] = 0;
189 current_valid_window[pup][if_id] = 0;
190 last_valid_window[pup][if_id] = 0;
191 if (vref_window_size[if_id][pup] >
192 vref_window_size_th) {
193 pup_st[pup][if_id] = VREF_CONVERGE;
194 DEBUG_TRAINING_HW_ALG(
196 ("VREF config, IF[ %d ]pup[ %d ] - Vref tune not requered (%d)\n",
197 if_id, pup, __LINE__));
199 pup_st[pup][if_id] = VREF_STEP_1;
/* read-modify-write: only the low 4 Vref bits are replaced */
200 CHECK_STATUS(ddr3_tip_bus_read
202 ACCESS_TYPE_UNICAST, pup,
203 DDR_PHY_DATA, reg_addr, &val));
204 CHECK_STATUS(ddr3_tip_bus_write
205 (dev_num, ACCESS_TYPE_UNICAST,
206 if_id, ACCESS_TYPE_UNICAST,
207 pup, DDR_PHY_DATA, reg_addr,
208 (val & (~0xf)) | vref_map[0]));
209 DEBUG_TRAINING_HW_ALG(
211 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
213 (val & (~0xf)) | vref_map[0],
217 interface_state[if_id] = 0;
220 /* TODO: Set number of active interfaces */
221 num_pup = octets_per_if_num * MAX_INTERFACE_NUM;
/* Main loop: iterate until all pups converged or 10 passes elapsed.
 * NOTE(review): bitwise '&' is used on two boolean conditions; it works
 * here because both operands are 0/1, but '&&' is the conventional form.
 */
223 while ((algo_run_flag <= num_pup) & (while_count < 10)) {
/* Measure: run RX centralization three times and average the window
 * (scaled by 1000) to reduce measurement noise.
 */
225 for (rep = 1; rep < 4; rep++) {
226 ddr3_tip_centr_skip_min_win_check = 1;
227 ddr3_tip_centralization_rx(dev_num);
228 ddr3_tip_centr_skip_min_win_check = 0;
230 /* Read Valid window results only for non converge pups */
231 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
232 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
233 if (interface_state[if_id] != 4) {
234 get_valid_win_rx(dev_num, if_id, res);
236 pup < octets_per_if_num;
239 (tm->bus_act_mask, pup);
/* running average across the 'rep' repetitions */
245 current_valid_window[pup]
247 (current_valid_window[pup]
248 [if_id] * (rep - 1) +
249 1000 * res[pup]) / rep;
/* trace the averaged windows per interface */
255 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
256 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
257 DEBUG_TRAINING_HW_ALG(
259 ("current_valid_window: IF[ %d ] - ", if_id));
262 pup < octets_per_if_num; pup++) {
263 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
264 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
269 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
272 /* Compare results and respond as function of state */
273 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
274 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
276 pup < octets_per_if_num; pup++) {
277 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
278 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
279 ("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
/* converged pups are skipped entirely */
284 if (pup_st[pup][if_id] == VREF_CONVERGE)
287 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
288 ("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
290 current_valid_window[pup]
292 last_valid_window[pup]
293 [if_id], lim_vref[pup]
/* The +200 slack below allows +/- 1 tap of solution resolution
 * (windows are scaled by 1000, so 200 = 0.2 tap).
 */
300 if (current_valid_window[pup][if_id] + 200 >=
301 (last_valid_window[pup][if_id])) {
/* Window did not shrink: keep stepping in the current direction. */
302 if (pup_st[pup][if_id] == VREF_STEP_1) {
/* Same state/step; just record the max window and move Vref. */
308 if (current_vref[pup]
309 [if_id] == VREF_MAX_INDEX) {
/* Forward sweep hit the end of the index range without the window
 * shrinking: accept this point as converged.
 */
326 DEBUG_TRAINING_HW_ALG
328 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
334 /* continue to update the Vref index */
345 if (current_vref[pup]
361 last_valid_window[pup]
363 GET_MAX(current_valid_window
369 /* update the Vref for next stage */
376 ACCESS_TYPE_UNICAST, pup,
377 DDR_PHY_DATA, reg_addr,
384 ACCESS_TYPE_UNICAST, pup,
385 DDR_PHY_DATA, reg_addr,
387 vref_map[currrent_vref]));
388 DEBUG_TRAINING_HW_ALG
390 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
393 vref_map[currrent_vref],
395 } else if (pup_st[pup][if_id]
/* VREF_STEP_2: keep searching backwards with the same (fine) step. */
401 last_valid_window[pup]
403 GET_MAX(current_valid_window
408 last_vref[pup][if_id] =
412 /* we finish all search space */
413 if ((current_vref[pup]
414 [if_id] - second_step) == lim_vref[pup][if_id]) {
/* Backward sweep reached the limit set by the forward sweep without
 * the window shrinking: accept this point as converged.
 */
439 DEBUG_TRAINING_HW_ALG
441 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
447 /* we finish all search space */
448 if (current_vref[pup]
/* Stepping further would leave the valid index range: converge here. */
469 DEBUG_TRAINING_HW_ALG
471 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
484 /* Update the Vref for next stage */
491 ACCESS_TYPE_UNICAST, pup,
492 DDR_PHY_DATA, reg_addr,
499 ACCESS_TYPE_UNICAST, pup,
500 DDR_PHY_DATA, reg_addr,
502 vref_map[currrent_vref]));
503 DEBUG_TRAINING_HW_ALG
505 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
508 vref_map[currrent_vref],
/* Window shrank: the previous point was better; change state/step. */
512 /* we change state and change step */
513 if (pup_st[pup][if_id] == VREF_STEP_1) {
/* Enter VREF_STEP_2: the backward search is bounded by the point one
 * coarse step behind the best one found so far.
 */
516 lim_vref[pup][if_id] =
518 [if_id] - initial_step;
519 last_valid_window[pup]
521 current_valid_window[pup]
523 last_vref[pup][if_id] =
526 current_vref[pup][if_id] =
527 last_vref[pup][if_id] -
530 /* Update the Vref for next stage */
534 ACCESS_TYPE_UNICAST, pup,
535 DDR_PHY_DATA, reg_addr,
542 ACCESS_TYPE_UNICAST, pup,
543 DDR_PHY_DATA, reg_addr,
545 vref_map[current_vref[pup]
547 DEBUG_TRAINING_HW_ALG
549 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
552 vref_map[current_vref[pup]
556 } else if (pup_st[pup][if_id] == VREF_STEP_2) {
/* Fine sweep passed the maximum: restore the best (last) Vref,
 * program it, and mark the pup converged.
 */
564 ACCESS_TYPE_UNICAST, pup,
565 DDR_PHY_DATA, reg_addr,
572 ACCESS_TYPE_UNICAST, pup,
573 DDR_PHY_DATA, reg_addr,
575 vref_map[last_vref[pup]
577 DEBUG_TRAINING_HW_ALG
579 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
582 vref_map[last_vref[pup]
588 interface_state[if_id]++;
589 DEBUG_TRAINING_HW_ALG
591 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
/* Final pass: dump the Vref value programmed into every active pup. */
601 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
602 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
604 pup < octets_per_if_num; pup++) {
605 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
606 CHECK_STATUS(ddr3_tip_bus_read
608 ACCESS_TYPE_UNICAST, pup,
609 DDR_PHY_DATA, reg_addr, &val));
610 DEBUG_TRAINING_HW_ALG(
612 ("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
613 if_id, pup, val, __LINE__));
617 flow_result[if_id] = TEST_SUCCESS;
619 /* restore start/end pattern */
620 start_pattern = copy_start_pattern;
621 end_pattern = copy_end_pattern;
629 int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
632 u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
633 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
636 * ck_delay_table is delaying the of the clock signal only.
637 * (to overcome timing issues between_c_k & command/address signals)
640 * ca_delay is delaying the of the entire command & Address signals
641 * (include Clock signal to overcome DGL error on the Clock versus
646 if (ck_delay == PARAM_UNDEFINED)
647 DEBUG_TRAINING_HW_ALG(
649 ("ERROR: ck_delay is not initialized!\n"));
651 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
652 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
654 /* Calc delay ps in ADLL tap */
655 ck_num_adll_tap = ck_delay / adll_tap;
656 ca_num_adll_tap = ca_delay / adll_tap;
658 data = (ck_num_adll_tap & 0x3f) +
659 ((ca_num_adll_tap & 0x3f) << 10);
662 * Set the ADLL number to the CK ADLL for Interfaces for
665 DEBUG_TRAINING_HW_ALG(
667 ("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
668 ck_num_adll_tap, ca_num_adll_tap, adll_tap));
670 CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
671 if_id, ACCESS_TYPE_MULTICAST,
672 PARAM_NOT_CARE, DDR_PHY_CONTROL,