1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) Marvell International Ltd. and its affiliates
7 #include "mv_ddr_regs.h"
/* Vref search tuning parameters.
 * NOTE(review): this listing is a lossy extraction — several original lines
 * (including some #defines and the rd_sample_mask[] entries) are missing;
 * code bytes below are preserved as-is.
 */
9 #define VREF_INITIAL_STEP 3
10 #define VREF_SECOND_STEP 1
11 #define VREF_MAX_INDEX 7
12 #define MAX_VALUE (1024 - 1)
13 #define MIN_VALUE (-MAX_VALUE)
/* Extracts the 4-bit read-sample delay for chip-select 'cs' from 'data'.
 * NOTE(review): 'data' is not parenthesized in the expansion, so passing a
 * compound expression (e.g. a | b) would bind incorrectly — all visible call
 * sites pass a plain variable, but worth fixing upstream (CERT PRE01-C).
 */
14 #define GET_RD_SAMPLE_DELAY(data, cs) ((data >> rd_sample_mask[cs]) & 0xf)
/* When non-zero, centralization skips its minimum-window-size check; toggled
 * around ddr3_tip_centralization_rx() calls inside ddr3_tip_vref() below. */
17 int ddr3_tip_centr_skip_min_win_check = 0;
/* Per-[bus][interface] Vref search state: current/previous Vref index,
 * current/previous measured valid-window size, and the search limit index. */
18 u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
19 u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
20 u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
21 u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
22 u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* Count of converged pups per interface (incremented on VREF_CONVERGE). */
23 u8 interface_state[MAX_INTERFACE_NUM];
/* Note the transposed index order vs. the arrays above: [if][pup]. */
24 u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* Window-size threshold above which a pup needs no Vref tuning. */
25 u8 vref_window_size_th = 12;
/* Per-pup state machine value (VREF_STEP_1 / VREF_STEP_2 / VREF_CONVERGE). */
27 static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* Per-CS bit offsets of the read-sample delay fields in
 * RD_DATA_SMPL_DLYS_REG; initializer entries missing from this listing. */
29 static u32 rd_sample_mask[] = {
/* Terminal state of the per-pup Vref search state machine. */
38 #define VREF_CONVERGE 2
41 * ODT additional timing
/* Program additional ODT (on-die termination) timing for interface 'if_id':
 * derives the ODT assertion window from the min/max per-CS read-sample
 * delays plus the maximum read-leveling phase, then writes the window
 * bounds into DDR_ODT_TIMING_LOW_REG.
 * Returns MV_OK via CHECK_STATUS flow on success (return path not visible
 * in this lossy listing).
 */
43 int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
45 u32 cs_num = 0, max_read_sample = 0, min_read_sample = 0x1f;
46 u32 data_read[MAX_INTERFACE_NUM] = { 0 };
47 u32 read_sample[MAX_CS_NUM];
50 int max_phase = MIN_VALUE, current_phase;
51 enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
52 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
54 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
/* Read the per-CS read-sample delays register for this interface. */
57 CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
58 RD_DATA_SMPL_DLYS_REG,
59 data_read, MASK_ALL_BITS));
60 val = data_read[if_id];
62 for (cs_num = 0; cs_num < MAX_CS_NUM; cs_num++) {
63 read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);
65 /* find maximum of read_samples */
66 if (read_sample[cs_num] >= max_read_sample) {
/* A tie on the sample value restarts the phase search. */
67 if (read_sample[cs_num] == max_read_sample)
68 max_phase = MIN_VALUE;
70 max_read_sample = read_sample[cs_num];
/* For the max-sample CS, scan all pups for the largest phase value
 * read back from the PHY (loop header lines missing from listing). */
73 pup_index < octets_per_if_num;
75 CHECK_STATUS(ddr3_tip_bus_read
77 ACCESS_TYPE_UNICAST, pup_index,
/* NOTE(review): mask 0xe0 covers bits 5..7 but the shift is 6, so bit 5
 * is discarded — matches the known upstream code, but looks suspicious;
 * confirm against the PHY register layout. */
82 current_phase = ((int)val & 0xe0) >> 6;
83 if (current_phase >= max_phase)
84 max_phase = current_phase;
/* Track the minimum read sample across chip-selects. */
89 if (read_sample[cs_num] < min_read_sample)
90 min_read_sample = read_sample[cs_num];
/* Widen the window: start one tap early, end after the phase margin. */
93 min_read_sample = min_read_sample - 1;
94 max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
/* Clamp to the register field limits (4-bit min, 5-bit max). */
95 if (min_read_sample >= 0xf)
96 min_read_sample = 0xf;
97 if (max_read_sample >= 0x1f)
98 max_read_sample = 0x1f;
100 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
101 DDR_ODT_TIMING_LOW_REG,
102 ((min_read_sample - 1) << 12),
104 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
105 DDR_ODT_TIMING_LOW_REG,
106 (max_read_sample << 16),
/* Read the RX valid-window result for each of the 4 pups of interface
 * 'if_id' from the PHY result register into res[0..3].
 * Each result is the low 5 bits at RESULT_PHY_RX_OFFS of RESULT_PHY_REG.
 * (Return statement and closing lines missing from this lossy listing.)
 */
112 int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
114 u32 reg_pup = RESULT_PHY_REG;
124 for (i = 0; i < 4; i++) {
125 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
126 ACCESS_TYPE_UNICAST, i,
127 DDR_PHY_DATA, reg_pup,
129 res[i] = (reg_data >> RESULT_PHY_RX_OFFS) & 0x1f;
136 * This algorithm deals with the vertical optimum from Voltage point of view
137 * of the sample signal.
138 * Voltage sample point can improve the Eye / window size of the bit and the
140 * The problem is that it is tuned the same for all DQs, so there isn't any
142 * It is more like centralization.
143 * But because we don't have The training SM support we do it a bit more
144 * smart search to save time.
/* Vref (voltage reference) training: per-pup hill-climb search for the Vref
 * index that maximizes the RX valid window.
 * Flow (as visible in this lossy listing):
 *   1. Save the active pattern range and switch to PATTERN_VREF.
 *   2. Init per-pup state; pups whose window already exceeds
 *      vref_window_size_th are marked VREF_CONVERGE immediately.
 *   3. Loop (max 10 iterations): run RX centralization 3x, average the
 *      measured windows (scaled x1000), then per pup either keep stepping
 *      (VREF_STEP_1, coarse step), step back (VREF_STEP_2, fine step), or
 *      declare convergence when the window stops improving or the index
 *      space is exhausted.
 *   4. Log final Vref values, mark all interfaces TEST_SUCCESS, restore
 *      the saved pattern range.
 * Many original lines are missing; code bytes are preserved as-is.
 */
146 int ddr3_tip_vref(u32 dev_num)
149 * The Vref register have non linear order. Need to check what will be
150 * in future projects.
/* vref_map[]: search order of Vref indices (register encoding is
 * non-linear, hence the lookup table; opening lines missing). */
153 1, 2, 3, 4, 5, 6, 7, 0
155 /* State and parameter definitions */
156 u32 initial_step = VREF_INITIAL_STEP;
157 /* need to be assign with minus ????? */
158 u32 second_step = VREF_SECOND_STEP;
/* NOTE(review): 'currrent_vref' (triple r) is the upstream spelling. */
159 u32 algo_run_flag = 0, currrent_vref = 0;
161 u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
164 u32 copy_start_pattern, copy_end_pattern;
165 enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
167 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
168 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
170 CHECK_STATUS(ddr3_tip_special_rx(dev_num));
172 /* save start/end pattern */
173 copy_start_pattern = start_pattern;
174 copy_end_pattern = end_pattern;
176 /* set vref as centralization pattern */
177 start_pattern = PATTERN_VREF;
178 end_pattern = PATTERN_VREF;
/* --- Per-pup state initialization --- */
181 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
182 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
184 pup < octets_per_if_num; pup++) {
185 current_vref[pup][if_id] = 0;
186 last_vref[pup][if_id] = 0;
187 lim_vref[pup][if_id] = 0;
188 current_valid_window[pup][if_id] = 0;
189 last_valid_window[pup][if_id] = 0;
/* Window already wide enough — no Vref tuning needed for this pup. */
190 if (vref_window_size[if_id][pup] >
191 vref_window_size_th) {
192 pup_st[pup][if_id] = VREF_CONVERGE;
193 DEBUG_TRAINING_HW_ALG(
195 ("VREF config, IF[ %d ]pup[ %d ] - Vref tune not requered (%d)\n",
196 if_id, pup, __LINE__));
/* Otherwise start the search at vref_map[0], preserving the upper
 * bits of the PHY register (read-modify-write of the low nibble). */
198 pup_st[pup][if_id] = VREF_STEP_1;
199 CHECK_STATUS(ddr3_tip_bus_read
201 ACCESS_TYPE_UNICAST, pup,
202 DDR_PHY_DATA, reg_addr, &val));
203 CHECK_STATUS(ddr3_tip_bus_write
204 (dev_num, ACCESS_TYPE_UNICAST,
205 if_id, ACCESS_TYPE_UNICAST,
206 pup, DDR_PHY_DATA, reg_addr,
207 (val & (~0xf)) | vref_map[0]));
208 DEBUG_TRAINING_HW_ALG(
210 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
212 (val & (~0xf)) | vref_map[0],
216 interface_state[if_id] = 0;
219 /* TODO: Set number of active interfaces */
220 num_pup = octets_per_if_num * MAX_INTERFACE_NUM;
/* --- Main search loop, capped at 10 iterations.
 * NOTE(review): bitwise '&' used instead of logical '&&' — harmless here
 * since both operands are 0/1 comparisons, but '&&' is the intent. --- */
222 while ((algo_run_flag <= num_pup) & (while_count < 10)) {
/* Run RX centralization 3 times and average the window sizes. */
224 for (rep = 1; rep < 4; rep++) {
225 ddr3_tip_centr_skip_min_win_check = 1;
226 ddr3_tip_centralization_rx(dev_num);
227 ddr3_tip_centr_skip_min_win_check = 0;
229 /* Read Valid window results only for non converge pups */
230 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
231 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
232 if (interface_state[if_id] != 4) {
233 get_valid_win_rx(dev_num, if_id, res);
235 pup < octets_per_if_num;
238 (tm->bus_act_mask, pup);
/* Running average across the 3 repetitions, scaled by 1000 to keep
 * integer precision. */
244 current_valid_window[pup]
246 (current_valid_window[pup]
247 [if_id] * (rep - 1) +
248 1000 * res[pup]) / rep;
/* Trace the averaged windows for all active interfaces/pups. */
254 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
255 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
256 DEBUG_TRAINING_HW_ALG(
258 ("current_valid_window: IF[ %d ] - ", if_id));
261 pup < octets_per_if_num; pup++) {
262 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
263 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
268 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
271 /* Compare results and respond as function of state */
272 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
273 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
275 pup < octets_per_if_num; pup++) {
276 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
277 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
278 ("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
/* Converged pups are skipped for the rest of the run. */
283 if (pup_st[pup][if_id] == VREF_CONVERGE)
286 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
287 ("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
289 current_valid_window[pup]
291 last_valid_window[pup]
292 [if_id], lim_vref[pup]
296 * The -1 is for solution resolution +/- 1 tap
/* Improvement check: +200 (= 0.2 tap in the x1000 scale) tolerance
 * so measurement noise doesn't abort the search. */
299 if (current_valid_window[pup][if_id] + 200 >=
300 (last_valid_window[pup][if_id])) {
301 if (pup_st[pup][if_id] == VREF_STEP_1) {
303 * We stay in the same state and
304 * step just update the window
305 * size (take the max) and Vref
/* Reached the last Vref index without regressing — converge here. */
307 if (current_vref[pup]
308 [if_id] == VREF_MAX_INDEX) {
310 * If we step to the end
311 * and didn't converge
325 DEBUG_TRAINING_HW_ALG
327 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
333 /* continue to update the Vref index */
344 if (current_vref[pup]
360 last_valid_window[pup]
362 GET_MAX(current_valid_window
368 /* update the Vref for next stage */
/* Read-modify-write of the PHY Vref field with the next mapped index. */
375 ACCESS_TYPE_UNICAST, pup,
376 DDR_PHY_DATA, reg_addr,
383 ACCESS_TYPE_UNICAST, pup,
384 DDR_PHY_DATA, reg_addr,
386 vref_map[currrent_vref]));
387 DEBUG_TRAINING_HW_ALG
389 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
392 vref_map[currrent_vref],
/* VREF_STEP_2: fine search, stepping back toward lim_vref. */
394 } else if (pup_st[pup][if_id]
397 * We keep on search back with
398 * the same step size.
400 last_valid_window[pup]
402 GET_MAX(current_valid_window
407 last_vref[pup][if_id] =
411 /* we finish all search space */
412 if ((current_vref[pup]
413 [if_id] - second_step) == lim_vref[pup][if_id]) {
415 * If we step to the end
416 * and didn't converge
438 DEBUG_TRAINING_HW_ALG
440 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
446 /* we finish all search space */
447 if (current_vref[pup]
452 * If we step to the end
453 * and didn't converge
468 DEBUG_TRAINING_HW_ALG
470 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
483 /* Update the Vref for next stage */
490 ACCESS_TYPE_UNICAST, pup,
491 DDR_PHY_DATA, reg_addr,
498 ACCESS_TYPE_UNICAST, pup,
499 DDR_PHY_DATA, reg_addr,
501 vref_map[currrent_vref]));
502 DEBUG_TRAINING_HW_ALG
504 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
507 vref_map[currrent_vref],
/* Window regressed: switch state and reverse direction with a finer
 * step (STEP_1 -> STEP_2), bounded by lim_vref. */
511 /* we change state and change step */
512 if (pup_st[pup][if_id] == VREF_STEP_1) {
515 lim_vref[pup][if_id] =
517 [if_id] - initial_step;
518 last_valid_window[pup]
520 current_valid_window[pup]
522 last_vref[pup][if_id] =
525 current_vref[pup][if_id] =
526 last_vref[pup][if_id] -
529 /* Update the Vref for next stage */
533 ACCESS_TYPE_UNICAST, pup,
534 DDR_PHY_DATA, reg_addr,
541 ACCESS_TYPE_UNICAST, pup,
542 DDR_PHY_DATA, reg_addr,
544 vref_map[current_vref[pup]
546 DEBUG_TRAINING_HW_ALG
548 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
551 vref_map[current_vref[pup]
/* Regression during the fine search: the previous point was the
 * maximum — restore it and converge. */
555 } else if (pup_st[pup][if_id] == VREF_STEP_2) {
557 * The last search was the max
558 * point set value and exit
563 ACCESS_TYPE_UNICAST, pup,
564 DDR_PHY_DATA, reg_addr,
571 ACCESS_TYPE_UNICAST, pup,
572 DDR_PHY_DATA, reg_addr,
574 vref_map[last_vref[pup]
576 DEBUG_TRAINING_HW_ALG
578 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
581 vref_map[last_vref[pup]
587 interface_state[if_id]++;
588 DEBUG_TRAINING_HW_ALG
590 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
/* --- Post-search: log the final Vref value of every active pup. --- */
600 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
601 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
603 pup < octets_per_if_num; pup++) {
604 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
605 CHECK_STATUS(ddr3_tip_bus_read
607 ACCESS_TYPE_UNICAST, pup,
608 DDR_PHY_DATA, reg_addr, &val));
609 DEBUG_TRAINING_HW_ALG(
611 ("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
612 if_id, pup, val, __LINE__));
616 flow_result[if_id] = TEST_SUCCESS;
618 /* restore start/end pattern */
619 start_pattern = copy_start_pattern;
620 end_pattern = copy_end_pattern;
628 int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
631 u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
632 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
635 * ck_delay_table is delaying the of the clock signal only.
636 * (to overcome timing issues between_c_k & command/address signals)
639 * ca_delay is delaying the of the entire command & Address signals
640 * (include Clock signal to overcome DGL error on the Clock versus
645 if (ck_delay == PARAM_UNDEFINED)
646 DEBUG_TRAINING_HW_ALG(
648 ("ERROR: ck_delay is not initialized!\n"));
650 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
651 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
653 /* Calc delay ps in ADLL tap */
654 ck_num_adll_tap = ck_delay / adll_tap;
655 ca_num_adll_tap = ca_delay / adll_tap;
657 data = (ck_num_adll_tap & 0x3f) +
658 ((ca_num_adll_tap & 0x3f) << 10);
661 * Set the ADLL number to the CK ADLL for Interfaces for
664 DEBUG_TRAINING_HW_ALG(
666 ("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
667 ck_num_adll_tap, ca_num_adll_tap, adll_tap));
669 CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
670 if_id, ACCESS_TYPE_MULTICAST,
671 PARAM_NOT_CARE, DDR_PHY_CONTROL,