1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) Marvell International Ltd. and its affiliates
/*
 * Vref search tuning: the coarse sweep advances through the vref_map
 * lookup table in steps of VREF_INITIAL_STEP, the refinement sweep
 * steps back by VREF_SECOND_STEP; vref_map has VREF_MAX_INDEX + 1 entries.
 */
8 #define VREF_INITIAL_STEP 3
9 #define VREF_SECOND_STEP 1
10 #define VREF_MAX_INDEX 7
/*
 * Saturation bounds used to seed the signed phase search in
 * ddr3_tip_write_additional_odt_setting() (max_phase starts at MIN_VALUE).
 */
11 #define MAX_VALUE (1024 - 1)
12 #define MIN_VALUE (-MAX_VALUE)
/* Extract chip-select 'cs' 4-bit read-sample delay field from 'data'. */
13 #define GET_RD_SAMPLE_DELAY(data, cs) ((data >> rd_sample_mask[cs]) & 0xf)
/*
 * When non-zero, ddr3_tip_centralization_rx() skips its minimum-window
 * check; toggled around the centralization calls inside ddr3_tip_vref().
 */
16 int ddr3_tip_centr_skip_min_win_check = 0;
/*
 * Per-pup / per-interface Vref search state, indexed [pup][if_id]:
 * current and previous Vref index, current and previous valid-window
 * size (scaled x1000 for averaging), and the search-back limit index.
 */
17 u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
18 u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
19 u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
20 u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
21 u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* Count of converged pups per interface (see ddr3_tip_vref()). */
22 u8 interface_state[MAX_INTERFACE_NUM];
/*
 * NOTE(review): vref_window_size is indexed [if_id][pup] -- the
 * OPPOSITE order of the [pup][if_id] arrays above; confirm at callers.
 */
23 u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* Pups whose window already exceeds this threshold skip Vref tuning. */
24 u8 vref_window_size_th = 12;
/* Per-pup state machine state (VREF_STEP_1 / VREF_STEP_2 / VREF_CONVERGE). */
26 static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/*
 * Bit offset of each chip-select's read-sample delay field in
 * RD_DATA_SMPL_DLYS_REG (initializer lies in a gap of this extraction).
 */
28 static u32 rd_sample_mask[] = {
/* Terminal state of the per-pup Vref search. */
37 #define VREF_CONVERGE 2
40 * ODT additional timing
/*
 * ddr3_tip_write_additional_odt_setting() - tune ODT assertion timing.
 *
 * Reads the per-chip-select read-sample delays of interface 'if_id',
 * tracks the minimum and the maximum delay (the maximum extended by the
 * per-pup read phase), clamps both to the register field ranges, and
 * programs the resulting ODT start/stop points into
 * DDR_ODT_TIMING_LOW_REG.  Returns MV_OK on success (CHECK_STATUS
 * propagates any access failure).
 *
 * NOTE(review): this chunk is a lossy extraction -- gaps in the embedded
 * line numbering show statements (braces, loop headers, register
 * offsets/masks) are missing between some of the lines below.
 */
42 int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
44 u32 cs_num = 0, max_read_sample = 0, min_read_sample = 0x1f;
45 u32 data_read[MAX_INTERFACE_NUM] = { 0 };
46 u32 read_sample[MAX_CS_NUM];
/* max_phase seeded with the saturating minimum so any real phase wins */
49 int max_phase = MIN_VALUE, current_phase;
50 enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
51 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
53 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
/* fetch all chip-selects' read-sample delays in one register read */
56 CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
57 RD_DATA_SMPL_DLYS_REG,
58 data_read, MASK_ALL_BITS));
59 val = data_read[if_id];
61 for (cs_num = 0; cs_num < MAX_CS_NUM; cs_num++) {
62 read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);
64 /* find maximum of read_samples */
65 if (read_sample[cs_num] >= max_read_sample) {
/* same delay on another CS: restart the phase search for it */
66 if (read_sample[cs_num] == max_read_sample)
67 max_phase = MIN_VALUE;
69 max_read_sample = read_sample[cs_num];
/* scan every pup of this interface for its read phase */
72 pup_index < octets_per_if_num;
74 CHECK_STATUS(ddr3_tip_bus_read
76 ACCESS_TYPE_UNICAST, pup_index,
/*
 * NOTE(review): mask 0xe0 covers bits 5..7 but the shift is 6,
 * which drops bit 5 -- confirm against the PHY register layout.
 */
81 current_phase = ((int)val & 0xe0) >> 6;
82 if (current_phase >= max_phase)
83 max_phase = current_phase;
/* track the minimum read-sample delay across chip-selects */
88 if (read_sample[cs_num] < min_read_sample)
89 min_read_sample = read_sample[cs_num];
/* widen the window: one tap earlier, and later by phase-derived margin */
92 min_read_sample = min_read_sample - 1;
93 max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
/* clamp to the ODT timing field ranges */
94 if (min_read_sample >= 0xf)
95 min_read_sample = 0xf;
96 if (max_read_sample >= 0x1f)
97 max_read_sample = 0x1f;
/* program ODT start (bits 12+) and stop (bits 16+) points */
99 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
100 DDR_ODT_TIMING_LOW_REG,
101 ((min_read_sample - 1) << 12),
103 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
104 DDR_ODT_TIMING_LOW_REG,
105 (max_read_sample << 16),
/*
 * get_valid_win_rx() - read the RX valid-window result per byte lane.
 *
 * For each of the four pups of interface 'if_id', reads the PHY result
 * register and stores the 5-bit RX window field in res[0..3].
 * Returns MV_OK via CHECK_STATUS semantics.
 */
111 int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
113 u32 reg_pup = RESULT_PHY_REG;
/* one result-register read per pup (byte lane) */
123 for (i = 0; i < 4; i++) {
124 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
125 ACCESS_TYPE_UNICAST, i,
126 DDR_PHY_DATA, reg_pup,
/* keep only the 5-bit RX valid-window field */
128 res[i] = (reg_data >> RESULT_PHY_RX_OFFS) & 0x1f;
135 * This algorithm deals with the vertical optimum from Voltage point of view
136 * of the sample signal.
137 * Voltage sample point can improve the Eye / window size of the bit and the
139 * The problem is that it is tuned for all DQ the same, so there isn't any
141 * It is more like centralization.
142 * But because we don't have The training SM support we do it a bit more
143 * smart search to save time.
/*
 * ddr3_tip_vref() - per-pup Vref optimization by RX valid-window search.
 *
 * Runs a coarse/fine search over the (non-linear) vref_map table: for each
 * candidate Vref index it re-runs RX centralization three times, averages
 * the resulting valid-window sizes (scaled x1000), and a per-pup state
 * machine (VREF_STEP_1 -> VREF_STEP_2 -> VREF_CONVERGE) decides whether to
 * keep stepping, step back with a finer step, or lock in the best value.
 * The VREF pattern temporarily replaces start/end_pattern, which are
 * restored before returning MV_OK.
 *
 * NOTE(review): lossy extraction -- gaps in the embedded numbering show
 * large spans (state constants, write-back branches, closing braces) are
 * missing between the lines below.
 */
145 int ddr3_tip_vref(u32 dev_num)
148 * The Vref register have non linear order. Need to check what will be
149 * in future projects.
152 1, 2, 3, 4, 5, 6, 7, 0
154 /* State and parameter definitions */
155 u32 initial_step = VREF_INITIAL_STEP;
156 /* FIXME(review): should this be applied as a negative step? */
157 u32 second_step = VREF_SECOND_STEP;
/* NOTE(review): 'currrent_vref' (sic) -- triple-r spelling is in the source */
158 u32 algo_run_flag = 0, currrent_vref = 0;
160 u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
163 u32 copy_start_pattern, copy_end_pattern;
164 enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
166 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
167 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
169 CHECK_STATUS(ddr3_tip_special_rx(dev_num));
171 /* save start/end pattern */
172 copy_start_pattern = start_pattern;
173 copy_end_pattern = end_pattern;
175 /* set vref as centralization pattern */
176 start_pattern = PATTERN_VREF;
177 end_pattern = PATTERN_VREF;
/* initialize per-pup search state for every active interface */
180 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
181 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
183 pup < octets_per_if_num; pup++) {
184 current_vref[pup][if_id] = 0;
185 last_vref[pup][if_id] = 0;
186 lim_vref[pup][if_id] = 0;
187 current_valid_window[pup][if_id] = 0;
188 last_valid_window[pup][if_id] = 0;
/* pups whose window is already wide enough skip the search entirely */
189 if (vref_window_size[if_id][pup] >
190 vref_window_size_th) {
191 pup_st[pup][if_id] = VREF_CONVERGE;
192 DEBUG_TRAINING_HW_ALG(
194 ("VREF config, IF[ %d ]pup[ %d ] - Vref tune not requered (%d)\n",
195 if_id, pup, __LINE__));
/* otherwise start the coarse sweep from vref_map[0] */
197 pup_st[pup][if_id] = VREF_STEP_1;
198 CHECK_STATUS(ddr3_tip_bus_read
200 ACCESS_TYPE_UNICAST, pup,
201 DDR_PHY_DATA, reg_addr, &val));
/* program the initial Vref into the low nibble of the PHY register */
202 CHECK_STATUS(ddr3_tip_bus_write
203 (dev_num, ACCESS_TYPE_UNICAST,
204 if_id, ACCESS_TYPE_UNICAST,
205 pup, DDR_PHY_DATA, reg_addr,
206 (val & (~0xf)) | vref_map[0]));
207 DEBUG_TRAINING_HW_ALG(
209 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
211 (val & (~0xf)) | vref_map[0],
215 interface_state[if_id] = 0;
218 /* TODO: Set number of active interfaces */
219 num_pup = octets_per_if_num * MAX_INTERFACE_NUM;
/*
 * NOTE(review): bitwise '&' used where logical '&&' is conventional;
 * both operands are 0/1 here so behavior matches -- left as-is.
 */
221 while ((algo_run_flag <= num_pup) & (while_count < 10)) {
/* run RX centralization 3 times and average the window results */
223 for (rep = 1; rep < 4; rep++) {
224 ddr3_tip_centr_skip_min_win_check = 1;
225 ddr3_tip_centralization_rx(dev_num);
226 ddr3_tip_centr_skip_min_win_check = 0;
228 /* Read Valid window results only for non converge pups */
229 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
230 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
231 if (interface_state[if_id] != 4) {
232 get_valid_win_rx(dev_num, if_id, res);
234 pup < octets_per_if_num;
237 (tm->bus_act_mask, pup);
/* running average, scaled x1000 to keep integer precision */
243 current_valid_window[pup]
245 (current_valid_window[pup]
246 [if_id] * (rep - 1) +
247 1000 * res[pup]) / rep;
/* trace the averaged windows per interface */
253 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
254 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
255 DEBUG_TRAINING_HW_ALG(
257 ("current_valid_window: IF[ %d ] - ", if_id));
260 pup < octets_per_if_num; pup++) {
261 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
262 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
267 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
270 /* Compare results and respond as function of state */
271 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
272 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
274 pup < octets_per_if_num; pup++) {
275 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
276 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
277 ("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
/* converged pups need no further work */
282 if (pup_st[pup][if_id] == VREF_CONVERGE)
285 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
286 ("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
288 current_valid_window[pup]
290 last_valid_window[pup]
291 [if_id], lim_vref[pup]
295 * The -1 is for solution resolution +/- 1 tap
/*
 * Window improved (or within 200 = 0.2 tap of the last one):
 * keep searching in the current direction.
 */
298 if (current_valid_window[pup][if_id] + 200 >=
299 (last_valid_window[pup][if_id])) {
300 if (pup_st[pup][if_id] == VREF_STEP_1) {
302 * We stay in the same state and
303 * step just update the window
304 * size (take the max) and Vref
/* coarse sweep hit the end of the table */
306 if (current_vref[pup]
307 [if_id] == VREF_MAX_INDEX) {
309 * If we step to the end
310 * and didn't converge
324 DEBUG_TRAINING_HW_ALG
326 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
332 /* continue to update the Vref index */
343 if (current_vref[pup]
/* remember the best window seen so far */
359 last_valid_window[pup]
361 GET_MAX(current_valid_window
367 /* update the Vref for next stage */
374 ACCESS_TYPE_UNICAST, pup,
375 DDR_PHY_DATA, reg_addr,
382 ACCESS_TYPE_UNICAST, pup,
383 DDR_PHY_DATA, reg_addr,
385 vref_map[currrent_vref]));
386 DEBUG_TRAINING_HW_ALG
388 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
391 vref_map[currrent_vref],
/* refinement state: step back with the finer step */
393 } else if (pup_st[pup][if_id]
396 * We keep on search back with
397 * the same step size.
399 last_valid_window[pup]
401 GET_MAX(current_valid_window
406 last_vref[pup][if_id] =
410 /* we finish all search space */
411 if ((current_vref[pup]
412 [if_id] - second_step) == lim_vref[pup][if_id]) {
414 * If we step to the end
415 * and didn't converge
437 DEBUG_TRAINING_HW_ALG
439 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
445 /* we finish all search space */
446 if (current_vref[pup]
451 * If we step to the end
452 * and didn't converge
467 DEBUG_TRAINING_HW_ALG
469 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
482 /* Update the Vref for next stage */
489 ACCESS_TYPE_UNICAST, pup,
490 DDR_PHY_DATA, reg_addr,
497 ACCESS_TYPE_UNICAST, pup,
498 DDR_PHY_DATA, reg_addr,
500 vref_map[currrent_vref]));
501 DEBUG_TRAINING_HW_ALG
503 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
506 vref_map[currrent_vref],
/*
 * Window got worse: switch state and search direction.  From the
 * coarse sweep, set the back-search limit one coarse step behind
 * and start the fine back-search from there.
 */
510 /* we change state and change step */
511 if (pup_st[pup][if_id] == VREF_STEP_1) {
514 lim_vref[pup][if_id] =
516 [if_id] - initial_step;
517 last_valid_window[pup]
519 current_valid_window[pup]
521 last_vref[pup][if_id] =
524 current_vref[pup][if_id] =
525 last_vref[pup][if_id] -
528 /* Update the Vref for next stage */
532 ACCESS_TYPE_UNICAST, pup,
533 DDR_PHY_DATA, reg_addr,
540 ACCESS_TYPE_UNICAST, pup,
541 DDR_PHY_DATA, reg_addr,
543 vref_map[current_vref[pup]
545 DEBUG_TRAINING_HW_ALG
547 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
550 vref_map[current_vref[pup]
/* fine search also regressed: previous Vref was the optimum */
554 } else if (pup_st[pup][if_id] == VREF_STEP_2) {
556 * The last search was the max
557 * point set value and exit
562 ACCESS_TYPE_UNICAST, pup,
563 DDR_PHY_DATA, reg_addr,
570 ACCESS_TYPE_UNICAST, pup,
571 DDR_PHY_DATA, reg_addr,
573 vref_map[last_vref[pup]
575 DEBUG_TRAINING_HW_ALG
577 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
580 vref_map[last_vref[pup]
/* count this pup as converged for its interface */
586 interface_state[if_id]++;
587 DEBUG_TRAINING_HW_ALG
589 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
/* read back and log the final Vref chosen for every active pup */
599 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
600 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
602 pup < octets_per_if_num; pup++) {
603 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
604 CHECK_STATUS(ddr3_tip_bus_read
606 ACCESS_TYPE_UNICAST, pup,
607 DDR_PHY_DATA, reg_addr, &val));
608 DEBUG_TRAINING_HW_ALG(
610 ("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
611 if_id, pup, val, __LINE__));
615 flow_result[if_id] = TEST_SUCCESS;
617 /* restore start/end pattern */
618 start_pattern = copy_start_pattern;
619 end_pattern = copy_end_pattern;
627 int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
630 u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
631 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
634 * ck_delay_table is delaying the of the clock signal only.
635 * (to overcome timing issues between_c_k & command/address signals)
638 * ca_delay is delaying the of the entire command & Address signals
639 * (include Clock signal to overcome DGL error on the Clock versus
644 if (ck_delay == PARAM_UNDEFINED)
645 DEBUG_TRAINING_HW_ALG(
647 ("ERROR: ck_delay is not initialized!\n"));
649 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
650 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
652 /* Calc delay ps in ADLL tap */
653 ck_num_adll_tap = ck_delay / adll_tap;
654 ca_num_adll_tap = ca_delay / adll_tap;
656 data = (ck_num_adll_tap & 0x3f) +
657 ((ca_num_adll_tap & 0x3f) << 10);
660 * Set the ADLL number to the CK ADLL for Interfaces for
663 DEBUG_TRAINING_HW_ALG(
665 ("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
666 ck_num_adll_tap, ca_num_adll_tap, adll_tap));
668 CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
669 if_id, ACCESS_TYPE_MULTICAST,
670 PARAM_NOT_CARE, DDR_PHY_CONTROL,