2 * Copyright (C) Marvell International Ltd. and its affiliates
4 * SPDX-License-Identifier: GPL-2.0
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
/* Vref search: coarse step for the first search pass, fine step for the
 * second (backward) pass — see the state machine in ddr3_tip_vref(). */
15 #define VREF_INITIAL_STEP 3
16 #define VREF_SECOND_STEP 1
/* Last valid index into the vref_map[] lookup table. */
17 #define VREF_MAX_INDEX 7
/* Sentinel bounds used when searching for the max phase value
 * (max_phase is seeded with MIN_VALUE before the search). */
18 #define MAX_VALUE (1024 - 1)
19 #define MIN_VALUE (-MAX_VALUE)
/*
 * Extract the 5-bit read-sample delay field for chip-select 'cs' from a
 * READ_DATA_SAMPLE_DELAY register value 'data'; rd_sample_mask[] appears to
 * hold the per-CS bit offset of each field (it is used as a shift amount).
 * Fix: parenthesize the macro arguments so that expression arguments
 * (e.g. a ternary passed as 'data' or 'cs') cannot change the grouping.
 */
#define GET_RD_SAMPLE_DELAY(data, cs) (((data) >> rd_sample_mask[(cs)]) & 0x1f)
/* Clock-delay values; (u32)-1 marks "not initialized" — this is checked in
 * ddr3_tip_cmd_addr_init_delay() before either value is used. */
22 u32 ck_delay = (u32)-1, ck_delay_16 = (u32)-1;
/* When non-zero, the RX centralization skips its minimum-window check;
 * toggled around the ddr3_tip_centralization_rx() calls in ddr3_tip_vref(). */
24 int ddr3_tip_centr_skip_min_win_check = 0;
/* Per-[pup][interface] Vref search state: current/last Vref index, the
 * search limit, and current/last measured valid-window size (the window is
 * accumulated scaled by 1000 in ddr3_tip_vref()). */
25 u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
26 u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
27 u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
28 u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
29 u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* Per-interface progress counter, bumped as pups reach VREF_CONVERGE. */
30 u8 interface_state[MAX_INTERFACE_NUM];
/* NOTE(review): index order here is [interface][pup], transposed relative
 * to the other tables above — easy to get wrong at call sites. */
31 u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* Window-size threshold above which Vref tuning is skipped for a pup. */
32 u8 vref_window_size_th = 12;
/* Per-[pup][interface] state-machine state (VREF_STEP_1 / VREF_STEP_2 /
 * VREF_CONVERGE). */
34 static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* Per-chip-select bit offset of the read-sample field, consumed by
 * GET_RD_SAMPLE_DELAY(). NOTE(review): the initializer values are not
 * visible in this extract — the declaration appears truncated. */
36 static u32 rd_sample_mask[] = {
45 #define VREF_CONVERGE 2
48 * ODT additional timing
50 int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
/* Purpose: program additional ODT timing for interface 'if_id'. Derives an
 * assertion window from the per-chip-select read-sample delays and the
 * read-leveling phase read back from each PHY (pup), then writes the
 * window bounds back to the controller.
 * NOTE(review): this extract has lines elided — declarations of 'val' and
 * 'pup_index', several braces, and the register/value arguments of the
 * CHECK_STATUS writes are not visible. Comments below describe only what
 * the visible code shows. */
52 u32 cs_num = 0, max_cs = 0, max_read_sample = 0, min_read_sample = 0x1f;
53 u32 data_read[MAX_INTERFACE_NUM] = { 0 };
54 u32 read_sample[MAX_CS_NUM];
57 int max_phase = MIN_VALUE, current_phase;
58 enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
59 struct hws_topology_map *tm = ddr3_get_topology_map();
/* Configure the D-unit ODT control register, then fetch the current
 * read-data sample delays for this interface. */
61 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
62 DUNIT_ODT_CONTROL_REG,
64 CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
65 READ_DATA_SAMPLE_DELAY,
66 data_read, MASK_ALL_BITS));
67 val = data_read[if_id];
69 max_cs = hws_ddr3_tip_max_cs_get();
/* Scan all chip-selects for the min/max read-sample delay; on a new max,
 * restart the per-pup phase search for that CS. */
71 for (cs_num = 0; cs_num < max_cs; cs_num++) {
72 read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);
74 /* find maximum of read_samples */
75 if (read_sample[cs_num] >= max_read_sample) {
76 if (read_sample[cs_num] == max_read_sample) {
77 /* search for max phase */;
79 max_read_sample = read_sample[cs_num];
80 max_phase = MIN_VALUE;
84 pup_index < tm->num_of_bus_per_interface;
86 CHECK_STATUS(ddr3_tip_bus_read
88 ACCESS_TYPE_UNICAST, pup_index,
90 RL_PHY_REG + CS_REG_VALUE(cs_num),
/* NOTE(review): mask 0xe0 covers bits 5-7 but the shift is 6, so bit 5
 * is discarded — confirm against the RL PHY register field layout. */
93 current_phase = ((int)val & 0xe0) >> 6;
94 if (current_phase >= max_phase)
95 max_phase = current_phase;
100 if (read_sample[cs_num] < min_read_sample)
101 min_read_sample = read_sample[cs_num];
/* Clamp the minimum sample to the CAS latency, then derive the ODT
 * window: start one tap early, end past the max sample by the phase
 * contribution, saturating at the 5-bit field maximum (0x1f). */
104 if (min_read_sample <= tm->interface_params[if_id].cas_l) {
105 min_read_sample = (int)tm->interface_params[if_id].cas_l;
108 min_read_sample = min_read_sample - 1;
109 max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
110 if (max_read_sample >= 0x1f)
111 max_read_sample = 0x1f;
/* NOTE(review): min_read_sample was already decremented above and is
 * decremented again in the write below — verify the double "-1" is
 * intentional. */
113 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
115 ((min_read_sample - 1) << 12),
117 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
119 (max_read_sample << 16),
125 int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
/* Purpose: read the RX valid-window result (5-bit field) for each of the
 * four pups of interface 'if_id' from the PHY result DB register into
 * res[0..3].
 * NOTE(review): the declarations of 'i' and 'reg_data', the function's
 * braces, and its return statement are not visible in this extract. */
127 u32 reg_pup = RESULT_DB_PHY_REG_ADDR;
137 for (i = 0; i < 4; i++) {
138 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
139 ACCESS_TYPE_UNICAST, i,
140 DDR_PHY_DATA, reg_pup,
142 res[i] = (reg_data >> RESULT_DB_PHY_REG_RX_OFFSET) & 0x1f;
149 * This algorithm deals with the vertical optimum from the voltage point of
150 * view of the sample signal.
151 * Voltage sample point can improve the Eye / window size of the bit and the
153 * The problem is that it is tuned the same for all DQs, so there isn't any
155 * It is more like centralization.
156 * But because we don't have the training SM support, we do a somewhat
157 * smarter search to save time.
159 int ddr3_tip_vref(u32 dev_num)
/* Purpose: per-pup Vref tuning. Runs RX centralization repeatedly while
 * stepping the PHY Vref setting through vref_map[], tracking the measured
 * valid window per pup, via a two-phase search: a coarse forward pass
 * (VREF_STEP_1, step VREF_INITIAL_STEP) followed by a fine backward pass
 * (VREF_STEP_2, step VREF_SECOND_STEP) until each pup converges.
 * NOTE(review): this extract has many lines elided — declarations of
 * 'while_count', 'reg_addr', 'val', 'res', the vref_map[] declaration
 * line, most closing braces, and several arithmetic statements are not
 * visible. Comments below are limited to what the visible code shows.
 * NOTE(review): the identifier 'currrent_vref' (triple 'r') is a typo kept
 * for byte-compatibility here; a rename would be a separate mechanical
 * change across the whole function. */
162 * The Vref register have non linear order. Need to check what will be
163 * in future projects.
164 1, 2, 3, 4, 5, 6, 7, 0
168 /* State and parameter definitions */
169 u32 initial_step = VREF_INITIAL_STEP;
170 /* need to be assign with minus ????? */
171 u32 second_step = VREF_SECOND_STEP;
172 u32 algo_run_flag = 0, currrent_vref = 0;
174 u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
177 u32 copy_start_pattern, copy_end_pattern;
178 enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
180 struct hws_topology_map *tm = ddr3_get_topology_map();
182 CHECK_STATUS(ddr3_tip_special_rx(dev_num));
184 /* save start/end pattern */
185 copy_start_pattern = start_pattern;
186 copy_end_pattern = end_pattern;
188 /* set vref as centralization pattern */
189 start_pattern = PATTERN_VREF;
190 end_pattern = PATTERN_VREF;
/* Initialize per-pup search state; pups whose window already exceeds the
 * threshold are marked converged immediately, others start VREF_STEP_1
 * with the Vref field set to vref_map[0]. */
193 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
194 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
196 pup < tm->num_of_bus_per_interface; pup++) {
197 current_vref[pup][if_id] = 0;
198 last_vref[pup][if_id] = 0;
199 lim_vref[pup][if_id] = 0;
200 current_valid_window[pup][if_id] = 0;
201 last_valid_window[pup][if_id] = 0;
202 if (vref_window_size[if_id][pup] >
203 vref_window_size_th) {
204 pup_st[pup][if_id] = VREF_CONVERGE;
205 DEBUG_TRAINING_HW_ALG(
207 ("VREF config, IF[ %d ]pup[ %d ] - Vref tune not requered (%d)\n",
208 if_id, pup, __LINE__));
210 pup_st[pup][if_id] = VREF_STEP_1;
211 CHECK_STATUS(ddr3_tip_bus_read
213 ACCESS_TYPE_UNICAST, pup,
214 DDR_PHY_DATA, reg_addr, &val));
/* Preserve the upper register bits; only the low Vref nibble changes. */
215 CHECK_STATUS(ddr3_tip_bus_write
216 (dev_num, ACCESS_TYPE_UNICAST,
217 if_id, ACCESS_TYPE_UNICAST,
218 pup, DDR_PHY_DATA, reg_addr,
219 (val & (~0xf)) | vref_map[0]));
220 DEBUG_TRAINING_HW_ALG(
222 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
224 (val & (~0xf)) | vref_map[0],
228 interface_state[if_id] = 0;
231 /* TODO: Set number of active interfaces */
232 num_pup = tm->num_of_bus_per_interface * MAX_INTERFACE_NUM;
/* NOTE(review): bitwise '&' between two boolean conditions — behaves
 * like '&&' here because both operands are 0/1, but '&&' is the intended
 * operator. 'while_count' is declared in an elided line. */
234 while ((algo_run_flag <= num_pup) & (while_count < 10)) {
/* Run RX centralization three times and average the measured windows
 * (incremental mean over rep = 1..3, window scaled by 1000). */
236 for (rep = 1; rep < 4; rep++) {
237 ddr3_tip_centr_skip_min_win_check = 1;
238 ddr3_tip_centralization_rx(dev_num);
239 ddr3_tip_centr_skip_min_win_check = 0;
241 /* Read Valid window results only for non converge pups */
242 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
243 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
244 if (interface_state[if_id] != 4) {
245 get_valid_win_rx(dev_num, if_id, res);
247 pup < tm->num_of_bus_per_interface;
250 (tm->bus_act_mask, pup);
256 current_valid_window[pup]
258 (current_valid_window[pup]
259 [if_id] * (rep - 1) +
260 1000 * res[pup]) / rep;
/* Trace the averaged window sizes per interface/pup. */
266 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
267 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
268 DEBUG_TRAINING_HW_ALG(
270 ("current_valid_window: IF[ %d ] - ", if_id));
273 pup < tm->num_of_bus_per_interface; pup++) {
274 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
275 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
280 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
283 /* Compare results and respond as function of state */
284 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
285 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
287 pup < tm->num_of_bus_per_interface; pup++) {
288 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
289 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
290 ("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
295 if (pup_st[pup][if_id] == VREF_CONVERGE)
298 DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
299 ("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
301 current_valid_window[pup]
303 last_valid_window[pup]
304 [if_id], lim_vref[pup]
308 * The -1 is for solution resolution +/- 1 tap
/* Window improved (within a 200-unit tolerance — presumably 0.2 tap
 * given the x1000 scaling above; confirm): keep stepping in the
 * current direction. */
311 if (current_valid_window[pup][if_id] + 200 >=
312 (last_valid_window[pup][if_id])) {
313 if (pup_st[pup][if_id] == VREF_STEP_1) {
315 * We stay in the same state and
316 * step just update the window
317 * size (take the max) and Vref
319 if (current_vref[pup]
320 [if_id] == VREF_MAX_INDEX) {
322 * If we step to the end
323 * and didn't converge
337 DEBUG_TRAINING_HW_ALG
339 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
345 /* continue to update the Vref index */
356 if (current_vref[pup]
372 last_valid_window[pup]
374 GET_MAX(current_valid_window
380 /* update the Vref for next stage */
387 ACCESS_TYPE_UNICAST, pup,
388 DDR_PHY_DATA, reg_addr,
395 ACCESS_TYPE_UNICAST, pup,
396 DDR_PHY_DATA, reg_addr,
398 vref_map[currrent_vref]));
399 DEBUG_TRAINING_HW_ALG
401 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
404 vref_map[currrent_vref],
406 } else if (pup_st[pup][if_id]
409 * We keep on search back with
410 * the same step size.
412 last_valid_window[pup]
414 GET_MAX(current_valid_window
419 last_vref[pup][if_id] =
423 /* we finish all search space */
424 if ((current_vref[pup]
425 [if_id] - second_step) == lim_vref[pup][if_id]) {
427 * If we step to the end
428 * and didn't converge
450 DEBUG_TRAINING_HW_ALG
452 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
458 /* we finish all search space */
459 if (current_vref[pup]
464 * If we step to the end
465 * and didn't converge
480 DEBUG_TRAINING_HW_ALG
482 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
495 /* Update the Vref for next stage */
502 ACCESS_TYPE_UNICAST, pup,
503 DDR_PHY_DATA, reg_addr,
510 ACCESS_TYPE_UNICAST, pup,
511 DDR_PHY_DATA, reg_addr,
513 vref_map[currrent_vref]));
514 DEBUG_TRAINING_HW_ALG
516 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
519 vref_map[currrent_vref],
/* Window got worse: switch from the coarse forward pass to the fine
 * backward pass (VREF_STEP_1 -> VREF_STEP_2), or finish if already
 * in the fine pass. */
523 /* we change state and change step */
524 if (pup_st[pup][if_id] == VREF_STEP_1) {
527 lim_vref[pup][if_id] =
529 [if_id] - initial_step;
530 last_valid_window[pup]
532 current_valid_window[pup]
534 last_vref[pup][if_id] =
537 current_vref[pup][if_id] =
538 last_vref[pup][if_id] -
541 /* Update the Vref for next stage */
545 ACCESS_TYPE_UNICAST, pup,
546 DDR_PHY_DATA, reg_addr,
553 ACCESS_TYPE_UNICAST, pup,
554 DDR_PHY_DATA, reg_addr,
556 vref_map[current_vref[pup]
558 DEBUG_TRAINING_HW_ALG
560 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
563 vref_map[current_vref[pup]
567 } else if (pup_st[pup][if_id] == VREF_STEP_2) {
569 * The last search was the max
570 * point set value and exit
575 ACCESS_TYPE_UNICAST, pup,
576 DDR_PHY_DATA, reg_addr,
583 ACCESS_TYPE_UNICAST, pup,
584 DDR_PHY_DATA, reg_addr,
586 vref_map[last_vref[pup]
588 DEBUG_TRAINING_HW_ALG
590 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
593 vref_map[last_vref[pup]
599 interface_state[if_id]++;
600 DEBUG_TRAINING_HW_ALG
602 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
/* Search done: dump the final Vref register values for all active pups. */
612 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
613 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
615 pup < tm->num_of_bus_per_interface; pup++) {
616 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
617 CHECK_STATUS(ddr3_tip_bus_read
619 ACCESS_TYPE_UNICAST, pup,
620 DDR_PHY_DATA, reg_addr, &val));
621 DEBUG_TRAINING_HW_ALG(
623 ("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
624 if_id, pup, val, __LINE__));
628 flow_result[if_id] = TEST_SUCCESS;
630 /* restore start/end pattern */
631 start_pattern = copy_start_pattern;
632 end_pattern = copy_end_pattern;
640 int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
643 u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
644 struct hws_topology_map *tm = ddr3_get_topology_map();
647 * ck_delay_table is delaying the of the clock signal only.
648 * (to overcome timing issues between_c_k & command/address signals)
651 * ca_delay is delaying the of the entire command & Address signals
652 * (include Clock signal to overcome DGL error on the Clock versus
657 if ((ck_delay == -1) || (ck_delay_16 == -1)) {
658 DEBUG_TRAINING_HW_ALG(
660 ("ERROR: One of ck_delay values not initialized!!!\n"));
663 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
664 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
665 /* Calc delay ps in ADLL tap */
666 if (tm->interface_params[if_id].bus_width ==
668 ck_num_adll_tap = ck_delay_16 / adll_tap;
670 ck_num_adll_tap = ck_delay / adll_tap;
672 ca_num_adll_tap = ca_delay / adll_tap;
673 data = (ck_num_adll_tap & 0x3f) +
674 ((ca_num_adll_tap & 0x3f) << 10);
677 * Set the ADLL number to the CK ADLL for Interfaces for
680 DEBUG_TRAINING_HW_ALG(
682 ("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
683 ck_num_adll_tap, ca_num_adll_tap, adll_tap));
685 CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
686 if_id, ACCESS_TYPE_MULTICAST,
687 PARAM_NOT_CARE, DDR_PHY_CONTROL,