/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier: GPL-2.0
 */
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
/* Vref search step sizes: coarse first pass, fine backwards pass */
#define VREF_INITIAL_STEP 3
#define VREF_SECOND_STEP 1
/* Highest valid index into vref_map[] */
#define VREF_MAX_INDEX 7
/* Sentinel bounds used when tracking best/worst phase values */
#define MAX_VALUE (1024 - 1)
#define MIN_VALUE (-MAX_VALUE)
/*
 * Extract the 4-bit read-sample delay of chip-select 'cs' from the packed
 * READ_DATA_SAMPLE_DELAY value 'data'.  'data' is parenthesized so callers
 * may pass compound expressions without operator-precedence surprises.
 */
#define GET_RD_SAMPLE_DELAY(data, cs) (((data) >> rd_sample_mask[cs]) & 0xf)
/* Clock-signal delay values consumed by ddr3_tip_cmd_addr_init_delay();
 * (u32)-1 marks "not initialized" (checked there before use).
 */
u32 ck_delay = (u32)-1, ck_delay_16 = (u32)-1;
/* When non-zero, ddr3_tip_centralization_rx() skips its minimum
 * window-size check (toggled around the averaging loop in ddr3_tip_vref()).
 */
int ddr3_tip_centr_skip_min_win_check = 0;
/* Per-[pup][interface] Vref search bookkeeping used by ddr3_tip_vref() */
u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
/* Lowest Vref index the backwards (fine) search may reach */
u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 interface_state[MAX_INTERFACE_NUM];
/* NOTE: indexed [if_id][pup], transposed w.r.t. the arrays above */
u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* Windows already wider than this threshold skip Vref tuning entirely */
u8 vref_window_size_th = 12;
/* Per-pup state-machine position (VREF_STEP_1 / VREF_STEP_2 / VREF_CONVERGE) */
static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];
36 static u32 rd_sample_mask[] = {
45 #define VREF_CONVERGE 2
/* ODT additional timing */
/*
 * Configure the ODT assertion window for interface 'if_id'.
 *
 * Reads the packed per chip-select read-sample delays, derives the
 * min/max sample window (factoring in each pup's read-leveling phase),
 * clamps the result and programs the ODT timing registers.
 * Register-access errors propagate through CHECK_STATUS().
 */
int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
	u32 cs_num = 0, max_read_sample = 0, min_read_sample = 0;
	u32 data_read[MAX_INTERFACE_NUM] = { 0 };
	u32 read_sample[MAX_CS_NUM];
	int max_phase = MIN_VALUE, current_phase;
	enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
	struct hws_topology_map *tm = ddr3_get_topology_map();
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DUNIT_ODT_CONTROL_REG,
	/* latch the packed per-CS read-sample delays for this interface */
	CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
				      READ_DATA_SAMPLE_DELAY,
				      data_read, MASK_ALL_BITS));
	val = data_read[if_id];
	for (cs_num = 0; cs_num < MAX_CS_NUM; cs_num++) {
		read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);
		/* find maximum of read_samples */
		if (read_sample[cs_num] >= max_read_sample) {
			if (read_sample[cs_num] == max_read_sample)
				max_phase = MIN_VALUE;
			max_read_sample = read_sample[cs_num];
			     pup_index < tm->num_of_bus_per_interface;
				CHECK_STATUS(ddr3_tip_bus_read
					      ACCESS_TYPE_UNICAST, pup_index,
					      RL_PHY_REG + CS_REG_VALUE(cs_num),
				/* NOTE(review): mask 0xe0 keeps bits 5-7 but
				 * '>> 6' discards bit 5 — confirm against the
				 * RL phase field definition.
				 */
				current_phase = ((int)val & 0xe0) >> 6;
				if (current_phase >= max_phase)
					max_phase = current_phase;
		/* NOTE(review): min_read_sample starts at 0, so this compare
		 * can never lower it — verify the intended initial value.
		 */
		if (read_sample[cs_num] < min_read_sample)
			min_read_sample = read_sample[cs_num];
	min_read_sample = min_read_sample - 1;
	/* widen the upper bound: +4 fixed margin plus half the RL phase */
	max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
	if (min_read_sample >= 0xf)
		min_read_sample = 0xf;
	if (max_read_sample >= 0x1f)
		max_read_sample = 0x1f;
	/* NOTE(review): min_read_sample was already decremented above, so the
	 * value programmed here is (original min - 2) — confirm intent.
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       ((min_read_sample - 1) << 12),
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       (max_read_sample << 16),
/*
 * Read the RX valid-window result of each of the four data PHYs (pups)
 * on interface 'if_id' from the PHY result DB register into res[0..3].
 */
int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
	u32 reg_pup = RESULT_DB_PHY_REG_ADDR;
	for (i = 0; i < 4; i++) {
		CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
					       ACCESS_TYPE_UNICAST, i,
					       DDR_PHY_DATA, reg_pup,
		/* 5-bit window size sits above the RX offset in the result */
		res[i] = (reg_data >> RESULT_DB_PHY_REG_RX_OFFSET) & 0x1f;
/*
 * This algorithm deals with the vertical optimum from Voltage point of view
 * of the sample signal.
 * Voltage sample point can improve the Eye / window size of the bit and the
 * The problem is that it is tuned the same for all DQs, so there isn't any
 * It is more like centralization.
 * But because we don't have the training SM support, we do it a bit more
 * smart search to save time.
 */
/*
 * ddr3_tip_vref() - per-pup Vref (sample voltage) optimization.
 *
 * Repeatedly runs RX centralization while stepping each pup's Vref
 * index (through the non-linear vref_map[] order) and keeps the Vref
 * giving the widest RX valid window.  A coarse search with
 * VREF_INITIAL_STEP is followed by a fine backwards search with
 * VREF_SECOND_STEP.  The centralization start/end patterns are
 * temporarily forced to PATTERN_VREF and restored before returning.
 */
int ddr3_tip_vref(u32 dev_num)
	/*
	 * The Vref register have non linear order. Need to check what will be
	 * in future projects.
	 */
		1, 2, 3, 4, 5, 6, 7, 0
	/* State and parameter definitions */
	u32 initial_step = VREF_INITIAL_STEP;
	/* second (fine) step, applied while searching backwards */
	u32 second_step = VREF_SECOND_STEP;
	/* NOTE(review): 'currrent_vref' (sic) — typo kept; name is used
	 * consistently throughout this function.
	 */
	u32 algo_run_flag = 0, currrent_vref = 0;
	u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
	u32 copy_start_pattern, copy_end_pattern;
	enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
	struct hws_topology_map *tm = ddr3_get_topology_map();
	CHECK_STATUS(ddr3_tip_special_rx(dev_num));
	/* save start/end pattern */
	copy_start_pattern = start_pattern;
	copy_end_pattern = end_pattern;
	/* set vref as centralization pattern */
	start_pattern = PATTERN_VREF;
	end_pattern = PATTERN_VREF;
	/* init per-pup search state; pups whose window already exceeds the
	 * threshold are marked converged up front
	 */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		     pup < tm->num_of_bus_per_interface; pup++) {
			current_vref[pup][if_id] = 0;
			last_vref[pup][if_id] = 0;
			lim_vref[pup][if_id] = 0;
			current_valid_window[pup][if_id] = 0;
			last_valid_window[pup][if_id] = 0;
			if (vref_window_size[if_id][pup] >
			    vref_window_size_th) {
				pup_st[pup][if_id] = VREF_CONVERGE;
				DEBUG_TRAINING_HW_ALG(
					("VREF config, IF[ %d ]pup[ %d ] - Vref tune not requered (%d)\n",
					 if_id, pup, __LINE__));
				pup_st[pup][if_id] = VREF_STEP_1;
				CHECK_STATUS(ddr3_tip_bus_read
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, &val));
				/* program the first Vref candidate,
				 * vref_map[0], into the low nibble
				 */
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      pup, DDR_PHY_DATA, reg_addr,
					      (val & (~0xf)) | vref_map[0]));
				DEBUG_TRAINING_HW_ALG(
					("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
					 (val & (~0xf)) | vref_map[0],
		interface_state[if_id] = 0;
	/* TODO: Set number of active interfaces */
	num_pup = tm->num_of_bus_per_interface * MAX_INTERFACE_NUM;
	/* NOTE(review): bitwise '&' where logical '&&' is meant; works here
	 * because both operands are 0/1 relational results, but fragile.
	 */
	while ((algo_run_flag <= num_pup) & (while_count < 10)) {
		/* repeat centralization to average the measured windows */
		for (rep = 1; rep < 4; rep++) {
			ddr3_tip_centr_skip_min_win_check = 1;
			ddr3_tip_centralization_rx(dev_num);
			ddr3_tip_centr_skip_min_win_check = 0;
			/* Read Valid window results only for non converge pups */
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				if (interface_state[if_id] != 4) {
					get_valid_win_rx(dev_num, if_id, res);
					     pup < tm->num_of_bus_per_interface;
						    (tm->bus_act_mask, pup);
						/* running average across reps,
						 * scaled x1000 for resolution
						 */
						current_valid_window[pup]
						    (current_valid_window[pup]
						     [if_id] * (rep - 1) +
						     1000 * res[pup]) / rep;
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_TRAINING_HW_ALG(
				("current_valid_window: IF[ %d ] - ", if_id));
			     pup < tm->num_of_bus_per_interface; pup++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
			DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
		/* Compare results and respond as function of state */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			     pup < tm->num_of_bus_per_interface; pup++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
					("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
				/* converged pups need no further handling */
				if (pup_st[pup][if_id] == VREF_CONVERGE)
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
					("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
					 current_valid_window[pup]
					 last_valid_window[pup]
					 [if_id], lim_vref[pup]
				/*
				 * The -1 is for solution resolution +/- 1 tap
				 */
				/* window did not shrink (within a 0.2-tap
				 * tolerance in the x1000 scale): keep stepping
				 */
				if (current_valid_window[pup][if_id] + 200 >=
				    (last_valid_window[pup][if_id])) {
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						/*
						 * We stay in the same state and
						 * step just update the window
						 * size (take the max) and Vref
						 */
						if (current_vref[pup]
						    [if_id] == VREF_MAX_INDEX) {
							/*
							 * If we step to the end
							 * and didn't converge
							 */
							DEBUG_TRAINING_HW_ALG
							 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
						/* continue to update the Vref index */
							if (current_vref[pup]
							last_valid_window[pup]
							    GET_MAX(current_valid_window
						/* update the Vref for next stage */
							 ACCESS_TYPE_UNICAST, pup,
							 DDR_PHY_DATA, reg_addr,
							 ACCESS_TYPE_UNICAST, pup,
							 DDR_PHY_DATA, reg_addr,
							 vref_map[currrent_vref]));
						DEBUG_TRAINING_HW_ALG
						 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
						  vref_map[currrent_vref],
					} else if (pup_st[pup][if_id]
						/*
						 * We keep on search back with
						 * the same step size.
						 */
						last_valid_window[pup]
						    GET_MAX(current_valid_window
						last_vref[pup][if_id] =
						/* we finish all search space */
						if ((current_vref[pup]
						     [if_id] - second_step) == lim_vref[pup][if_id]) {
							/*
							 * If we step to the end
							 * and didn't converge
							 */
							DEBUG_TRAINING_HW_ALG
							 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
						/* we finish all search space */
						if (current_vref[pup]
							/*
							 * If we step to the end
							 * and didn't converge
							 */
							DEBUG_TRAINING_HW_ALG
							 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
						/* Update the Vref for next stage */
							 ACCESS_TYPE_UNICAST, pup,
							 DDR_PHY_DATA, reg_addr,
							 ACCESS_TYPE_UNICAST, pup,
							 DDR_PHY_DATA, reg_addr,
							 vref_map[currrent_vref]));
						DEBUG_TRAINING_HW_ALG
						 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
						  vref_map[currrent_vref],
					/* we change state and change step */
					/* window shrank: either refine with the
					 * smaller step (STEP_1 -> STEP_2) or
					 * settle on the previous best (STEP_2)
					 */
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						lim_vref[pup][if_id] =
						    [if_id] - initial_step;
						last_valid_window[pup]
						    current_valid_window[pup]
						last_vref[pup][if_id] =
						current_vref[pup][if_id] =
						    last_vref[pup][if_id] -
						/* Update the Vref for next stage */
							 ACCESS_TYPE_UNICAST, pup,
							 DDR_PHY_DATA, reg_addr,
							 ACCESS_TYPE_UNICAST, pup,
							 DDR_PHY_DATA, reg_addr,
							 vref_map[current_vref[pup]
						DEBUG_TRAINING_HW_ALG
						 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
						  vref_map[current_vref[pup]
					} else if (pup_st[pup][if_id] == VREF_STEP_2) {
						/*
						 * The last search was the max
						 * point set value and exit
						 */
							 ACCESS_TYPE_UNICAST, pup,
							 DDR_PHY_DATA, reg_addr,
							 ACCESS_TYPE_UNICAST, pup,
							 DDR_PHY_DATA, reg_addr,
							 vref_map[last_vref[pup]
						DEBUG_TRAINING_HW_ALG
						 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
						  vref_map[last_vref[pup]
					interface_state[if_id]++;
					DEBUG_TRAINING_HW_ALG
					 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
	/* dump the finally selected Vref of every active pup */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		     pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);
			CHECK_STATUS(ddr3_tip_bus_read
				      ACCESS_TYPE_UNICAST, pup,
				      DDR_PHY_DATA, reg_addr, &val));
			DEBUG_TRAINING_HW_ALG(
				("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
				 if_id, pup, val, __LINE__));
	/* NOTE(review): appears after the loop in this view, so if_id equals
	 * MAX_INTERFACE_NUM here — confirm it belongs inside the loop.
	 */
	flow_result[if_id] = TEST_SUCCESS;
	/* restore start/end pattern */
	start_pattern = copy_start_pattern;
	end_pattern = copy_end_pattern;
634 int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
637 u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
638 struct hws_topology_map *tm = ddr3_get_topology_map();
641 * ck_delay_table is delaying the of the clock signal only.
642 * (to overcome timing issues between_c_k & command/address signals)
645 * ca_delay is delaying the of the entire command & Address signals
646 * (include Clock signal to overcome DGL error on the Clock versus
651 if ((ck_delay == -1) || (ck_delay_16 == -1)) {
652 DEBUG_TRAINING_HW_ALG(
654 ("ERROR: One of ck_delay values not initialized!!!\n"));
657 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
658 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
659 /* Calc delay ps in ADLL tap */
660 if (tm->interface_params[if_id].bus_width ==
662 ck_num_adll_tap = ck_delay_16 / adll_tap;
664 ck_num_adll_tap = ck_delay / adll_tap;
666 ca_num_adll_tap = ca_delay / adll_tap;
667 data = (ck_num_adll_tap & 0x3f) +
668 ((ca_num_adll_tap & 0x3f) << 10);
671 * Set the ADLL number to the CK ADLL for Interfaces for
674 DEBUG_TRAINING_HW_ALG(
676 ("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
677 ck_num_adll_tap, ca_num_adll_tap, adll_tap));
679 CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
680 if_id, ACCESS_TYPE_MULTICAST,
681 PARAM_NOT_CARE, DDR_PHY_CONTROL,