2 * Copyright (C) Marvell International Ltd. and its affiliates
4 * SPDX-License-Identifier: GPL-2.0
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
15 #define WL_ITERATION_NUM 10
16 #define ONE_CLOCK_ERROR_SHIFT 2
17 #define ALIGN_ERROR_SHIFT -2
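/*
 * Editor's note (assumption): the two shift constants above are the
 * edge_offset values fed to ddr3_tip_xsb_compare_test() by the
 * supplementary write-leveling flow - a one-clock error moves the
 * compare window +2 words, an alignment error -2 words; the negative
 * ALIGN_ERROR_SHIFT relies on u32 wraparound when it is added to
 * word_in_pattern.
 */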
19 static u32 pup_mask_table[] = {
26 static struct write_supp_result wr_supp_res[MAX_INTERFACE_NUM][MAX_BUS_NUM];
28 static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num);
29 static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num);
30 static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num);
31 static int ddr3_tip_wl_supp_align_err_shift(u32 dev_num, u32 if_id, u32 bus_id,
33 static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id,
34 u32 bus_id, u32 offset,
36 static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id,
37 u32 edge_offset, u32 bus_id_delta);
38 static int ddr3_tip_wl_supp_one_clk_err_shift(u32 dev_num, u32 if_id,
39 u32 bus_id, u32 bus_id_delta);
41 u32 hws_ddr3_tip_max_cs_get(void)
45 struct hws_topology_map *tm = ddr3_get_topology_map();
48 for (c_cs = 0; c_cs < NUM_OF_CS; c_cs++) {
50 interface_params[0].as_bus_params[0].
59 /*****************************************************************************
61 ******************************************************************************/
62 int ddr3_tip_dynamic_read_leveling(u32 dev_num, u32 freq)
65 u32 max_cs = hws_ddr3_tip_max_cs_get();
66 u32 bus_num, if_id, cl_val;
67 enum hws_speed_bin speed_bin_index;
68 /* save current CS value */
69 u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 };
70 int is_any_pup_fail = 0;
71 u32 data_read[MAX_INTERFACE_NUM + 1] = { 0 };
72 u8 rl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM];
73 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
74 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
75 struct hws_topology_map *tm = ddr3_get_topology_map();
77 if (rl_version == 0) {
83 CHECK_STATUS(ddr3_tip_if_write(
84 dev_num, ACCESS_TYPE_MULTICAST,
85 PARAM_NOT_CARE, TRAINING_REG,
87 CHECK_STATUS(ddr3_tip_if_write(
88 dev_num, ACCESS_TYPE_MULTICAST,
90 TRAINING_PATTERN_BASE_ADDRESS_REG,
92 CHECK_STATUS(ddr3_tip_if_write(
93 dev_num, ACCESS_TYPE_MULTICAST,
94 PARAM_NOT_CARE, TRAINING_REG,
95 (u32)(1 << 31), (u32)(1 << 31)));
97 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
98 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
99 training_result[training_stage][if_id] = TEST_SUCCESS;
100 if (ddr3_tip_if_polling
101 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
102 (u32)(1 << 31), TRAINING_REG,
103 MAX_POLLING_ITERATIONS) != MV_OK) {
106 ("RL: DDR3 poll failed(1) IF %d\n",
108 training_result[training_stage][if_id] =
116 /* read read-leveling result */
117 CHECK_STATUS(ddr3_tip_if_read
118 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
119 TRAINING_REG, data_read, 1 << 30));
120 /* exit read leveling mode */
121 CHECK_STATUS(ddr3_tip_if_write
122 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
123 TRAINING_SW_2_REG, 0x8, 0x9));
124 CHECK_STATUS(ddr3_tip_if_write
125 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
126 TRAINING_SW_1_REG, 1 << 16, 1 << 16));
128 /* disable RL machine all Trn_CS[3:0] , [16:0] */
130 CHECK_STATUS(ddr3_tip_if_write
131 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
132 TRAINING_REG, 0, 0xf1ffff));
134 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
135 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
136 if ((data_read[if_id] & (1 << 30)) == 0) {
139 ("\n_read Leveling failed for IF %d\n",
141 training_result[training_stage][if_id] =
151 for (effective_cs = 0; effective_cs < NUM_OF_CS; effective_cs++)
152 for (bus_num = 0; bus_num < MAX_BUS_NUM; bus_num++)
153 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++)
154 rl_values[effective_cs][bus_num][if_id] = 0;
156 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
157 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
158 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
159 training_result[training_stage][if_id] = TEST_SUCCESS;
161 /* save current cs enable reg val */
162 CHECK_STATUS(ddr3_tip_if_read
163 (dev_num, ACCESS_TYPE_UNICAST, if_id,
164 CS_ENABLE_REG, cs_enable_reg_val,
166 /* enable single cs */
167 CHECK_STATUS(ddr3_tip_if_write
168 (dev_num, ACCESS_TYPE_UNICAST, if_id,
169 CS_ENABLE_REG, (1 << 3), (1 << 3)));
172 ddr3_tip_reset_fifo_ptr(dev_num);
175 * Phase 1: Load pattern (using ODPG)
177 * enter Read Leveling mode
178 * only 27 bits are masked
179 * assuming non multi-CS configuration
180 * write to CS = 0 for the non multi CS configuration, note
181 * that the results shall be read back to the required CS !!!
184 /* BUS count is 0 shifted 26 */
185 CHECK_STATUS(ddr3_tip_if_write
186 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
187 ODPG_DATA_CONTROL_REG, 0x3, 0x3));
188 CHECK_STATUS(ddr3_tip_configure_odpg
189 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0,
190 pattern_table[PATTERN_RL].num_of_phases_tx, 0,
191 pattern_table[PATTERN_RL].num_of_phases_rx, 0, 0,
192 effective_cs, STRESS_NONE, DURATION_SINGLE));
194 /* load pattern to ODPG */
195 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
196 PARAM_NOT_CARE, PATTERN_RL,
197 pattern_table[PATTERN_RL].
201 * Phase 2: ODPG to Read Leveling mode
204 /* General Training Opcode register */
205 CHECK_STATUS(ddr3_tip_if_write
206 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
207 ODPG_WRITE_READ_MODE_ENABLE_REG, 0,
210 CHECK_STATUS(ddr3_tip_if_write
211 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
212 ODPG_TRAINING_CONTROL_REG,
213 (0x301b01 | effective_cs << 2), 0x3c3fef));
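/*
 * A hedged reading of the value above: effective_cs lands in bits
 * [3:2] of ODPG_TRAINING_CONTROL_REG (both bits are covered by the
 * 0x3c3fef mask), so e.g. effective_cs = 1 writes
 * 0x301b01 | (1 << 2) = 0x301b05.
 */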
215 /* Object1 opcode register 0 & 1 */
216 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
217 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
219 tm->interface_params[if_id].speed_bin_index;
221 cas_latency_table[speed_bin_index].cl_val[freq];
222 data = (cl_val << 17) | (0x3 << 25);
223 mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25);
224 CHECK_STATUS(ddr3_tip_if_write
225 (dev_num, ACCESS_TYPE_UNICAST, if_id,
226 ODPG_OBJ1_OPCODE_REG, data, mask));
229 /* Set iteration count to max value */
230 CHECK_STATUS(ddr3_tip_if_write
231 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
232 TRAINING_OPCODE_1_REG, 0xd00, 0xd00));
235 * Phase 2: Mask config
238 ddr3_tip_dynamic_read_leveling_seq(dev_num);
241 * Phase 3: Read Leveling execution
244 /* temporary jira dunit=14751 */
245 CHECK_STATUS(ddr3_tip_if_write
246 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
247 TRAINING_DBG_1_REG, 0, (u32)(1 << 31)));
248 /* configure phy reset value */
249 CHECK_STATUS(ddr3_tip_if_write
250 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
251 TRAINING_DBG_3_REG, (0x7f << 24),
253 /* data pup rd reset enable */
254 CHECK_STATUS(ddr3_tip_if_write
255 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
256 SDRAM_CONFIGURATION_REG, 0, (1 << 30)));
257 /* data pup rd reset disable */
258 CHECK_STATUS(ddr3_tip_if_write
259 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
260 SDRAM_CONFIGURATION_REG, (1 << 30), (1 << 30)));
261 /* training SW override & training RL mode */
262 CHECK_STATUS(ddr3_tip_if_write
263 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
264 TRAINING_SW_2_REG, 0x1, 0x9));
265 /* training enable */
266 CHECK_STATUS(ddr3_tip_if_write
267 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
268 TRAINING_REG, (1 << 24) | (1 << 20),
269 (1 << 24) | (1 << 20)));
270 CHECK_STATUS(ddr3_tip_if_write
271 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
272 TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31)));
274 /********* trigger training *******************/
275 /* Trigger, poll on status and disable ODPG */
276 CHECK_STATUS(ddr3_tip_if_write
277 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
278 ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));
279 CHECK_STATUS(ddr3_tip_if_write
280 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
281 ODPG_TRAINING_STATUS_REG, 0x1, 0x1));
283 /* check for training done + results pass */
284 if (ddr3_tip_if_polling
285 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x2, 0x2,
286 ODPG_TRAINING_STATUS_REG,
287 MAX_POLLING_ITERATIONS) != MV_OK) {
288 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
289 ("Training Done Failed\n"));
293 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
294 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
295 CHECK_STATUS(ddr3_tip_if_read
296 (dev_num, ACCESS_TYPE_UNICAST,
298 ODPG_TRAINING_TRIGGER_REG, data_read,
300 data = data_read[if_id];
302 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
303 ("Training Result Failed\n"));
307 /* disable ODPG - back to functional mode */
308 CHECK_STATUS(ddr3_tip_if_write
309 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
310 ODPG_ENABLE_REG, 0x1 << ODPG_DISABLE_OFFS,
311 (0x1 << ODPG_DISABLE_OFFS)));
312 if (ddr3_tip_if_polling
313 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x0, 0x1,
314 ODPG_ENABLE_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
315 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
316 ("ODPG disable failed "));
319 CHECK_STATUS(ddr3_tip_if_write
320 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
321 ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));
323 /* double loop on bus, pup */
324 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
325 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
326 /* check training done */
329 bus_num < tm->num_of_bus_per_interface;
331 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
332 if (ddr3_tip_if_polling
333 (dev_num, ACCESS_TYPE_UNICAST,
334 if_id, (1 << 25), (1 << 25),
335 mask_results_pup_reg_map[bus_num],
336 MAX_POLLING_ITERATIONS) != MV_OK) {
337 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
338 ("\n_r_l: DDR3 poll failed(2) for bus %d",
342 /* read result per pup */
343 CHECK_STATUS(ddr3_tip_if_read
347 mask_results_pup_reg_map
348 [bus_num], data_read,
350 rl_values[effective_cs][bus_num]
351 [if_id] = (u8)data_read[if_id];
355 if (is_any_pup_fail == 1) {
356 training_result[training_stage][if_id] =
363 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n"));
366 * Phase 3: Exit Read Leveling
369 CHECK_STATUS(ddr3_tip_if_write
370 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
371 TRAINING_SW_2_REG, (1 << 3), (1 << 3)));
372 CHECK_STATUS(ddr3_tip_if_write
373 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
374 TRAINING_SW_1_REG, (1 << 16), (1 << 16)));
375 /* set ODPG to functional */
376 CHECK_STATUS(ddr3_tip_if_write
377 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
378 ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
381 * Copy the result from the effective CS search to the
384 /*ddr3_tip_write_cs_result(dev_num, RL_PHY_REG); */
385 CHECK_STATUS(ddr3_tip_if_write
386 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
387 ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
390 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
391 /* double loop on bus, pup */
392 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
393 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
395 bus_num < tm->num_of_bus_per_interface;
397 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
398 /* read result per pup from array */
399 data = rl_values[effective_cs][bus_num][if_id];
400 data = (data & 0x1f) |
401 (((data & 0xe0) >> 5) << 6);
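/*
 * Worked example (editor's sketch): a raw RL result of 0x65 - phase
 * 0x3 in [7:5], ADLL 0x05 in [4:0] - is repacked with the phase
 * moved up to [8:6]:
 * (0x65 & 0x1f) | (((0x65 & 0xe0) >> 5) << 6) = 0x05 | (0x3 << 6) = 0xc5
 */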
402 ddr3_tip_bus_write(dev_num,
406 bus_num, DDR_PHY_DATA,
409 0) ? 0x0 : 0x4), data);
413 /* Set to 0 after each loop to avoid an illegal value being used */
416 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
417 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
418 /* restore cs enable value */
419 CHECK_STATUS(ddr3_tip_if_write
420 (dev_num, ACCESS_TYPE_UNICAST, if_id,
421 CS_ENABLE_REG, cs_enable_reg_val[if_id],
423 if (odt_config != 0) {
424 CHECK_STATUS(ddr3_tip_write_additional_odt_setting
429 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
430 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
431 if (training_result[training_stage][if_id] == TEST_FAILED)
439 * Legacy Dynamic write leveling
441 int ddr3_tip_legacy_dynamic_write_leveling(u32 dev_num)
443 u32 c_cs, if_id, cs_mask = 0;
444 u32 max_cs = hws_ddr3_tip_max_cs_get();
445 struct hws_topology_map *tm = ddr3_get_topology_map();
448 * In TRAINING reg (0x15b0) write 0x80000008 | cs_mask:
450 * cs_mask = 0x1 <<20 Trn_CS0 - CS0 is included in the DDR3 training
451 * cs_mask = 0x1 <<21 Trn_CS1 - CS1 is included in the DDR3 training
452 * cs_mask = 0x1 <<22 Trn_CS2 - CS2 is included in the DDR3 training
453 * cs_mask = 0x1 <<23 Trn_CS3 - CS3 is included in the DDR3 training
454 * Trn_auto_seq = write leveling
456 for (c_cs = 0; c_cs < max_cs; c_cs++)
457 cs_mask = cs_mask | 1 << (20 + c_cs);
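/*
 * Example (editor's note): for max_cs = 2 this loop yields
 * cs_mask = (1 << 20) | (1 << 21) = 0x300000, so the write below
 * programs TRAINING_REG with 0x80000008 | 0x300000 = 0x80300008.
 */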
459 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
460 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
461 CHECK_STATUS(ddr3_tip_if_write
462 (dev_num, ACCESS_TYPE_MULTICAST, 0,
463 TRAINING_REG, (0x80000008 | cs_mask),
466 if (ddr3_tip_if_polling
467 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
468 (u32)0x80000000, TRAINING_REG,
469 MAX_POLLING_ITERATIONS) != MV_OK) {
470 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
471 ("polling failed for Old WL result\n"));
480 * Legacy Dynamic read leveling
482 int ddr3_tip_legacy_dynamic_read_leveling(u32 dev_num)
484 u32 c_cs, if_id, cs_mask = 0;
485 u32 max_cs = hws_ddr3_tip_max_cs_get();
486 struct hws_topology_map *tm = ddr3_get_topology_map();
489 * In TRAINING reg (0x15b0) write 0x80000040 | cs_mask:
491 * cs_mask = 0x1 <<20 Trn_CS0 - CS0 is included in the DDR3 training
492 * cs_mask = 0x1 <<21 Trn_CS1 - CS1 is included in the DDR3 training
493 * cs_mask = 0x1 <<22 Trn_CS2 - CS2 is included in the DDR3 training
494 * cs_mask = 0x1 <<23 Trn_CS3 - CS3 is included in the DDR3 training
495 * Trn_auto_seq = Read Leveling using training pattern
497 for (c_cs = 0; c_cs < max_cs; c_cs++)
498 cs_mask = cs_mask | 1 << (20 + c_cs);
500 CHECK_STATUS(ddr3_tip_if_write
501 (dev_num, ACCESS_TYPE_MULTICAST, 0, TRAINING_REG,
502 (0x80000040 | cs_mask), 0xffffffff));
505 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
506 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
507 if (ddr3_tip_if_polling
508 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
509 (u32)0x80000000, TRAINING_REG,
510 MAX_POLLING_ITERATIONS) != MV_OK) {
511 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
512 ("polling failed for Old RL result\n"));
521 * Dynamic per bit read leveling
523 int ddr3_tip_dynamic_per_bit_read_leveling(u32 dev_num, u32 freq)
526 u32 bus_num, if_id, cl_val, bit_num;
527 u32 curr_numb, curr_min_delay;
528 int adll_array[3] = { 0, -0xa, 0x14 };
529 u32 phyreg3_arr[MAX_INTERFACE_NUM][MAX_BUS_NUM];
530 enum hws_speed_bin speed_bin_index;
531 int is_any_pup_fail = 0;
533 u32 cs_enable_reg_val[MAX_INTERFACE_NUM]; /* save current CS value */
534 u32 data_read[MAX_INTERFACE_NUM];
535 int per_bit_rl_pup_status[MAX_INTERFACE_NUM][MAX_BUS_NUM];
536 u32 data2_write[MAX_INTERFACE_NUM][MAX_BUS_NUM];
537 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
538 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
539 struct hws_topology_map *tm = ddr3_get_topology_map();
541 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
542 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
544 bus_num <= tm->num_of_bus_per_interface; bus_num++) {
545 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
546 per_bit_rl_pup_status[if_id][bus_num] = 0;
547 data2_write[if_id][bus_num] = 0;
548 /* read current value of phy register 0x3 */
549 CHECK_STATUS(ddr3_tip_bus_read
550 (dev_num, if_id, ACCESS_TYPE_UNICAST,
551 bus_num, DDR_PHY_DATA,
552 READ_CENTRALIZATION_PHY_REG,
553 &phyreg3_arr[if_id][bus_num]));
558 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
559 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
560 training_result[training_stage][if_id] = TEST_SUCCESS;
562 /* save current cs enable reg val */
563 CHECK_STATUS(ddr3_tip_if_read
564 (dev_num, ACCESS_TYPE_UNICAST, if_id,
565 CS_ENABLE_REG, &cs_enable_reg_val[if_id],
567 /* enable single cs */
568 CHECK_STATUS(ddr3_tip_if_write
569 (dev_num, ACCESS_TYPE_UNICAST, if_id,
570 CS_ENABLE_REG, (1 << 3), (1 << 3)));
573 ddr3_tip_reset_fifo_ptr(dev_num);
574 for (curr_numb = 0; curr_numb < 3; curr_numb++) {
576 * Phase 1: Load pattern (using ODPG)
578 * enter Read Leveling mode
579 * only 27 bits are masked
580 * assuming non multi-CS configuration
581 * write to CS = 0 for the non multi CS configuration, note that
582 * the results shall be read back to the required CS !!!
585 /* BUS count is 0 shifted 26 */
586 CHECK_STATUS(ddr3_tip_if_write
587 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
588 ODPG_DATA_CONTROL_REG, 0x3, 0x3));
589 CHECK_STATUS(ddr3_tip_configure_odpg
590 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0,
591 pattern_table[PATTERN_TEST].num_of_phases_tx, 0,
592 pattern_table[PATTERN_TEST].num_of_phases_rx, 0,
593 0, 0, STRESS_NONE, DURATION_SINGLE));
595 /* load pattern to ODPG */
596 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
597 PARAM_NOT_CARE, PATTERN_TEST,
598 pattern_table[PATTERN_TEST].
602 * Phase 2: ODPG to Read Leveling mode
605 /* General Training Opcode register */
606 CHECK_STATUS(ddr3_tip_if_write
607 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
608 ODPG_WRITE_READ_MODE_ENABLE_REG, 0,
610 CHECK_STATUS(ddr3_tip_if_write
611 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
612 ODPG_TRAINING_CONTROL_REG, 0x301b01, 0x3c3fef));
614 /* Object1 opcode register 0 & 1 */
615 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
616 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
618 tm->interface_params[if_id].speed_bin_index;
620 cas_latency_table[speed_bin_index].cl_val[freq];
621 data = (cl_val << 17) | (0x3 << 25);
622 mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25);
623 CHECK_STATUS(ddr3_tip_if_write
624 (dev_num, ACCESS_TYPE_UNICAST, if_id,
625 ODPG_OBJ1_OPCODE_REG, data, mask));
628 /* Set iteration count to max value */
629 CHECK_STATUS(ddr3_tip_if_write
630 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
631 TRAINING_OPCODE_1_REG, 0xd00, 0xd00));
634 * Phase 2: Mask config
637 ddr3_tip_dynamic_per_bit_read_leveling_seq(dev_num);
640 * Phase 3: Read Leveling execution
643 /* temporary jira dunit=14751 */
644 CHECK_STATUS(ddr3_tip_if_write
645 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
646 TRAINING_DBG_1_REG, 0, (u32)(1 << 31)));
647 /* configure phy reset value */
648 CHECK_STATUS(ddr3_tip_if_write
649 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
650 TRAINING_DBG_3_REG, (0x7f << 24),
652 /* data pup rd reset enable */
653 CHECK_STATUS(ddr3_tip_if_write
654 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
655 SDRAM_CONFIGURATION_REG, 0, (1 << 30)));
656 /* data pup rd reset disable */
657 CHECK_STATUS(ddr3_tip_if_write
658 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
659 SDRAM_CONFIGURATION_REG, (1 << 30), (1 << 30)));
660 /* training SW override & training RL mode */
661 CHECK_STATUS(ddr3_tip_if_write
662 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
663 TRAINING_SW_2_REG, 0x1, 0x9));
664 /* training enable */
665 CHECK_STATUS(ddr3_tip_if_write
666 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
667 TRAINING_REG, (1 << 24) | (1 << 20),
668 (1 << 24) | (1 << 20)));
669 CHECK_STATUS(ddr3_tip_if_write
670 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
671 TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31)));
673 /********* trigger training *******************/
674 /* Trigger, poll on status and disable ODPG */
675 CHECK_STATUS(ddr3_tip_if_write
676 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
677 ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));
678 CHECK_STATUS(ddr3_tip_if_write
679 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
680 ODPG_TRAINING_STATUS_REG, 0x1, 0x1));
682 /* check for training done + results pass */
683 if (ddr3_tip_if_polling
684 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x2, 0x2,
685 ODPG_TRAINING_STATUS_REG,
686 MAX_POLLING_ITERATIONS) != MV_OK) {
687 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
688 ("Training Done Failed\n"));
692 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
693 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
694 CHECK_STATUS(ddr3_tip_if_read
695 (dev_num, ACCESS_TYPE_UNICAST,
697 ODPG_TRAINING_TRIGGER_REG, data_read,
699 data = data_read[if_id];
701 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
702 ("Training Result Failed\n"));
706 /* disable ODPG - back to functional mode */
707 CHECK_STATUS(ddr3_tip_if_write
708 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
709 ODPG_ENABLE_REG, 0x1 << ODPG_DISABLE_OFFS,
710 (0x1 << ODPG_DISABLE_OFFS)));
711 if (ddr3_tip_if_polling
712 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x0, 0x1,
713 ODPG_ENABLE_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
714 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
715 ("ODPG disable failed "));
718 CHECK_STATUS(ddr3_tip_if_write
719 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
720 ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));
722 /* double loop on bus, pup */
723 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
724 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
725 /* check training done */
727 bus_num < tm->num_of_bus_per_interface;
729 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
731 if (per_bit_rl_pup_status[if_id][bus_num]
734 for (bit_num = 0; bit_num < 8;
736 if (ddr3_tip_if_polling
741 mask_results_dq_reg_map
742 [bus_num * 8 + bit_num],
743 MAX_POLLING_ITERATIONS) !=
747 ("\n_r_l: DDR3 poll failed(2) for bus %d bit %d\n",
751 /* read result per pup */
757 mask_results_dq_reg_map
769 if (curr_min_delay == 0)
776 if (data > data2_write[if_id][bus_num])
784 if (data2_write[if_id][bus_num] <=
786 MAX_DQ_READ_LEVELING_DELAY)) {
787 per_bit_rl_pup_status[if_id]
794 /* check if there is a need to search for a new phyreg3 value */
796 /* if there is a DLL that has not been checked yet */
797 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
799 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
801 bus_num < tm->num_of_bus_per_interface;
803 VALIDATE_ACTIVE(tm->bus_act_mask,
805 if (per_bit_rl_pup_status[if_id]
807 /* go to next ADLL value */
814 bus_num, DDR_PHY_DATA,
815 READ_CENTRALIZATION_PHY_REG,
818 adll_array[curr_numb])));
826 } /* if (curr_numb < 2) */
829 } /* for ( curr_numb = 0; curr_numb <3; curr_numb++) */
831 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
832 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
833 for (bus_num = 0; bus_num < tm->num_of_bus_per_interface;
835 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
836 if (per_bit_rl_pup_status[if_id][bus_num] == 1)
837 ddr3_tip_bus_write(dev_num,
841 bus_num, DDR_PHY_DATA,
843 CS_REG_VALUE(effective_cs),
850 /* TBD flow does not support multi CS */
852 * cs_bitmask = tm->interface_params[if_id].
853 * as_bus_params[bus_num].cs_bitmask;
855 /* divide by 4 is used for retrieving the CS number */
857 * TBD BC2 - what is the PHY address for other
858 * CS ddr3_tip_write_cs_result() ???
861 * find what should be written to PHY
862 * - max delay that is less than threshold
864 if (is_any_pup_fail == 1) {
865 training_result[training_stage][if_id] = TEST_FAILED;
870 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n"));
873 * Phase 3: Exit Read Leveling
876 CHECK_STATUS(ddr3_tip_if_write
877 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
878 TRAINING_SW_2_REG, (1 << 3), (1 << 3)));
879 CHECK_STATUS(ddr3_tip_if_write
880 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
881 TRAINING_SW_1_REG, (1 << 16), (1 << 16)));
882 /* set ODPG to functional */
883 CHECK_STATUS(ddr3_tip_if_write
884 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
885 ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
887 * Copy the result from the effective CS search to the real
890 ddr3_tip_write_cs_result(dev_num, RL_PHY_REG);
891 CHECK_STATUS(ddr3_tip_if_write
892 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
893 ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
895 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
896 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
897 /* restore cs enable value */
898 CHECK_STATUS(ddr3_tip_if_write
899 (dev_num, ACCESS_TYPE_UNICAST, if_id,
900 CS_ENABLE_REG, cs_enable_reg_val[if_id],
902 if (odt_config != 0) {
903 CHECK_STATUS(ddr3_tip_write_additional_odt_setting
908 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
909 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
910 if (training_result[training_stage][if_id] == TEST_FAILED)
917 int ddr3_tip_calc_cs_mask(u32 dev_num, u32 if_id, u32 effective_cs,
920 u32 all_bus_cs = 0, same_bus_cs;
922 struct hws_topology_map *tm = ddr3_get_topology_map();
924 *cs_mask = same_bus_cs = CS_BIT_MASK;
927 * In some of the devices (such as BC2), the CS is per pup, and
928 * therefore mixed mode is valid, unlike other devices where the CS configuration
930 * In order to detect that, we do 'Or' and 'And' operations between all
932 * If they are not the same, then it is mixed mode, so all CS
933 * should be configured (when configuring the MRS)
935 for (bus_cnt = 0; bus_cnt < tm->num_of_bus_per_interface; bus_cnt++) {
936 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
938 all_bus_cs |= tm->interface_params[if_id].
939 as_bus_params[bus_cnt].cs_bitmask;
940 same_bus_cs &= tm->interface_params[if_id].
941 as_bus_params[bus_cnt].cs_bitmask;
943 /* cs enable is active low */
944 *cs_mask &= ~tm->interface_params[if_id].
945 as_bus_params[bus_cnt].cs_bitmask;
948 if (all_bus_cs == same_bus_cs)
949 *cs_mask = (*cs_mask | (~(1 << effective_cs))) & CS_BIT_MASK;
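/*
 * Worked example, assuming CS_BIT_MASK covers four chip-selects
 * (0xf): with CS0 and CS1 present on every bus, the loop above
 * clears bits [1:0] (0xf -> 0xc); since all_bus_cs == same_bus_cs,
 * the line above re-sets every bit except effective_cs, so
 * effective_cs = 0 yields *cs_mask = 0xe and effective_cs = 1 yields
 * 0xd - only the CS under training stays enabled (active low).
 */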
955 * Dynamic write leveling
957 int ddr3_tip_dynamic_write_leveling(u32 dev_num)
959 u32 reg_data = 0, iter, if_id, bus_cnt;
960 u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 };
961 u32 cs_mask[MAX_INTERFACE_NUM];
962 u32 read_data_sample_delay_vals[MAX_INTERFACE_NUM] = { 0 };
963 u32 read_data_ready_delay_vals[MAX_INTERFACE_NUM] = { 0 };
965 u32 res_values[MAX_INTERFACE_NUM * MAX_BUS_NUM] = { 0 };
966 u32 test_res = 0; /* 0 - success for all pup */
967 u32 data_read[MAX_INTERFACE_NUM];
968 u8 wl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM];
969 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
970 u32 cs_mask0[MAX_INTERFACE_NUM] = { 0 };
971 u32 max_cs = hws_ddr3_tip_max_cs_get();
972 struct hws_topology_map *tm = ddr3_get_topology_map();
974 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
975 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
977 training_result[training_stage][if_id] = TEST_SUCCESS;
979 /* save Read Data Sample Delay */
980 CHECK_STATUS(ddr3_tip_if_read
981 (dev_num, ACCESS_TYPE_UNICAST, if_id,
982 READ_DATA_SAMPLE_DELAY,
983 read_data_sample_delay_vals, MASK_ALL_BITS));
984 /* save Read Data Ready Delay */
985 CHECK_STATUS(ddr3_tip_if_read
986 (dev_num, ACCESS_TYPE_UNICAST, if_id,
987 READ_DATA_READY_DELAY, read_data_ready_delay_vals,
989 /* save current cs reg val */
990 CHECK_STATUS(ddr3_tip_if_read
991 (dev_num, ACCESS_TYPE_UNICAST, if_id,
992 CS_ENABLE_REG, cs_enable_reg_val, MASK_ALL_BITS));
996 * Phase 1: DRAM 2 Write Leveling mode
999 /* Assert 10 refresh commands to DRAM, to all CS */
1000 for (iter = 0; iter < WL_ITERATION_NUM; iter++) {
1001 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1002 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1003 CHECK_STATUS(ddr3_tip_if_write
1004 (dev_num, ACCESS_TYPE_UNICAST,
1005 if_id, SDRAM_OPERATION_REG,
1006 (u32)((~(0xf) << 8) | 0x2), 0xf1f));
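/*
 * Editor's note (assumption): 0xf1f masks the command field [4:0]
 * and the CS field [11:8] of SDRAM_OPERATION_REG; after the shift,
 * (~0xf << 8) leaves bits [11:8] at zero (active low, i.e. all CS
 * targeted), while 0x2 in [4:0] is taken to be the refresh opcode.
 */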
1009 /* check controller back to normal */
1010 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1011 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1012 if (ddr3_tip_if_polling
1013 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
1014 SDRAM_OPERATION_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
1015 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
1016 ("WL: DDR3 poll failed(3)"));
1020 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
1021 /* enable write leveling to all cs - Q off, WL on */
1022 /* calculate interface cs mask */
1023 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MRS1_CMD,
1026 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1027 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1028 /* cs enable is active low */
1029 ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
1033 /* Enable Output buffer to relevant CS - Q on , WL on */
1034 CHECK_STATUS(ddr3_tip_write_mrs_cmd
1035 (dev_num, cs_mask, MRS1_CMD, 0x80, 0x1080));
1037 /* enable odt for relevant CS */
1038 CHECK_STATUS(ddr3_tip_if_write
1039 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1040 0x1498, (0x3 << (effective_cs * 2)), 0xf));
1043 * Phase 2: Set training IP to write leveling mode
1046 CHECK_STATUS(ddr3_tip_dynamic_write_leveling_seq(dev_num));
1049 * Phase 3: Trigger training
1052 CHECK_STATUS(ddr3_tip_if_write
1053 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1054 ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));
1056 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
1057 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1060 if (ddr3_tip_if_polling
1061 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1062 (1 << 1), (1 << 1), ODPG_TRAINING_STATUS_REG,
1063 MAX_POLLING_ITERATIONS) != MV_OK) {
1066 ("WL: DDR3 poll (4) failed (Data: 0x%x)\n",
1069 #if !defined(CONFIG_ARMADA_38X) /*Disabled. JIRA #1498 */
1071 CHECK_STATUS(ddr3_tip_if_read
1072 (dev_num, ACCESS_TYPE_UNICAST,
1074 ODPG_TRAINING_TRIGGER_REG,
1075 &reg_data, (1 << 2)));
1076 if (reg_data != 0) {
1079 ("WL: WL failed IF %d reg_data=0x%x\n",
1086 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1087 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1089 if (ddr3_tip_if_polling
1090 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1091 (1 << 1), (1 << 1), ODPG_TRAINING_STATUS_REG,
1092 MAX_POLLING_ITERATIONS) != MV_OK) {
1095 ("WL: DDR3 poll (4) failed (Data: 0x%x)\n",
1098 #if !defined(CONFIG_ARMADA_38X) /*Disabled. JIRA #1498 */
1099 CHECK_STATUS(ddr3_tip_if_read
1100 (dev_num, ACCESS_TYPE_UNICAST,
1102 ODPG_TRAINING_STATUS_REG,
1103 data_read, (1 << 2)));
1104 reg_data = data_read[if_id];
1105 if (reg_data != 0) {
1108 ("WL: WL failed IF %d reg_data=0x%x\n",
1113 /* check for training completion per bus */
1115 bus_cnt < tm->num_of_bus_per_interface;
1117 VALIDATE_ACTIVE(tm->bus_act_mask,
1119 /* training status */
1120 CHECK_STATUS(ddr3_tip_if_read
1122 ACCESS_TYPE_UNICAST,
1124 mask_results_pup_reg_map
1125 [bus_cnt], data_read,
1127 reg_data = data_read[if_id];
1130 ("WL: IF %d BUS %d reg 0x%x\n",
1131 if_id, bus_cnt, reg_data));
1132 if (reg_data == 0) {
1135 tm->num_of_bus_per_interface)
1138 CHECK_STATUS(ddr3_tip_if_read
1140 ACCESS_TYPE_UNICAST,
1142 mask_results_pup_reg_map
1143 [bus_cnt], data_read,
1146 * Save the read value that should be
1147 * write to PHY register
1149 wl_values[effective_cs]
1151 (u8)data_read[if_id];
1157 * Phase 4: Exit write leveling mode
1160 /* disable DQs toggling */
1161 CHECK_STATUS(ddr3_tip_if_write
1162 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1163 WR_LEVELING_DQS_PATTERN_REG, 0x0, 0x1));
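/*
 * Editor's inference from the paired writes: bit 0 of
 * WR_LEVELING_DQS_PATTERN_REG gates DQS toggling during write
 * leveling - ddr3_tip_dynamic_write_leveling_seq() sets it to start
 * the pattern, and the write above clears it on exit.
 */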
1165 /* Update MRS 1 (WL off) */
1166 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MRS1_CMD,
1169 /* Update MRS 1 (return to functional mode - Q on , WL off) */
1170 CHECK_STATUS(ddr3_tip_write_mrs_cmd
1171 (dev_num, cs_mask0, MRS1_CMD, 0x0, 0x1080));
1173 /* set phy to normal mode */
1174 CHECK_STATUS(ddr3_tip_if_write
1175 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1176 TRAINING_SW_2_REG, 0x5, 0x7));
1178 /* exit sw override mode */
1179 CHECK_STATUS(ddr3_tip_if_write
1180 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1181 TRAINING_SW_2_REG, 0x4, 0x7));
1185 * Phase 5: Load WL values to each PHY
1188 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
1189 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1190 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1193 bus_cnt < tm->num_of_bus_per_interface;
1195 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
1196 /* check if result == pass */
1199 tm->num_of_bus_per_interface) +
1202 * read result control register
1206 wl_values[effective_cs][bus_cnt]
1209 * Write into write leveling register
1210 * ([4:0] ADLL, [8:6] Phase, [15:10]
1211 * (centralization) ADLL + 0x10)
1215 (((reg_data & 0xe0) >> 5) << 6) |
1216 (((reg_data & 0x1f) +
1217 phy_reg1_val) << 10);
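/*
 * Worked example (editor's sketch, taking phy_reg1_val = 0x10 per
 * the "+ 0x10" comment above): a WL result of 0x47 - ADLL 0x07 in
 * [4:0], phase 0x2 in [7:5] - packs to
 * 0x07 | (0x2 << 6) | ((0x07 + 0x10) << 10) = 0x5c87.
 */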
1220 ACCESS_TYPE_UNICAST,
1222 ACCESS_TYPE_UNICAST,
1227 CS_REGISTER_ADDR_OFFSET,
1232 * read result control register
1235 CHECK_STATUS(ddr3_tip_if_read
1237 ACCESS_TYPE_UNICAST,
1239 mask_results_pup_reg_map
1240 [bus_cnt], data_read,
1242 reg_data = data_read[if_id];
1245 ("WL: IF %d BUS %d failed, reg 0x%x\n",
1246 if_id, bus_cnt, reg_data));
1250 if (test_res != 0) {
1251 training_result[training_stage][if_id] =
1256 /* Set to 0 after each loop to avoid an illegal value being used */
1260 * Copy the result from the effective CS search to the real
1263 /* ddr3_tip_write_cs_result(dev_num, WL_PHY_REG); */
1264 /* restore saved values */
1265 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1266 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1267 /* restore Read Data Sample Delay */
1268 CHECK_STATUS(ddr3_tip_if_write
1269 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1270 READ_DATA_SAMPLE_DELAY,
1271 read_data_sample_delay_vals[if_id],
1274 /* restore Read Data Ready Delay */
1275 CHECK_STATUS(ddr3_tip_if_write
1276 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1277 READ_DATA_READY_DELAY,
1278 read_data_ready_delay_vals[if_id],
1281 /* enable multi cs */
1282 CHECK_STATUS(ddr3_tip_if_write
1283 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1284 CS_ENABLE_REG, cs_enable_reg_val[if_id],
1288 /* Disable modt0 for CS0 training - need to adjust for multi CS */
1289 CHECK_STATUS(ddr3_tip_if_write
1290 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x1498,
1293 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1294 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1295 if (training_result[training_stage][if_id] == TEST_FAILED)
1303 * Dynamic write leveling supplementary
1305 int ddr3_tip_dynamic_write_leveling_supp(u32 dev_num)
1308 u32 if_id, bus_id, data, data_tmp;
1310 struct hws_topology_map *tm = ddr3_get_topology_map();
1312 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1313 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1316 for (bus_id = 0; bus_id < GET_TOPOLOGY_NUM_OF_BUSES();
1318 VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1319 wr_supp_res[if_id][bus_id].is_pup_fail = 1;
1320 CHECK_STATUS(ddr3_tip_bus_read
1321 (dev_num, if_id, ACCESS_TYPE_UNICAST,
1322 bus_id, DDR_PHY_DATA,
1323 WRITE_CENTRALIZATION_PHY_REG +
1324 effective_cs * CS_REGISTER_ADDR_OFFSET,
1328 ("WL Supp: adll_offset=0 data delay = %d\n",
1330 if (ddr3_tip_wl_supp_align_phase_shift
1331 (dev_num, if_id, bus_id, 0, 0) == MV_OK) {
1334 ("WL Supp: IF %d bus_id %d adll_offset=0 Success !\n",
1341 CHECK_STATUS(ddr3_tip_bus_write
1342 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1343 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
1344 WRITE_CENTRALIZATION_PHY_REG +
1345 effective_cs * CS_REGISTER_ADDR_OFFSET,
1346 data + adll_offset));
1347 CHECK_STATUS(ddr3_tip_bus_read
1348 (dev_num, if_id, ACCESS_TYPE_UNICAST,
1349 bus_id, DDR_PHY_DATA,
1350 WRITE_CENTRALIZATION_PHY_REG +
1351 effective_cs * CS_REGISTER_ADDR_OFFSET,
1355 ("WL Supp: adll_offset= %d data delay = %d\n",
1356 adll_offset, data_tmp));
1358 if (ddr3_tip_wl_supp_align_phase_shift
1359 (dev_num, if_id, bus_id, adll_offset, 0) == MV_OK) {
1362 ("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n",
1363 if_id, bus_id, adll_offset));
1369 CHECK_STATUS(ddr3_tip_bus_write
1370 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1371 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
1372 WRITE_CENTRALIZATION_PHY_REG +
1373 effective_cs * CS_REGISTER_ADDR_OFFSET,
1374 data + adll_offset));
1375 CHECK_STATUS(ddr3_tip_bus_read
1376 (dev_num, if_id, ACCESS_TYPE_UNICAST,
1377 bus_id, DDR_PHY_DATA,
1378 WRITE_CENTRALIZATION_PHY_REG +
1379 effective_cs * CS_REGISTER_ADDR_OFFSET,
1383 ("WL Supp: adll_offset= %d data delay = %d\n",
1384 adll_offset, data_tmp));
1385 if (ddr3_tip_wl_supp_align_phase_shift
1386 (dev_num, if_id, bus_id, adll_offset, 0) == MV_OK) {
1389 ("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n",
1390 if_id, bus_id, adll_offset));
1395 ("WL Supp: IF %d bus_id %d Failed !\n",
1400 DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
1401 ("WL Supp: IF %d bus_id %d is_pup_fail %d\n",
1402 if_id, bus_id, is_if_fail));
1404 if (is_if_fail == 1) {
1405 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
1406 ("WL Supp: IF %d failed\n", if_id));
1407 training_result[training_stage][if_id] = TEST_FAILED;
1409 training_result[training_stage][if_id] = TEST_SUCCESS;
1413 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1414 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1415 if (training_result[training_stage][if_id] == TEST_FAILED)
1425 static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id,
1426 u32 bus_id, u32 offset,
1429 wr_supp_res[if_id][bus_id].stage = PHASE_SHIFT;
1430 if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
1431 0, bus_id_delta) == MV_OK) {
1432 wr_supp_res[if_id][bus_id].is_pup_fail = 0;
1434 } else if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
1435 ONE_CLOCK_ERROR_SHIFT,
1436 bus_id_delta) == MV_OK) {
1438 wr_supp_res[if_id][bus_id].stage = CLOCK_SHIFT;
1439 DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
1440 ("Supp: 1 error clock for if %d pup %d with ofsset %d success\n",
1441 if_id, bus_id, offset));
1442 ddr3_tip_wl_supp_one_clk_err_shift(dev_num, if_id, bus_id, 0);
1443 wr_supp_res[if_id][bus_id].is_pup_fail = 0;
1445 } else if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
1447 bus_id_delta) == MV_OK) {
1449 DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
1450 ("Supp: align error for if %d pup %d with ofsset %d success\n",
1451 if_id, bus_id, offset));
1452 wr_supp_res[if_id][bus_id].stage = ALIGN_SHIFT;
1453 ddr3_tip_wl_supp_align_err_shift(dev_num, if_id, bus_id, 0);
1454 wr_supp_res[if_id][bus_id].is_pup_fail = 0;
1457 wr_supp_res[if_id][bus_id].is_pup_fail = 1;
1465 static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id,
1466 u32 edge_offset, u32 bus_id_delta)
1468 u32 num_of_succ_byte_compare, word_in_pattern, abs_offset;
1470 u32 read_pattern[TEST_PATTERN_LENGTH * 2];
1471 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
1472 u32 pattern_test_pattern_table[8];
1474 for (i = 0; i < 8; i++) {
1475 pattern_test_pattern_table[i] =
1476 pattern_table_get_word(dev_num, PATTERN_TEST, (u8)i);
1479 /* external write, then read and compare */
1480 CHECK_STATUS(ddr3_tip_ext_write
1482 (pattern_table[PATTERN_TEST].start_addr +
1483 ((SDRAM_CS_SIZE + 1) * effective_cs)), 1,
1484 pattern_test_pattern_table));
1486 CHECK_STATUS(ddr3_tip_reset_fifo_ptr(dev_num));
1488 CHECK_STATUS(ddr3_tip_ext_read
1490 (pattern_table[PATTERN_TEST].start_addr +
1491 ((SDRAM_CS_SIZE + 1) * effective_cs)), 1, read_pattern));
1495 ("XSB-compt: IF %d bus_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1496 if_id, bus_id, read_pattern[0], read_pattern[1],
1497 read_pattern[2], read_pattern[3], read_pattern[4],
1498 read_pattern[5], read_pattern[6], read_pattern[7]));
1500 /* compare byte per pup */
1501 num_of_succ_byte_compare = 0;
1502 for (word_in_pattern = start_xsb_offset;
1503 word_in_pattern < (TEST_PATTERN_LENGTH * 2); word_in_pattern++) {
1504 word_offset = word_in_pattern + edge_offset;
1505 if ((word_offset > (TEST_PATTERN_LENGTH * 2 - 1)) ||
1509 if ((read_pattern[word_in_pattern] & pup_mask_table[bus_id]) ==
1510 (pattern_test_pattern_table[word_offset] &
1511 pup_mask_table[bus_id]))
1512 num_of_succ_byte_compare++;
1515 abs_offset = (edge_offset > 0) ? edge_offset : -edge_offset;
1516 if (num_of_succ_byte_compare == ((TEST_PATTERN_LENGTH * 2) -
1517 abs_offset - start_xsb_offset)) {
1520 ("XSB-compt: IF %d bus_id %d num_of_succ_byte_compare %d - Success\n",
1521 if_id, bus_id, num_of_succ_byte_compare));
1526 ("XSB-compt: IF %d bus_id %d num_of_succ_byte_compare %d - Fail !\n",
1527 if_id, bus_id, num_of_succ_byte_compare));
1531 ("XSB-compt: expected 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1532 pattern_test_pattern_table[0],
1533 pattern_test_pattern_table[1],
1534 pattern_test_pattern_table[2],
1535 pattern_test_pattern_table[3],
1536 pattern_test_pattern_table[4],
1537 pattern_test_pattern_table[5],
1538 pattern_test_pattern_table[6],
1539 pattern_test_pattern_table[7]));
1542 ("XSB-compt: recieved 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1543 read_pattern[0], read_pattern[1],
1544 read_pattern[2], read_pattern[3],
1545 read_pattern[4], read_pattern[5],
1546 read_pattern[6], read_pattern[7]));
1550 ("XSB-compt: IF %d bus_id %d num_of_succ_byte_compare %d - Fail !\n",
1551 if_id, bus_id, num_of_succ_byte_compare));
1558 * Clock error shift - function moves the write leveling delay 1cc forward
1560 static int ddr3_tip_wl_supp_one_clk_err_shift(u32 dev_num, u32 if_id,
1561 u32 bus_id, u32 bus_id_delta)
1565 DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("One_clk_err_shift\n"));
1567 CHECK_STATUS(ddr3_tip_bus_read
1568 (dev_num, if_id, ACCESS_TYPE_UNICAST, bus_id,
1569 DDR_PHY_DATA, WL_PHY_REG, &data));
1570 phase = ((data >> 6) & 0x7);
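/*
 * Editor's note, inferred from the masks used in this helper:
 * WL_PHY_REG packs the ADLL into bits [4:0] and the phase into
 * bits [8:6].
 */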
1572 DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
1573 ("One_clk_err_shift: IF %d bus_id %d phase %d adll %d\n",
1574 if_id, bus_id, phase, adll));
1576 if ((phase == 0) || (phase == 1)) {
1577 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1578 (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_id,
1579 DDR_PHY_DATA, 0, (phase + 2), 0x1f));
1580 } else if (phase == 2) {
1582 data = (3 << 6) + (0x1f);
1583 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1584 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1585 bus_id, DDR_PHY_DATA, 0, data,
1586 (0x7 << 6 | 0x1f)));
1588 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1589 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1590 bus_id, DDR_PHY_DATA, 1, data, 0x3f));
1603 static int ddr3_tip_wl_supp_align_err_shift(u32 dev_num, u32 if_id,
1604 u32 bus_id, u32 bus_id_delta)
1609 /* Shift WL result 1 phase back */
1610 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST,
1611 bus_id, DDR_PHY_DATA, WL_PHY_REG,
1613 phase = ((data >> 6) & 0x7);
1617 ("Wl_supp_align_err_shift: IF %d bus_id %d phase %d adll %d\n",
1618 if_id, bus_id, phase, adll));
1627 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1628 (dev_num, ACCESS_TYPE_UNICAST,
1629 if_id, bus_id, DDR_PHY_DATA,
1630 0, data, (0x7 << 6 | 0x1f)));
1632 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1633 (dev_num, ACCESS_TYPE_UNICAST,
1634 if_id, bus_id, DDR_PHY_DATA,
1641 } else if ((phase == 2) || (phase == 3)) {
1643 data = (phase << 6) + (adll & 0x1f);
1644 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1645 (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_id,
1646 DDR_PHY_DATA, 0, data, (0x7 << 6 | 0x1f)));
1649 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
1650 ("Wl_supp_align_err_shift: unexpected phase\n"));
1659 * Dynamic write leveling sequence
1661 static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num)
1664 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
1665 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
1666 struct hws_topology_map *tm = ddr3_get_topology_map();
1668 CHECK_STATUS(ddr3_tip_if_write
1669 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1670 TRAINING_SW_2_REG, 0x1, 0x5));
1671 CHECK_STATUS(ddr3_tip_if_write
1672 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1673 TRAINING_WRITE_LEVELING_REG, 0x50, 0xff));
1674 CHECK_STATUS(ddr3_tip_if_write
1675 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1676 TRAINING_WRITE_LEVELING_REG, 0x5c, 0xff));
1677 CHECK_STATUS(ddr3_tip_if_write
1678 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1679 ODPG_TRAINING_CONTROL_REG, 0x381b82, 0x3c3faf));
1680 CHECK_STATUS(ddr3_tip_if_write
1681 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1682 ODPG_OBJ1_OPCODE_REG, (0x3 << 25), (0x3ffff << 9)));
1683 CHECK_STATUS(ddr3_tip_if_write
1684 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1685 ODPG_OBJ1_ITER_CNT_REG, 0x80, 0xffff));
1686 CHECK_STATUS(ddr3_tip_if_write
1687 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1688 ODPG_WRITE_LEVELING_DONE_CNTR_REG, 0x14, 0xff));
1689 CHECK_STATUS(ddr3_tip_if_write
1690 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1691 TRAINING_WRITE_LEVELING_REG, 0xff5c, 0xffff));
1694 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
1695 CHECK_STATUS(ddr3_tip_if_write
1696 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1697 mask_results_dq_reg_map[dq_id], 0x1 << 24,
1701 /* Mask all results */
1702 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1703 CHECK_STATUS(ddr3_tip_if_write
1704 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1705 mask_results_pup_reg_map[bus_id], 0x1 << 24,
1709 /* Unmask only wanted */
1710 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1711 VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1712 CHECK_STATUS(ddr3_tip_if_write
1713 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1714 mask_results_pup_reg_map[bus_id], 0, 0x1 << 24));
1717 CHECK_STATUS(ddr3_tip_if_write
1718 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1719 WR_LEVELING_DQS_PATTERN_REG, 0x1, 0x1));
1725 * Dynamic read leveling sequence
1727 static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num)
1730 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
1731 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
1732 struct hws_topology_map *tm = ddr3_get_topology_map();
1735 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
1736 CHECK_STATUS(ddr3_tip_if_write
1737 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1738 mask_results_dq_reg_map[dq_id], 0x1 << 24,
1742 /* Mask all results */
1743 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1744 CHECK_STATUS(ddr3_tip_if_write
1745 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1746 mask_results_pup_reg_map[bus_id], 0x1 << 24,
1750 /* Unmask only wanted */
1751 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1752 VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1753 CHECK_STATUS(ddr3_tip_if_write
1754 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1755 mask_results_pup_reg_map[bus_id], 0, 0x1 << 24));
1762 * Dynamic per-bit read leveling sequence
1764 static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num)
1767 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
1768 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
1769 struct hws_topology_map *tm = ddr3_get_topology_map();
1772 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
1773 CHECK_STATUS(ddr3_tip_if_write
1774 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1775 mask_results_dq_reg_map[dq_id], 0x1 << 24,
1779 /* Mask all results */
1780 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1781 CHECK_STATUS(ddr3_tip_if_write
1782 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1783 mask_results_pup_reg_map[bus_id], 0x1 << 24,
1787 /* Unmask only wanted */
1788 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
1789 VALIDATE_ACTIVE(tm->bus_act_mask, dq_id / 8);
1790 CHECK_STATUS(ddr3_tip_if_write
1791 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1792 mask_results_dq_reg_map[dq_id], 0x0 << 24,
1800 * Print write leveling supplementary results
1802 int ddr3_tip_print_wl_supp_result(u32 dev_num)
1804 u32 bus_id = 0, if_id = 0;
1805 struct hws_topology_map *tm = ddr3_get_topology_map();
1807 DEBUG_LEVELING(DEBUG_LEVEL_INFO,
1808 ("I/F0 PUP0 Result[0 - success, 1-fail] ...\n"));
1810 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1811 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1812 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
1814 VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1815 DEBUG_LEVELING(DEBUG_LEVEL_INFO,
1816 ("%d ,", wr_supp_res[if_id]
1817 [bus_id].is_pup_fail));
1822 ("I/F0 PUP0 Stage[0-phase_shift, 1-clock_shift, 2-align_shift] ...\n"));
1824 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1825 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1826 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
1828 VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1829 DEBUG_LEVELING(DEBUG_LEVEL_INFO,
1830 ("%d ,", wr_supp_res[if_id]