2 * Copyright (C) Marvell International Ltd. and its affiliates
4 * SPDX-License-Identifier: GPL-2.0
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
15 #define TYPICAL_PBS_VALUE 12
17 u32 nominal_adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
18 enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
19 u8 result_mat[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
20 u8 result_mat_rx_dqs[MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
21 /* 4-EEWA, 3-EWA, 2-SWA, 1-Fail, 0-Pass */
22 u8 result_all_bit[MAX_BUS_NUM * BUS_WIDTH_IN_BITS * MAX_INTERFACE_NUM];
23 u8 max_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
24 u8 min_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
25 u8 max_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
26 u8 min_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
27 u32 pbsdelay_per_pup[NUM_OF_PBS_MODES][MAX_INTERFACE_NUM][MAX_BUS_NUM];
28 u8 adll_shift_lock[MAX_INTERFACE_NUM][MAX_BUS_NUM];
29 u8 adll_shift_val[MAX_INTERFACE_NUM][MAX_BUS_NUM];
30 enum hws_pattern pbs_pattern = PATTERN_VREF;
31 static u8 pup_state[MAX_INTERFACE_NUM][MAX_BUS_NUM];
38 * Returns: OK if success, other error code if fail.
40 int ddr3_tip_pbs(u32 dev_num, enum pbs_dir pbs_mode)
42 u32 res0[MAX_INTERFACE_NUM];
43 int adll_tap = MEGA / freq_val[medium_freq] / 64;
45 enum hws_search_dir search_dir =
46 (pbs_mode == PBS_RX_MODE) ? HWS_HIGH2LOW : HWS_LOW2HIGH;
47 enum hws_dir dir = (pbs_mode == PBS_RX_MODE) ? OPER_READ : OPER_WRITE;
48 int iterations = (pbs_mode == PBS_RX_MODE) ? 31 : 63;
49 u32 res_valid_mask = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
50 int init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
51 enum hws_edge_compare search_edge = EDGE_FP;
52 u32 pup = 0, bit = 0, if_id = 0, all_lock = 0, cs_num = 0;
54 u32 validation_val = 0;
55 u32 cs_enable_reg_val[MAX_INTERFACE_NUM];
56 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
58 struct hws_topology_map *tm = ddr3_get_topology_map();
60 /* save current cs enable reg val */
61 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
62 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
64 /* save current cs enable reg val */
65 CHECK_STATUS(ddr3_tip_if_read
66 (dev_num, ACCESS_TYPE_UNICAST, if_id,
67 CS_ENABLE_REG, cs_enable_reg_val, MASK_ALL_BITS));
69 /* enable single cs */
70 CHECK_STATUS(ddr3_tip_if_write
71 (dev_num, ACCESS_TYPE_UNICAST, if_id,
72 CS_ENABLE_REG, (1 << 3), (1 << 3)));
75 reg_addr = (pbs_mode == PBS_RX_MODE) ?
76 (READ_CENTRALIZATION_PHY_REG +
77 (effective_cs * CS_REGISTER_ADDR_OFFSET)) :
78 (WRITE_CENTRALIZATION_PHY_REG +
79 (effective_cs * CS_REGISTER_ADDR_OFFSET));
80 read_adll_value(nominal_adll, reg_addr, MASK_ALL_BITS);
82 /* stage 1 shift ADLL */
83 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
84 PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
85 PARAM_NOT_CARE, RESULT_PER_BIT,
86 HWS_CONTROL_ELEMENT_ADLL, search_dir, dir,
87 tm->if_act_mask, init_val, iterations,
88 pbs_pattern, search_edge, CS_SINGLE, cs_num,
90 validation_val = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0;
91 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
92 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
93 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
94 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
95 min_adll_per_pup[if_id][pup] =
96 (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
97 pup_state[if_id][pup] = 0x3;
98 adll_shift_lock[if_id][pup] = 1;
99 max_adll_per_pup[if_id][pup] = 0x0;
104 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
105 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
106 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
107 CHECK_STATUS(ddr3_tip_if_read
108 (dev_num, ACCESS_TYPE_MULTICAST,
110 mask_results_dq_reg_map[
111 bit + pup * BUS_WIDTH_IN_BITS],
112 res0, MASK_ALL_BITS));
113 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
115 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
116 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
117 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
120 if (pup_state[if_id][pup] != 3)
122 /* if not EBA state than move to next pup */
124 if ((res0[if_id] & 0x2000000) == 0) {
125 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
126 ("-- Fail Training IP\n"));
127 /* training machine failed */
128 pup_state[if_id][pup] = 1;
129 adll_shift_lock[if_id][pup] = 0;
133 else if ((res0[if_id] & res_valid_mask) ==
135 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
136 ("-- FAIL EBA %d %d %d %d\n",
139 pup_state[if_id][pup] = 4;
140 /* this pup move to EEBA */
141 adll_shift_lock[if_id][pup] = 0;
145 * The search ended in Pass we need
149 (pbs_mode == PBS_RX_MODE) ?
151 res_valid_mask) + 1) :
153 res_valid_mask) - 1);
154 max_adll_per_pup[if_id][pup] =
155 (max_adll_per_pup[if_id][pup] <
158 max_adll_per_pup[if_id][pup];
159 min_adll_per_pup[if_id][pup] =
161 min_adll_per_pup[if_id][pup]) ?
162 min_adll_per_pup[if_id][pup] :
166 * vs the Rx we are searching for the
167 * smallest value of DQ shift so all
170 adll_shift_val[if_id][pup] =
171 (pbs_mode == PBS_RX_MODE) ?
172 max_adll_per_pup[if_id][pup] :
173 min_adll_per_pup[if_id][pup];
180 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
181 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
182 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
183 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
185 if (pup_state[if_id][pup] != 4)
188 * if pup state different from EEBA than move to
191 reg_addr = (pbs_mode == PBS_RX_MODE) ?
192 (0x54 + effective_cs * 0x10) :
193 (0x14 + effective_cs * 0x10);
194 CHECK_STATUS(ddr3_tip_bus_write
195 (dev_num, ACCESS_TYPE_UNICAST, if_id,
196 ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
198 reg_addr = (pbs_mode == PBS_RX_MODE) ?
199 (0x55 + effective_cs * 0x10) :
200 (0x15 + effective_cs * 0x10);
201 CHECK_STATUS(ddr3_tip_bus_write
202 (dev_num, ACCESS_TYPE_UNICAST, if_id,
203 ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
205 /* initialize the Edge2 Max. */
206 adll_shift_val[if_id][pup] = 0;
207 min_adll_per_pup[if_id][pup] =
208 (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
209 max_adll_per_pup[if_id][pup] = 0x0;
211 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
213 ACCESS_TYPE_MULTICAST,
214 PARAM_NOT_CARE, RESULT_PER_BIT,
215 HWS_CONTROL_ELEMENT_ADLL,
217 tm->if_act_mask, init_val,
218 iterations, pbs_pattern,
219 search_edge, CS_SINGLE, cs_num,
221 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
222 ("ADLL shift results:\n"));
224 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
225 CHECK_STATUS(ddr3_tip_if_read
226 (dev_num, ACCESS_TYPE_MULTICAST,
228 mask_results_dq_reg_map[
231 res0, MASK_ALL_BITS));
232 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
233 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
237 if ((res0[if_id] & 0x2000000) == 0) {
238 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
239 (" -- EEBA Fail\n"));
240 bit = BUS_WIDTH_IN_BITS;
242 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
243 ("-- EEBA Fail Training IP\n"));
245 * training machine failed but pass
246 * before in the EBA so maybe the DQS
249 pup_state[if_id][pup] = 2;
250 adll_shift_lock[if_id][pup] = 0;
251 reg_addr = (pbs_mode == PBS_RX_MODE) ?
252 (0x54 + effective_cs * 0x10) :
253 (0x14 + effective_cs * 0x10);
254 CHECK_STATUS(ddr3_tip_bus_write
258 ACCESS_TYPE_UNICAST, pup,
259 DDR_PHY_DATA, reg_addr,
261 reg_addr = (pbs_mode == PBS_RX_MODE) ?
262 (0x55 + effective_cs * 0x10) :
263 (0x15 + effective_cs * 0x10);
264 CHECK_STATUS(ddr3_tip_bus_write
268 ACCESS_TYPE_UNICAST, pup,
269 DDR_PHY_DATA, reg_addr,
272 } else if ((res0[if_id] & res_valid_mask) ==
275 bit = BUS_WIDTH_IN_BITS;
276 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
278 /* this pup move to SBA */
279 pup_state[if_id][pup] = 2;
280 adll_shift_lock[if_id][pup] = 0;
281 reg_addr = (pbs_mode == PBS_RX_MODE) ?
282 (0x54 + effective_cs * 0x10) :
283 (0x14 + effective_cs * 0x10);
284 CHECK_STATUS(ddr3_tip_bus_write
288 ACCESS_TYPE_UNICAST, pup,
289 DDR_PHY_DATA, reg_addr,
291 reg_addr = (pbs_mode == PBS_RX_MODE) ?
292 (0x55 + effective_cs * 0x10) :
293 (0x15 + effective_cs * 0x10);
294 CHECK_STATUS(ddr3_tip_bus_write
298 ACCESS_TYPE_UNICAST, pup,
299 DDR_PHY_DATA, reg_addr,
303 adll_shift_lock[if_id][pup] = 1;
305 * The search ended in Pass we need
309 (pbs_mode == PBS_RX_MODE) ?
311 res_valid_mask) + 1) :
313 res_valid_mask) - 1);
314 max_adll_per_pup[if_id][pup] =
315 (max_adll_per_pup[if_id][pup] <
318 max_adll_per_pup[if_id][pup];
319 min_adll_per_pup[if_id][pup] =
321 min_adll_per_pup[if_id][pup]) ?
322 min_adll_per_pup[if_id][pup] :
325 * vs the Rx we are searching for the
326 * smallest value of DQ shift so all Bus
329 adll_shift_val[if_id][pup] =
330 (pbs_mode == PBS_RX_MODE) ?
331 max_adll_per_pup[if_id][pup] :
332 min_adll_per_pup[if_id][pup];
338 /* Print Stage result */
339 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
340 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
341 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
342 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
343 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
344 ("FP I/F %d, ADLL Shift for EBA: pup[%d] Lock status = %d Lock Val = %d,%d\n",
346 adll_shift_lock[if_id][pup],
347 max_adll_per_pup[if_id][pup],
348 min_adll_per_pup[if_id][pup]));
351 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
352 ("Update ADLL Shift of all pups:\n"));
354 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
355 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
356 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
357 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
358 if (adll_shift_lock[if_id][pup] != 1)
360 /* if pup not locked continue to next pup */
362 reg_addr = (pbs_mode == PBS_RX_MODE) ?
363 (0x3 + effective_cs * 4) :
364 (0x1 + effective_cs * 4);
365 CHECK_STATUS(ddr3_tip_bus_write
366 (dev_num, ACCESS_TYPE_UNICAST, if_id,
367 ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
368 reg_addr, adll_shift_val[if_id][pup]));
369 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
370 ("FP I/F %d, Pup[%d] = %d\n", if_id,
371 pup, adll_shift_val[if_id][pup]));
376 /* Start the Per Bit Skew search */
377 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
378 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
379 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
380 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
381 max_pbs_per_pup[if_id][pup] = 0x0;
382 min_pbs_per_pup[if_id][pup] = 0x1f;
383 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
384 /* reset result for PBS */
385 result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
386 if_id * MAX_BUS_NUM *
387 BUS_WIDTH_IN_BITS] = 0;
393 search_dir = HWS_LOW2HIGH;
394 /* !!!!! ran sh (search_dir == HWS_LOW2HIGH)?0:iterations; */
397 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
398 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
399 RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
400 search_dir, dir, tm->if_act_mask, init_val,
401 iterations, pbs_pattern, search_edge,
402 CS_SINGLE, cs_num, train_status);
404 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
405 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
406 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
407 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
408 if (adll_shift_lock[if_id][pup] != 1) {
409 /* if pup not lock continue to next pup */
413 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
414 CHECK_STATUS(ddr3_tip_if_read
415 (dev_num, ACCESS_TYPE_MULTICAST,
417 mask_results_dq_reg_map[
419 pup * BUS_WIDTH_IN_BITS],
420 res0, MASK_ALL_BITS));
421 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
422 ("Per Bit Skew search, FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
425 if ((res0[if_id] & 0x2000000) == 0) {
426 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
427 ("--EBA PBS Fail - Training IP machine\n"));
428 /* exit the bit loop */
429 bit = BUS_WIDTH_IN_BITS;
431 * ADLL is no long in lock need new
434 adll_shift_lock[if_id][pup] = 0;
436 pup_state[if_id][pup] = 2;
437 max_pbs_per_pup[if_id][pup] = 0x0;
438 min_pbs_per_pup[if_id][pup] = 0x1f;
441 temp = (u8)(res0[if_id] &
443 max_pbs_per_pup[if_id][pup] =
445 max_pbs_per_pup[if_id][pup]) ?
447 max_pbs_per_pup[if_id][pup];
448 min_pbs_per_pup[if_id][pup] =
450 min_pbs_per_pup[if_id][pup]) ?
452 min_pbs_per_pup[if_id][pup];
454 pup * BUS_WIDTH_IN_BITS +
455 if_id * MAX_BUS_NUM *
463 /* Check all Pup lock */
465 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
466 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
467 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
468 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
469 all_lock = all_lock * adll_shift_lock[if_id][pup];
473 /* Only if not all Pups Lock */
475 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
476 ("##########ADLL shift for SBA###########\n"));
478 /* ADLL shift for SBA */
479 search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH :
481 init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
482 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
483 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
484 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
486 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
487 if (adll_shift_lock[if_id][pup] == 1) {
488 /*if pup lock continue to next pup */
491 /* re-initialize the variables (same values as the initial setup) */
492 adll_shift_lock[if_id][pup] = 0;
493 reg_addr = (pbs_mode == PBS_RX_MODE) ?
494 (0x54 + effective_cs * 0x10) :
495 (0x14 + effective_cs * 0x10);
496 CHECK_STATUS(ddr3_tip_bus_write
497 (dev_num, ACCESS_TYPE_UNICAST,
498 if_id, ACCESS_TYPE_UNICAST, pup,
499 DDR_PHY_DATA, reg_addr, 0));
500 reg_addr = (pbs_mode == PBS_RX_MODE) ?
501 (0x55 + effective_cs * 0x10) :
502 (0x15 + effective_cs * 0x10);
503 CHECK_STATUS(ddr3_tip_bus_write
504 (dev_num, ACCESS_TYPE_UNICAST,
505 if_id, ACCESS_TYPE_UNICAST, pup,
506 DDR_PHY_DATA, reg_addr, 0));
507 reg_addr = (pbs_mode == PBS_RX_MODE) ?
508 (0x5f + effective_cs * 0x10) :
509 (0x1f + effective_cs * 0x10);
510 CHECK_STATUS(ddr3_tip_bus_write
511 (dev_num, ACCESS_TYPE_UNICAST,
512 if_id, ACCESS_TYPE_UNICAST, pup,
513 DDR_PHY_DATA, reg_addr, 0));
514 /* initialize the Edge2 Max. */
515 adll_shift_val[if_id][pup] = 0;
516 min_adll_per_pup[if_id][pup] = 0x1f;
517 max_adll_per_pup[if_id][pup] = 0x0;
519 ddr3_tip_ip_training(dev_num,
520 ACCESS_TYPE_MULTICAST,
522 ACCESS_TYPE_MULTICAST,
525 HWS_CONTROL_ELEMENT_ADLL,
528 init_val, iterations,
530 search_edge, CS_SINGLE,
531 cs_num, train_status);
533 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
534 CHECK_STATUS(ddr3_tip_if_read
536 ACCESS_TYPE_MULTICAST,
538 mask_results_dq_reg_map
542 res0, MASK_ALL_BITS));
545 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
546 if_id, bit, pup, res0[if_id]));
547 if ((res0[if_id] & 0x2000000) == 0) {
548 /* exit the bit loop */
549 bit = BUS_WIDTH_IN_BITS;
550 /* Fail SBA --> Fail PBS */
551 pup_state[if_id][pup] = 1;
558 * - increment to get all
561 adll_shift_lock[if_id][pup]++;
563 * The search ended in Pass
567 (pbs_mode == PBS_RX_MODE) ?
568 ((res0[if_id] & res_valid_mask) + 1) :
569 ((res0[if_id] & res_valid_mask) - 1);
570 max_adll_per_pup[if_id][pup] =
571 (max_adll_per_pup[if_id]
572 [pup] < res0[if_id]) ?
574 max_adll_per_pup[if_id][pup];
575 min_adll_per_pup[if_id][pup] =
577 min_adll_per_pup[if_id]
579 min_adll_per_pup[if_id][pup] :
582 * vs the Rx we are searching for
583 * the smallest value of DQ shift
584 * so all Bus would fail
586 adll_shift_val[if_id][pup] =
587 (pbs_mode == PBS_RX_MODE) ?
588 max_adll_per_pup[if_id][pup] :
589 min_adll_per_pup[if_id][pup];
593 adll_shift_lock[if_id][pup] =
594 (adll_shift_lock[if_id][pup] == 8) ?
596 reg_addr = (pbs_mode == PBS_RX_MODE) ?
597 (0x3 + effective_cs * 4) :
598 (0x1 + effective_cs * 4);
599 CHECK_STATUS(ddr3_tip_bus_write
600 (dev_num, ACCESS_TYPE_UNICAST,
601 if_id, ACCESS_TYPE_UNICAST, pup,
602 DDR_PHY_DATA, reg_addr,
603 adll_shift_val[if_id][pup]));
606 ("adll_shift_lock[%x][%x] = %x\n",
608 adll_shift_lock[if_id][pup]));
612 /* End ADLL Shift for SBA */
613 /* Start the Per Bit Skew search */
614 /* The ADLL shift finished with a Pass */
615 search_edge = (pbs_mode == PBS_RX_MODE) ? EDGE_PF : EDGE_FP;
616 search_dir = (pbs_mode == PBS_RX_MODE) ?
617 HWS_LOW2HIGH : HWS_HIGH2LOW;
619 /* - The initial value is different in Rx and Tx mode */
620 init_val = (pbs_mode == PBS_RX_MODE) ? 0 : iterations;
622 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
623 PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
624 PARAM_NOT_CARE, RESULT_PER_BIT,
625 HWS_CONTROL_ELEMENT_DQ_SKEW,
626 search_dir, dir, tm->if_act_mask,
627 init_val, iterations, pbs_pattern,
628 search_edge, CS_SINGLE, cs_num,
631 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
632 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
633 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
635 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
636 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
637 CHECK_STATUS(ddr3_tip_if_read
639 ACCESS_TYPE_MULTICAST,
641 mask_results_dq_reg_map
645 res0, MASK_ALL_BITS));
646 if (pup_state[if_id][pup] != 2) {
648 * if pup is not SBA continue
651 bit = BUS_WIDTH_IN_BITS;
656 ("Per Bit Skew search, PF I/F %d, bit:%d, pup:%d res0 0x%x\n",
657 if_id, bit, pup, res0[if_id]));
658 if ((res0[if_id] & 0x2000000) == 0) {
663 max_pbs_per_pup[if_id][pup] =
668 if_id * MAX_BUS_NUM *
672 temp = (u8)(res0[if_id] &
674 max_pbs_per_pup[if_id][pup] =
676 max_pbs_per_pup[if_id]
680 min_pbs_per_pup[if_id][pup] =
682 min_pbs_per_pup[if_id]
689 if_id * MAX_BUS_NUM *
692 adll_shift_lock[if_id][pup] = 1;
698 /* Check all Pup state */
700 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
702 * DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
703 * ("pup_state[%d][%d] = %d\n",if_id,pup,pup_state
711 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
712 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
713 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
714 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
716 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
717 /* if pup not lock continue to next pup */
718 if (adll_shift_lock[if_id][pup] != 1) {
721 ("PBS failed for IF #%d\n",
723 training_result[training_stage][if_id]
726 result_mat[if_id][pup][bit] = 0;
727 max_pbs_per_pup[if_id][pup] = 0;
728 min_pbs_per_pup[if_id][pup] = 0;
731 training_stage][if_id] =
732 (training_result[training_stage]
733 [if_id] == TEST_FAILED) ?
734 TEST_FAILED : TEST_SUCCESS;
735 result_mat[if_id][pup][bit] =
739 if_id * MAX_BUS_NUM *
741 min_pbs_per_pup[if_id][pup];
745 ("The abs min_pbs[%d][%d] = %d\n",
747 min_pbs_per_pup[if_id][pup]));
752 /* Clean all results */
753 ddr3_tip_clean_pbs_result(dev_num, pbs_mode);
755 /* DQ PBS register update with the final result */
756 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
757 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
758 for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
759 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
763 ("Final Results: if_id %d, pup %d, Pup State: %d\n",
764 if_id, pup, pup_state[if_id][pup]));
765 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
766 if (dq_map_table == NULL) {
769 ("dq_map_table not initialized\n"));
772 pad_num = dq_map_table[
773 bit + pup * BUS_WIDTH_IN_BITS +
774 if_id * BUS_WIDTH_IN_BITS *
775 tm->num_of_bus_per_interface];
776 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
778 result_mat[if_id][pup]
780 reg_addr = (pbs_mode == PBS_RX_MODE) ?
781 (PBS_RX_PHY_REG + effective_cs * 0x10) :
782 (PBS_TX_PHY_REG + effective_cs * 0x10);
783 CHECK_STATUS(ddr3_tip_bus_write
784 (dev_num, ACCESS_TYPE_UNICAST,
785 if_id, ACCESS_TYPE_UNICAST, pup,
786 DDR_PHY_DATA, reg_addr + pad_num,
787 result_mat[if_id][pup][bit]));
789 pbsdelay_per_pup[pbs_mode][if_id][pup] =
790 (max_pbs_per_pup[if_id][pup] ==
791 min_pbs_per_pup[if_id][pup]) ?
793 ((max_adll_per_pup[if_id][pup] -
794 min_adll_per_pup[if_id][pup]) * adll_tap /
795 (max_pbs_per_pup[if_id][pup] -
796 min_pbs_per_pup[if_id][pup]));
798 /* RX results ready, write RX also */
799 if (pbs_mode == PBS_TX_MODE) {
800 /* Write TX results */
801 reg_addr = (0x14 + effective_cs * 0x10);
802 CHECK_STATUS(ddr3_tip_bus_write
803 (dev_num, ACCESS_TYPE_UNICAST,
804 if_id, ACCESS_TYPE_UNICAST, pup,
805 DDR_PHY_DATA, reg_addr,
806 (max_pbs_per_pup[if_id][pup] -
807 min_pbs_per_pup[if_id][pup]) /
809 reg_addr = (0x15 + effective_cs * 0x10);
810 CHECK_STATUS(ddr3_tip_bus_write
811 (dev_num, ACCESS_TYPE_UNICAST,
812 if_id, ACCESS_TYPE_UNICAST, pup,
813 DDR_PHY_DATA, reg_addr,
814 (max_pbs_per_pup[if_id][pup] -
815 min_pbs_per_pup[if_id][pup]) /
818 /* Write previously stored RX results */
819 reg_addr = (0x54 + effective_cs * 0x10);
820 CHECK_STATUS(ddr3_tip_bus_write
821 (dev_num, ACCESS_TYPE_UNICAST,
822 if_id, ACCESS_TYPE_UNICAST, pup,
823 DDR_PHY_DATA, reg_addr,
824 result_mat_rx_dqs[if_id][pup]
826 reg_addr = (0x55 + effective_cs * 0x10);
827 CHECK_STATUS(ddr3_tip_bus_write
828 (dev_num, ACCESS_TYPE_UNICAST,
829 if_id, ACCESS_TYPE_UNICAST, pup,
830 DDR_PHY_DATA, reg_addr,
831 result_mat_rx_dqs[if_id][pup]
835 * RX results may affect RL results correctness,
836 * so just store the results that will be written
839 result_mat_rx_dqs[if_id][pup][effective_cs] =
840 (max_pbs_per_pup[if_id][pup] -
841 min_pbs_per_pup[if_id][pup]) / 2;
845 (", PBS tap=%d [psec] ==> skew observed = %d\n",
846 pbsdelay_per_pup[pbs_mode][if_id][pup],
847 ((max_pbs_per_pup[if_id][pup] -
848 min_pbs_per_pup[if_id][pup]) *
849 pbsdelay_per_pup[pbs_mode][if_id][pup])));
853 /* Write back to the phy the default values */
854 reg_addr = (pbs_mode == PBS_RX_MODE) ?
855 (READ_CENTRALIZATION_PHY_REG + effective_cs * 4) :
856 (WRITE_CENTRALIZATION_PHY_REG + effective_cs * 4);
857 write_adll_value(nominal_adll, reg_addr);
859 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
860 reg_addr = (pbs_mode == PBS_RX_MODE) ?
861 (0x5a + effective_cs * 0x10) :
862 (0x1a + effective_cs * 0x10);
863 CHECK_STATUS(ddr3_tip_bus_write
864 (dev_num, ACCESS_TYPE_UNICAST, if_id,
865 ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
868 /* restore cs enable value */
869 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
870 CHECK_STATUS(ddr3_tip_if_write
871 (dev_num, ACCESS_TYPE_UNICAST, if_id,
872 CS_ENABLE_REG, cs_enable_reg_val[if_id],
877 CHECK_STATUS(ddr3_tip_if_write
878 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
879 ODPG_WRITE_READ_MODE_ENABLE_REG, 0xffff, MASK_ALL_BITS));
880 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
882 * meaning that no valid window (VW) exists at all (no lock at
883 * the EBA ADLL shift at EBS)
885 if (pup_state[if_id][pup] == 1)
893 * Name: ddr3_tip_pbs_rx.
897 * Returns: OK if success, other error code if fail.
899 int ddr3_tip_pbs_rx(u32 uidev_num)
901 return ddr3_tip_pbs(uidev_num, PBS_RX_MODE);
905 * Name: ddr3_tip_pbs_tx.
909 * Returns: OK if success, other error code if fail.
911 int ddr3_tip_pbs_tx(u32 uidev_num)
913 return ddr3_tip_pbs(uidev_num, PBS_TX_MODE);
916 #ifndef EXCLUDE_SWITCH_DEBUG
920 int ddr3_tip_print_all_pbs_result(u32 dev_num)
923 u32 max_cs = hws_ddr3_tip_max_cs_get();
925 for (curr_cs = 0; curr_cs < max_cs; curr_cs++) {
926 ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_RX_MODE);
927 ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_TX_MODE);
936 int ddr3_tip_print_pbs_result(u32 dev_num, u32 cs_num, enum pbs_dir pbs_mode)
938 u32 data_value = 0, bit = 0, if_id = 0, pup = 0;
939 u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
940 (PBS_RX_PHY_REG + cs_num * 0x10) :
941 (PBS_TX_PHY_REG + cs_num * 0x10);
942 struct hws_topology_map *tm = ddr3_get_topology_map();
944 printf("CS%d, %s ,PBS\n", cs_num,
945 (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
947 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
948 printf("%s, DQ", (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
949 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
950 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
951 printf("%d ,PBS,,, ", bit);
952 for (pup = 0; pup <= tm->num_of_bus_per_interface;
954 VALIDATE_ACTIVE(tm->bus_act_mask, pup);
955 CHECK_STATUS(ddr3_tip_bus_read
957 ACCESS_TYPE_UNICAST, pup,
958 DDR_PHY_DATA, reg_addr + bit,
960 printf("%d , ", data_value);
974 int ddr3_tip_clean_pbs_result(u32 dev_num, enum pbs_dir pbs_mode)
977 u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
978 (PBS_RX_PHY_REG + effective_cs * 0x10) :
979 (PBS_TX_PHY_REG + effective_cs * 0x10);
980 struct hws_topology_map *tm = ddr3_get_topology_map();
982 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
983 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
984 for (pup = 0; pup <= tm->num_of_bus_per_interface; pup++) {
985 for (bit = 0; bit <= BUS_WIDTH_IN_BITS + 3; bit++) {
986 CHECK_STATUS(ddr3_tip_bus_write
987 (dev_num, ACCESS_TYPE_UNICAST,
988 if_id, ACCESS_TYPE_UNICAST, pup,
989 DDR_PHY_DATA, reg_addr + bit, 0));