// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
/* Default verbosity for the PBS (per-bit-skew) debug output */
u8 debug_pbs = DEBUG_LEVEL_ERROR;

/*
 * API to change flags outside of the lib
 */

/* Debug flags for other Training modules */
u8 debug_training_static = DEBUG_LEVEL_ERROR;
u8 debug_training = DEBUG_LEVEL_ERROR;
u8 debug_leveling = DEBUG_LEVEL_ERROR;
u8 debug_centralization = DEBUG_LEVEL_ERROR;
u8 debug_training_ip = DEBUG_LEVEL_ERROR;
u8 debug_training_bist = DEBUG_LEVEL_ERROR;
u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
u8 debug_training_access = DEBUG_LEVEL_ERROR;
u8 debug_training_a38x = DEBUG_LEVEL_ERROR;
/*
 * Set the log verbosity of one debug block; the DEBUG_STAGES_REG_DUMP
 * (and, presumably, an "all blocks" case on elided lines) path updates
 * every per-module flag at once.
 * NOTE(review): this dump is missing lines (switch header, break
 * statements, closing braces and some case labels); code kept exactly
 * as extracted.
 */
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		debug_training_hw_alg = level;
	case DEBUG_BLOCK_DEVICE:
		debug_training_a38x = level;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
	case DEBUG_STAGES_REG_DUMP:
		if (level == DEBUG_LEVEL_TRACE)
		/* broadcast the new level to every module flag */
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_a38x = level;

/*
 * NOTE(review): duplicate signature below — most likely the empty stub
 * from the opposite branch of an #if/#else (preprocessor lines elided
 * in this dump); confirm against upstream.
 */
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
/* Per-device register-access callback tables (see
 * ddr3_tip_init_config_func below, which fills this array) */
struct hws_tip_config_func_db config_func_info[HWS_MAX_DEVICE_NUM];
u8 is_default_centralization = 0;
u8 is_tune_result = 0;
u8 is_validate_window_per_if = 0;
u8 is_validate_window_per_pup = 0;
u32 is_bist_reset_bit = 1;
/* Per-device XSB info, filled by ddr3_tip_register_xsb_info() */
static struct hws_xsb_info xsb_info[HWS_MAX_DEVICE_NUM];
/*
 * Dump Dunit & Phy registers
 *
 * Prints Dunit registers 0x1400..0x19ec and PHY data/control registers
 * 0x00..0xff for every active interface (and, for PHY, every active bus).
 * NOTE(review): loop headers, closing braces and some call arguments are
 * elided in this dump; code kept as extracted.
 */
int ddr3_tip_reg_dump(u32 dev_num)
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	printf("-- dunit registers --\n");
	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, reg_addr, read_data,
			printf("0x%x ", read_data[if_id]);

	printf("-- Phy registers --\n");
	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				/* data PHY registers */
				bus_id < tm->num_of_bus_per_interface;
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_DATA, reg_addr,
				printf("0x%x ", data_value);
				/* control PHY registers */
				bus_id < tm->num_of_bus_per_interface;
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_CONTROL, reg_addr,
				printf("0x%x ", data_value);
/*
 * Register access func registration
 *
 * Copies the caller-supplied register-access callback table into
 * config_func_info[dev_num].
 * NOTE(review): the error-return body of the NULL check and the
 * function braces are elided in this dump.
 */
int ddr3_tip_init_config_func(u32 dev_num,
			      struct hws_tip_config_func_db *config_func)
	if (config_func == NULL)
	memcpy(&config_func_info[dev_num], config_func,
	       sizeof(struct hws_tip_config_func_db));
/*
 * Get training result info pointer
 *
 * Returns the result array for the given training stage.
 */
enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
	return training_result[stage];
/*
 * Fetch device information through the per-device callback registered
 * in config_func_info[], if one was provided.
 */
int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
		return config_func_info[dev_num].
			tip_get_device_info_func((u8) dev_num, info_ptr);
#ifndef EXCLUDE_SWITCH_DEBUG
/*
 * Convert freq to character string
 * NOTE(review): most DDR_FREQ_* case labels are elided in this dump.
 */
static char *convert_freq(enum hws_ddr_freq freq)
	case DDR_FREQ_LOW_FREQ:
		return "DDR_FREQ_LOW_FREQ";
		return "DDR_FREQ_360";
		return "DDR_FREQ_1000";
	return "Unknown Frequency";
/*
 * Convert device ID to character string
 * NOTE(review): the known-device case labels are elided in this dump.
 */
static char *convert_dev_id(u32 dev_id)
	return "Unknown Device";
/*
 * Convert memory-size code to character string
 * (original header said "device ID" — copy/paste artifact; the name and
 * fallback string show this maps a memory-size code)
 */
static char *convert_mem_size(u32 dev_id)
	return "wrong mem size";
/*
 * Print DDR setup information obtained from the registered
 * device-info callback.
 */
int print_device_info(u8 dev_num)
	struct ddr3_device_info info_ptr;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
	printf("=== DDR setup END===\n");
/*
 * Enable/disable sweep-test window validation; enabling also raises the
 * main training log level to TRACE.
 * NOTE(review): the if/else around the two assignment groups is elided.
 */
void hws_ddr3_tip_sweep_test(int enable)
		is_validate_window_per_if = 1;
		is_validate_window_per_pup = 1;
		debug_training = DEBUG_LEVEL_TRACE;
		is_validate_window_per_if = 0;
		is_validate_window_per_pup = 0;
/* Map a training-stage result code to a printable string
 * (PASS/FAIL case labels elided in this dump). */
char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
	switch (tune_result) {
		return "NOT COMPLETED";
/*
 * Print the per-interface result of every executed training stage
 * (selected by mask_tune_func); when window validation is enabled it
 * first runs the RX/TX sweep tests and dumps PBS / WL-supplement
 * results.
 * NOTE(review): closing braces and some macro-argument lines are elided
 * in this dump; code kept as extracted.
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
	struct hws_topology_map *tm = ddr3_get_topology_map();

#ifndef EXCLUDE_SWITCH_DEBUG
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		enum hws_ddr_freq freq;

		freq = tm->interface_params[first_active_if].memory_freq;
		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s ======\n", convert_freq(freq));
		/* print sweep windows */
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);

	/* one status line per executed stage, per active interface */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));

		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING]
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING]
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_SUPP]
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_RX]
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_TX]
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_TF]
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING_TF]
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					    [WRITE_LEVELING_SUPP_TF]
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_RX]
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_TX]
/*
 * Print stability log info
 *
 * Emits a CSV-style report: one title row per active interface, then a
 * data row with calibration values (regs 0x14c8/0x17c8/0x1dc8), and per
 * CS/bus: write/read leveling, centralization, and PBS pad values read
 * from the data PHYs.
 * NOTE(review): loop headers, closing braces and some argument lines
 * are elided in this dump; occurrences of "®_data" are mojibake for
 * "&reg_data".
 */
int ddr3_tip_print_stability_log(u32 dev_num)
	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* title row */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
				for (idx = 0; idx < 11; idx++)
					printf("PBSTx-Pad%d,", idx);
				for (idx = 0; idx < 11; idx++)
					printf("PBSRx-Pad%d,", idx);

	/* data rows */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		printf("Data: %d,%d,", if_id,
		       (config_func_info[dev_num].tip_get_temperature != NULL)
		       ? (config_func_info[dev_num].
			  tip_get_temperature(dev_num)) : (0));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
		       ((read_data[if_id] & 0xfc00000) >> 22));

		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				/* validation window result */
				ddr3_tip_bus_read(dev_num, if_id,
						  bus_id, DDR_PHY_DATA,
						  RESULT_DB_PHY_REG_ADDR +
				printf("%d,%d,", (reg_data & 0x1f),
				       ((reg_data & 0x3e0) >> 5));
				/* write leveling */
				ddr3_tip_bus_read(dev_num, if_id,
						  bus_id, DDR_PHY_DATA,
						  csindex * 4, ®_data);
				       ((reg_data & 0x1c0) >> 6) * 32,
				       (reg_data & 0x1c0) >> 6);
				/* read leveling sample delay */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      READ_DATA_SAMPLE_DELAY,
					      read_data, MASK_ALL_BITS));
					(0xf << (4 * csindex))) >>
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  RL_PHY_REG + csindex * 4,
				printf("%d,%d,%d,%d,",
				       ((reg_data & 0x1c0) >> 6) * 32 +
				       read_data[if_id] * 64,
				       ((reg_data & 0x1c0) >> 6),
				/* centralization (TX then RX) */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  WRITE_CENTRALIZATION_PHY_REG
						  + csindex * 4, ®_data);
				printf("%d,", (reg_data & 0x3f));
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  READ_CENTRALIZATION_PHY_REG
						  + csindex * 4, ®_data);
				printf("%d,", (reg_data & 0x1f));
				/* vref */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
				printf("%d,", (reg_data & 0x7));
				/* Need to add the Read Function from device */
				/* PBS TX pads */
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  bus_id, DDR_PHY_DATA,
					printf("%d,", (reg_data & 0x3f));
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  bus_id, DDR_PHY_DATA,
					printf("%d,", (reg_data & 0x3f));
				/* PBS RX pads */
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  bus_id, DDR_PHY_DATA,
					printf("%d,", (reg_data & 0x3f));
/*
 * Register XSB information
 *
 * Copies the caller's XSB info table into the per-device xsb_info[]
 * slot.
 */
int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
/*
 * Read one ADLL register from every active interface/bus data-PHY into
 * pup_values[] (indexed if_id * num_of_bus_per_interface + bus_id),
 * masking each value with 'mask'.
 */
int read_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		    int reg_addr, u32 mask)
	u32 if_id = 0, bus_id = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       DDR_PHY_DATA, reg_addr,
				tm->num_of_bus_per_interface + bus_id] =
/*
 * Write per-pup ADLL values from pup_values[] (same indexing as
 * read_adll_value) back to the data PHYs.
 */
int write_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
	u32 if_id = 0, bus_id = 0;
	u32 dev_num = 0, data;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
					  tm->num_of_bus_per_interface +
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							bus_id, DDR_PHY_DATA,
#ifndef EXCLUDE_SWITCH_DEBUG
u32 rl_version = 1;	/* 0 - old RL machine */
struct hws_tip_config_func_db config_func_info[HWS_MAX_DEVICE_NUM];
u32 start_xsb_offset = 0;
u8 is_dfs_disabled = 0;
u32 default_centrlization_value = 0x12;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;
/* sweep-test working buffers */
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
	/* NOTE(review): orphan initializer row — its declaration line
	 * (presumably cs_mask_reg[], cf. the #else branch below) is
	 * elided in this dump */
	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0

/* XSB data patterns: walking nibbles, alternating all-0/all-1 words,
 * and mixed rows (closing "};" elided in this dump) */
u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}

/* forward declaration: flag_id -> variable resolver used by the
 * attribute get/set API */
static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr);
/*
 * Trace-print PHY data registers 0x1..0x3 for every active
 * interface/bus.
 */
int ddr3_tip_print_adll(void)
	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
			CHECK_STATUS(ddr3_tip_bus_read
				      ACCESS_TYPE_UNICAST, bus_cnt,
				      DDR_PHY_DATA, 0x1, &data_p1));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
					   if_id, bus_cnt, data_p1, data_p2,
/*
 * Set attribute value
 *
 * Resolves flag_id to the backing variable via ddr3_tip_access_atr()
 * and logs the new (and, when resolvable, previous) value.
 */
int ddr3_tip_set_atr(u32 dev_num, u32 flag_id, u32 value)
	u32 *ptr_flag = NULL;

	ret = ddr3_tip_access_atr(dev_num, flag_id, value, &ptr_flag);
	if (ptr_flag != NULL) {
		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x (was 0x%x)\n",
		       flag_id, value, *ptr_flag);
		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x\n",
/*
 * Map a numeric attribute flag_id to a pointer at the variable that
 * backs it, so callers can read/modify tuning knobs by ID. Low IDs map
 * to individual globals; ranges 0x200..0x3ef index per-interface /
 * per-pup topology fields; 0x500..0x50f index clamp_tbl[].
 * NOTE(review): the switch header and all low-range case labels are
 * elided in this dump — only the *ptr assignments remain; code kept
 * as extracted.
 */
static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr)
	u32 tmp_val = 0, if_id = 0, pup_id = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

		*ptr = (u32 *)&(tm->if_act_mask);
		*ptr = (u32 *)&mask_tune_func;
		*ptr = (u32 *)&low_freq;
		*ptr = (u32 *)&medium_freq;
		*ptr = (u32 *)&generic_init_controller;
		*ptr = (u32 *)&rl_version;
		*ptr = (u32 *)&start_xsb_offset;
		*ptr = (u32 *)&is_rl_old;
		*ptr = (u32 *)&is_freq_old;
		*ptr = (u32 *)&is_dfs_disabled;
		*ptr = (u32 *)&is_pll_before_init;
		*ptr = (u32 *)&is_adll_calib_before_init;
		*ptr = (u32 *)&is_tune_result;
		*ptr = (u32 *)&is_validate_window_per_if;
		*ptr = (u32 *)&is_validate_window_per_pup;
		*ptr = (u32 *)&sweep_cnt;
		*ptr = (u32 *)&is_bist_reset_bit;
		*ptr = (u32 *)&is_dfs_in_init;
		*ptr = (u32 *)&p_finger;
		*ptr = (u32 *)&n_finger;
		*ptr = (u32 *)&init_freq;
		*ptr = (u32 *)&(freq_val[DDR_FREQ_LOW_FREQ]);
		*ptr = (u32 *)&start_pattern;
		*ptr = (u32 *)&end_pattern;
		*ptr = (u32 *)&phy_reg0_val;
		*ptr = (u32 *)&phy_reg1_val;
		*ptr = (u32 *)&phy_reg2_val;
		*ptr = (u32 *)&phy_reg3_val;
		*ptr = (u32 *)&sweep_pattern;
		*ptr = (u32 *)&is_rzq6;
		*ptr = (u32 *)&znri_data_phy_val;
		*ptr = (u32 *)&zpri_data_phy_val;
		*ptr = (u32 *)&finger_test;
		*ptr = (u32 *)&n_finger_start;
		*ptr = (u32 *)&n_finger_end;
		*ptr = (u32 *)&p_finger_start;
		*ptr = (u32 *)&p_finger_end;
		*ptr = (u32 *)&p_finger_step;
		*ptr = (u32 *)&n_finger_step;
		*ptr = (u32 *)&znri_ctrl_phy_val;
		*ptr = (u32 *)&zpri_ctrl_phy_val;
		*ptr = (u32 *)&is_reg_dump;
		*ptr = (u32 *)&mode2_t;
		*ptr = (u32 *)&xsb_validate_type;
		*ptr = (u32 *)&xsb_validation_base_address;
		*ptr = (u32 *)&activate_select_before_run_alg;
		*ptr = (u32 *)&activate_deselect_after_run_alg;
		*ptr = (u32 *)&odt_additional;
		*ptr = (u32 *)&debug_mode;
		*ptr = (u32 *)&pbs_pattern;
		*ptr = (u32 *)&delay_enable;
		*ptr = (u32 *)&ck_delay;
		*ptr = (u32 *)&ck_delay_16;
		*ptr = (u32 *)&ca_delay;
		*ptr = (u32 *)&debug_dunit;
		/* direct-write flags: value is applied here instead of
		 * returning a pointer */
		debug_acc = (int)value;
		debug_training = (u8)value;
		debug_training_bist = (u8)value;
		debug_centralization = (u8)value;
		debug_training_ip = (u8)value;
		debug_leveling = (u8)value;
		debug_pbs = (u8)value;
		debug_training_static = (u8)value;
		debug_training_access = (u8)value;
		*ptr = &start_pattern;
		*ptr = &end_pattern;
		/* ranged IDs: per-interface / per-pup topology fields */
		if ((flag_id >= 0x200) && (flag_id < 0x210)) {
			if_id = flag_id - 0x200;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].memory_freq);
		} else if ((flag_id >= 0x210) && (flag_id < 0x220)) {
			if_id = flag_id - 0x210;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].speed_bin_index);
		} else if ((flag_id >= 0x220) && (flag_id < 0x230)) {
			if_id = flag_id - 0x220;
			*ptr = (u32 *)&(tm->interface_params
		} else if ((flag_id >= 0x230) && (flag_id < 0x240)) {
			if_id = flag_id - 0x230;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].memory_size);
		} else if ((flag_id >= 0x240) && (flag_id < 0x250)) {
			if_id = flag_id - 0x240;
			*ptr = (u32 *)&(tm->interface_params
		} else if ((flag_id >= 0x250) && (flag_id < 0x260)) {
			if_id = flag_id - 0x250;
			*ptr = (u32 *)&(tm->interface_params
		} else if ((flag_id >= 0x270) && (flag_id < 0x2cf)) {
			if_id = (flag_id - 0x270) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x270) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].is_ck_swap);
		} else if ((flag_id >= 0x2d0) && (flag_id < 0x32f)) {
			if_id = (flag_id - 0x2d0) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x2d0) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].is_dqs_swap);
		} else if ((flag_id >= 0x330) && (flag_id < 0x38f)) {
			if_id = (flag_id - 0x330) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x330) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].cs_bitmask);
		} else if ((flag_id >= 0x390) && (flag_id < 0x3ef)) {
			if_id = (flag_id - 0x390) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x390) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].as_bus_params
					[pup_id].mirror_enable_bitmask);
		} else if ((flag_id >= 0x500) && (flag_id <= 0x50f)) {
			/* NOTE(review): offset 0x320 looks inconsistent with
			 * the 0x500..0x50f guard — it would index clamp_tbl
			 * at 0x1e0..0x1ef; confirm against upstream */
			tmp_val = flag_id - 0x320;
			*ptr = (u32 *)&(clamp_tbl[tmp_val]);
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("flag_id out of boundary %d\n",
			return MV_BAD_PARAM;
#ifndef EXCLUDE_SWITCH_DEBUG
/*
 * Print one ADLL value per active bus for every interface from adll[]
 * (same indexing as read_adll_value).
 */
int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (j = 0; j < tm->num_of_bus_per_interface; j++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++) {
				adll[i * tm->num_of_bus_per_interface + j]);
/* byte_index - only byte 0, 1, 2, or 3, 0xff - test all bytes */
/*
 * Compare a source and destination burst (masked to the selected byte
 * lane) and dump expected vs. received words on mismatch.
 */
static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
	u32 burst_cnt = 0, addr_offset, i_id;

	/* build the byte-lane mask: all bytes or one selected byte */
		0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
		if ((p_src[burst_cnt] & addr_offset) !=
		    (p_dst[burst_cnt] & addr_offset))

	if (b_is_fail == 1) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("IF %d exp: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("0x%8x ", p_src[i_id]));
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("\n_i_f %d rcv: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("(0x%8x ", p_dst[i_id]));
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
/* test_type = 0-tx , 1-rx */
/*
 * Sweep the ADLL of each selected pup over its full range, restoring
 * the original value afterwards when is_modify_adll is set.
 * NOTE(review): register-address selection lines and loop bodies are
 * partially elided in this dump.
 */
int ddr3_tip_sweep_test(u32 dev_num, u32 test_type,
			u32 mem_addr, u32 is_modify_adll,
			u32 start_if, u32 end_if, u32 startpup, u32 endpup)
	u32 bus_cnt = 0, adll_val = 0, if_id, ui_prev_adll, ui_mask_bit,
		end_adll, start_adll;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (test_type == 0) {
		end_adll = ui_mask_bit;
		end_adll = ui_mask_bit;

	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("==============================\n"));
	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("Test type %d (0-tx, 1-rx)\n", test_type));

	for (if_id = start_if; if_id <= end_if; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = startpup; bus_cnt < endpup; bus_cnt++) {
			/* save the current ADLL before sweeping */
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, reg_addr,
			for (adll_val = start_adll; adll_val <= end_adll;
				if (is_modify_adll == 1) {
					CHECK_STATUS(ddr3_tip_bus_read_modify_write
						      ACCESS_TYPE_UNICAST,
						      DDR_PHY_DATA, reg_addr,
						      adll_val, ui_mask_bit));
			/* restore the saved ADLL value */
			if (is_modify_adll == 1) {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA, reg_addr,
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
#ifndef EXCLUDE_SWITCH_DEBUG
/*
 * Sweep ADLL over its range on every interface/pup, running BIST at
 * each step, then print per-step pass counters and restore the
 * original (algorithm-trained) DQS/ADLL values.
 * direction: 0 = TX (write centralization reg), 1 = RX (read
 * centralization reg).
 * NOTE(review): many interior lines (loop headers, call arguments,
 * closing braces) are elided in this dump.
 */
int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
	u32 pup = 0, start_pup = 0, end_pup = 0;
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int reg = (direction == 0) ? WRITE_CENTRALIZATION_PHY_REG :
		READ_CENTRALIZATION_PHY_REG;
	enum hws_access_type pup_access;
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

		/* per-pup logging: iterate pups individually */
		end_pup = tm->num_of_bus_per_interface - 1;
		pup_access = ACCESS_TYPE_UNICAST;
		pup_access = ACCESS_TYPE_MULTICAST;

	for (cs = 0; cs < max_cs; cs++) {
		/* reset result counters */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
				if_id <= MAX_INTERFACE_NUM - 1;
				for (pup = start_pup; pup <= end_pup; pup++) {
					ctrl_sweepres[adll][if_id][pup] =

		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
			ctrl_adll[adll] = 0;
		/* Save DQS value(after algorithm run) */
		read_adll_value(ctrl_adll,
				(reg + (cs * CS_REGISTER_ADDR_OFFSET)),

		/*
		 * Sweep ADLL from 0:31 on all I/F on all Pup and perform
		 * BIST on each stage.
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
					(direction == 0) ? (adll * 2) : adll;
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_MULTICAST, 0,
					      pup_access, pup, DDR_PHY_DATA,
					      reg + CS_REG_VALUE(cs),
				hws_ddr3_run_bist(dev_num, sweep_pattern, res,
				/* ddr3_tip_reset_fifo_ptr(dev_num); */
					if_id <= MAX_INTERFACE_NUM - 1;
					ctrl_sweepres[adll][if_id][pup]
					     ACCESS_TYPE_UNICAST,
					     ACCESS_TYPE_UNICAST,
					     reg + CS_REG_VALUE(cs),
					     tm->num_of_bus_per_interface

		/* header row of the sweep result table */
		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
		       ((direction == 0) ? "TX" : "RX"));
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				printf("I/F%d , ", if_id);

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? (adll * 2) : adll;
			printf("Final,%s, Sweep, Result, %d ,",
			       ((direction == 0) ? "TX" : "RX"), adll_value);
				if_id <= MAX_INTERFACE_NUM - 1;
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
						ctrl_sweepres[adll][if_id]

		/*
		 * Write back to the phy the Rx DQS value, we store in
		 */
		write_adll_value(ctrl_adll,
				 (reg + cs * CS_REGISTER_ADDR_OFFSET));
		/* print adll results */
		read_adll_value(ctrl_adll, (reg + cs * CS_REGISTER_ADDR_OFFSET),
		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);

	ddr3_tip_reset_fifo_ptr(dev_num);
/*
 * Pretty-print the DDR topology map: global masks, then per-interface
 * frequency/speed-bin/size/CAS settings and per-bus CS/mirror/swap
 * parameters.
 */
void print_topology(struct hws_topology_map *topology_db)
	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
	printf("\tNum Bus: %d\n", topology_db->num_of_bus_per_interface);
	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);

	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
		VALIDATE_ACTIVE(topology_db->if_act_mask, ui);
		printf("\n\tInterface ID: %d\n", ui);
		printf("\t\tDDR Frequency: %s\n",
		       convert_freq(topology_db->
				    interface_params[ui].memory_freq));
		printf("\t\tSpeed_bin: %d\n",
		       topology_db->interface_params[ui].speed_bin_index);
		printf("\t\tBus_width: %d\n",
		       (4 << topology_db->interface_params[ui].bus_width));
		printf("\t\tMem_size: %s\n",
		       convert_mem_size(topology_db->
					interface_params[ui].memory_size));
		printf("\t\tCAS-WL: %d\n",
		       topology_db->interface_params[ui].cas_wl);
		printf("\t\tCAS-L: %d\n",
		       topology_db->interface_params[ui].cas_l);
		printf("\t\tTemperature: %d\n",
		       topology_db->interface_params[ui].interface_temp);

		for (uj = 0; uj < 4; uj++) {
			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
			       topology_db->interface_params[ui].
			       as_bus_params[uj].cs_bitmask);
			printf("Mirror: 0x%x\t",
			       topology_db->interface_params[ui].
			       as_bus_params[uj].mirror_enable_bitmask);
			printf("DQS Swap is %s \t",
				interface_params[ui].as_bus_params[uj].
				is_dqs_swap == 1) ? "enabled" : "disabled");
			printf("Ck Swap:%s\t",
				interface_params[ui].as_bus_params[uj].
				is_ck_swap == 1) ? "enabled" : "disabled");
/*
 * Execute XSB Test transaction (rd/wr/both)
 *
 * Walks 'burst_length' bursts starting at mem_addr on every active
 * interface, cycling through xsb_test_table[] patterns; optionally
 * writes, reads back, and compares. Returns the first failure status
 * seen (MV_OK otherwise).
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
	u32 seq = 0, if_id = 0, addr, cnt;
	int ret = MV_OK, ret_tmp;
	u32 data_read[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (cnt = 0; cnt <= burst_length; cnt++) {
			seq = (seq + 1) % 8;
			if (write_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_write
					     (dev_num, if_id, addr, 1,
					      xsb_test_table[seq]));
			if (read_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_read
					     (dev_num, if_id, addr, 1,
			if ((read_type != 0) && (write_type != 0)) {
					ddr3_tip_compare(if_id,
							 xsb_test_table[seq],
			addr += (EXT_ACCESS_BURST_LENGTH * 4);
			/* keep the first non-OK status across iterations */
			ret = (ret != MV_OK) ? ret : ret_tmp;
#else /*EXCLUDE_SWITCH_DEBUG */

/* Minimal definitions retained when switch-debug support is compiled
 * out (closing "};" of cs_mask_reg elided in this dump) */
u32 rl_version = 1;	/* 0 - old RL machine */
u32 start_xsb_offset = 0;
u8 cs_mask_reg[] = {
	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
1511 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1512 u32 read_type, u32 burst_length)