2 * Copyright (C) Marvell International Ltd. and its affiliates
4 * SPDX-License-Identifier: GPL-2.0
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
/*
 * Helper macros for timing math.
 * NOTE(review): arguments are not fully parenthesized and are evaluated
 * more than once - callers must pass side-effect-free expressions.
 * CEIL_DIVIDE yields (x/y)-1 when y divides x exactly, else x/y, i.e.
 * ceil(x/y)-1; TIME_2_CLOCK_CYCLES relies on exactly this behavior when
 * converting picosecond times to clock-cycle counts.
 */
15 #define GET_MAX_VALUE(x, y) \
16 ((x) > (y)) ? (x) : (y)
17 #define CEIL_DIVIDE(x, y) \
18 ((x - (x / y) * y) == 0) ? ((x / y) - 1) : (x / y)
20 #define TIME_2_CLOCK_CYCLES CEIL_DIVIDE
/* Translate a CS bitmask to a CS number / to its CBE register value */
22 #define GET_CS_FROM_MASK(mask) (cs_mask2_num[mask])
23 #define CS_CBE_VALUE(cs_num) (cs_cbe_reg[cs_num])
/* t_pd value programmed into DDR_TIMING_REG (9 * tREFI encoding) */
25 #define TIMES_9_TREFI_CYCLES 0x8
/*
 * Global configuration/tuning state for the DDR3 training IP.
 * NOTE(review): this excerpt has dropped lines (embedded line numbers are
 * non-contiguous), so some definitions below are missing their original
 * surrounding context.
 */
27 u32 window_mem_addr = 0;
31 u32 phy_reg3_val = 0xa;
/* Frequencies used by the training flow: initial and DFS-low points */
32 enum hws_ddr_freq init_freq = DDR_FREQ_667;
33 enum hws_ddr_freq low_freq = DDR_FREQ_LOW_FREQ;
34 enum hws_ddr_freq medium_freq;
36 u32 odt_additional = 1;
37 u32 *dq_map_table = NULL;
/*
 * Per-SoC calibration defaults.
 * NOTE(review): the two definition sets below are duplicates for the same
 * names; an #else line (fallback for non-A38x/AC3/A39x builds) appears to
 * be missing from this excerpt between lines 43 and 45.
 */
40 #if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ALLEYCAT3) || \
41 defined(CONFIG_ARMADA_39X)
42 u32 is_pll_before_init = 0, is_adll_calib_before_init = 0, is_dfs_in_init = 0;
43 u32 dfs_low_freq = 130;
45 u32 is_pll_before_init = 0, is_adll_calib_before_init = 1, is_dfs_in_init = 0;
46 u32 dfs_low_freq = 100;
48 u32 g_rtt_nom_c_s0, g_rtt_nom_c_s1;
49 u8 calibration_update_control; /* 2 external only, 1 is internal only */
/* Per-stage, per-interface training outcome and the current stage */
51 enum hws_result training_result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
52 enum auto_tune_stage training_stage = INIT_CONTROLLER;
/* ODT finger-test sweep parameters (consumed by odt_test()) */
53 u32 finger_test = 0, p_finger_start = 11, p_finger_end = 64,
54 n_finger_start = 11, n_finger_end = 64,
55 p_finger_step = 3, n_finger_step = 3;
56 u32 clamp_tbl[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };
58 /* Initialized to 0xff; this variable is defined by the user in debug mode */
60 u32 xsb_validate_type = 0;
61 u32 xsb_validation_base_address = 0xf000;
62 u32 first_active_if = 0;
63 u32 dfs_low_phy1 = 0x1f;
65 int use_broadcast = 0;
66 struct hws_tip_freq_config_info *freq_info_table = NULL;
67 u8 is_cbe_required = 0;
70 int rl_mid_freq_wa = 0;
/* Bitmask selecting which training stages the auto-tune flow executes */
74 u32 mask_tune_func = (SET_MEDIUM_FREQ_MASK_BIT |
75 WRITE_LEVELING_MASK_BIT |
76 LOAD_PATTERN_2_MASK_BIT |
77 READ_LEVELING_MASK_BIT |
78 SET_TARGET_FREQ_MASK_BIT | WRITE_LEVELING_TF_MASK_BIT |
79 READ_LEVELING_TF_MASK_BIT |
80 CENTRALIZATION_RX_MASK_BIT | CENTRALIZATION_TX_MASK_BIT);
/*
 * Print the DDR3 training IP version string to the console.
 * NOTE(review): the function's braces are not visible in this excerpt.
 */
82 void ddr3_print_version(void)
84 printf(DDR3_TIP_VERSION_STRING);
/* Forward declarations for the internal training-flow helpers */
87 static int ddr3_tip_ddr3_training_main_flow(u32 dev_num);
88 static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
89 u32 if_id, u32 cl_value, u32 cwl_value);
90 static int ddr3_tip_ddr3_auto_tune(u32 dev_num);
91 static int is_bus_access_done(u32 dev_num, u32 if_id,
92 u32 dunit_reg_adrr, u32 bit);
/* NOTE(review): matching #endif for this #ifdef is not visible here */
93 #ifdef ODT_TEST_SUPPORT
94 static int odt_test(u32 dev_num, enum hws_algo_type algo_type);
97 int adll_calibration(u32 dev_num, enum hws_access_type access_type,
98 u32 if_id, enum hws_ddr_freq frequency);
99 static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
100 u32 if_id, enum hws_ddr_freq frequency);
/*
 * Per-memory-size page parameters (8-bit / 16-bit page sizes and mask).
 * NOTE(review): the table entries themselves are not visible in this
 * excerpt; only the declaration and a header-comment fragment survive.
 */
102 static struct page_element page_param[] = {
105 * page-size(K) page-size(K) mask
/*
 * Address-select encoding per supported memory size.
 * NOTE(review): entries missing from this excerpt.
 */
119 static u8 mem_size_config[MEM_SIZE_LAST] = {
/* Map a CS bitmask (index) to the number of chip-selects it implies */
127 static u8 cs_mask2_num[] = { 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3 };
/*
 * Default register values for the ODPG (data pattern generator) unit;
 * applied as {address, value, mask} triples.
 * NOTE(review): the closing brace of the initializer is not visible here.
 */
129 static struct reg_data odpg_default_value[] = {
130 {0x1034, 0x38000, MASK_ALL_BITS},
131 {0x1038, 0x0, MASK_ALL_BITS},
132 {0x10b0, 0x0, MASK_ALL_BITS},
133 {0x10b8, 0x0, MASK_ALL_BITS},
134 {0x10c0, 0x0, MASK_ALL_BITS},
135 {0x10f0, 0x0, MASK_ALL_BITS},
136 {0x10f4, 0x0, MASK_ALL_BITS},
137 {0x10f8, 0xff, MASK_ALL_BITS},
138 {0x10fc, 0xffff, MASK_ALL_BITS},
139 {0x1130, 0x0, MASK_ALL_BITS},
140 {0x1830, 0x2000000, MASK_ALL_BITS},
141 {0x14d0, 0x0, MASK_ALL_BITS},
142 {0x14d4, 0x0, MASK_ALL_BITS},
143 {0x14d8, 0x0, MASK_ALL_BITS},
144 {0x14dc, 0x0, MASK_ALL_BITS},
145 {0x1454, 0x0, MASK_ALL_BITS},
146 {0x1594, 0x0, MASK_ALL_BITS},
147 {0x1598, 0x0, MASK_ALL_BITS},
148 {0x159c, 0x0, MASK_ALL_BITS},
149 {0x15a0, 0x0, MASK_ALL_BITS},
150 {0x15a4, 0x0, MASK_ALL_BITS},
151 {0x15a8, 0x0, MASK_ALL_BITS},
152 {0x15ac, 0x0, MASK_ALL_BITS},
153 {0x1604, 0x0, MASK_ALL_BITS},
154 {0x1608, 0x0, MASK_ALL_BITS},
155 {0x160c, 0x0, MASK_ALL_BITS},
156 {0x1610, 0x0, MASK_ALL_BITS},
157 {0x1614, 0x0, MASK_ALL_BITS},
158 {0x1618, 0x0, MASK_ALL_BITS},
159 {0x1624, 0x0, MASK_ALL_BITS},
160 {0x1690, 0x0, MASK_ALL_BITS},
161 {0x1694, 0x0, MASK_ALL_BITS},
162 {0x1698, 0x0, MASK_ALL_BITS},
163 {0x169c, 0x0, MASK_ALL_BITS},
164 {0x14b8, 0x6f67, MASK_ALL_BITS},
165 {0x1630, 0x0, MASK_ALL_BITS},
166 {0x1634, 0x0, MASK_ALL_BITS},
167 {0x1638, 0x0, MASK_ALL_BITS},
168 {0x163c, 0x0, MASK_ALL_BITS},
169 {0x16b0, 0x0, MASK_ALL_BITS},
170 {0x16b4, 0x0, MASK_ALL_BITS},
171 {0x16b8, 0x0, MASK_ALL_BITS},
172 {0x16bc, 0x0, MASK_ALL_BITS},
173 {0x16c0, 0x0, MASK_ALL_BITS},
174 {0x16c4, 0x0, MASK_ALL_BITS},
175 {0x16c8, 0x0, MASK_ALL_BITS},
176 {0x16cc, 0x1, MASK_ALL_BITS},
177 {0x16f0, 0x1, MASK_ALL_BITS},
178 {0x16f4, 0x0, MASK_ALL_BITS},
179 {0x16f8, 0x0, MASK_ALL_BITS},
180 {0x16fc, 0x0, MASK_ALL_BITS}
/* Low-level PHY register access routine shared by read and write paths */
183 static int ddr3_tip_bus_access(u32 dev_num, enum hws_access_type interface_access,
184 u32 if_id, enum hws_access_type phy_access,
185 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
186 u32 data_value, enum hws_operation oper_type);
187 static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id);
188 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id);
191 * Update global training parameters by data from user
/*
 * Copy user-supplied tuning overrides into the file-scope globals above.
 * A field value of -1 means "keep the built-in default".
 * NOTE(review): only a subset of the parameter copies is visible in this
 * excerpt; the function body continues in lines dropped from the dump.
 */
193 int ddr3_tip_tune_training_params(u32 dev_num,
194 struct tune_train_params *params)
196 if (params->ck_delay != -1)
197 ck_delay = params->ck_delay;
198 if (params->ck_delay_16 != -1)
199 ck_delay_16 = params->ck_delay_16;
200 if (params->phy_reg3_val != -1)
201 phy_reg3_val = params->phy_reg3_val;
/*
 * Configure one chip-select on an interface: program the device width,
 * the high address bits and address-select mode into
 * SDRAM_ACCESS_CONTROL_REG, then gate the CS ODT/enable bits in
 * DDR_CONTROL_LOW_REG according to 'enable'.
 * NOTE(review): local 'mem_index' is used below but its declaration line
 * is missing from this excerpt.
 */
209 int ddr3_tip_configure_cs(u32 dev_num, u32 if_id, u32 cs_num, u32 enable)
211 u32 data, addr_hi, data_high;
213 struct hws_topology_map *tm = ddr3_get_topology_map();
/* 0 for x8 devices, 1 otherwise; 2 bits per CS in the register */
216 data = (tm->interface_params[if_id].bus_width ==
217 BUS_WIDTH_8) ? 0 : 1;
218 CHECK_STATUS(ddr3_tip_if_write
219 (dev_num, ACCESS_TYPE_UNICAST, if_id,
220 SDRAM_ACCESS_CONTROL_REG, (data << (cs_num * 4)),
221 0x3 << (cs_num * 4)));
222 mem_index = tm->interface_params[if_id].memory_size;
/* Low two bits of the size encoding -> high address bits field */
224 addr_hi = mem_size_config[mem_index] & 0x3;
225 CHECK_STATUS(ddr3_tip_if_write
226 (dev_num, ACCESS_TYPE_UNICAST, if_id,
227 SDRAM_ACCESS_CONTROL_REG,
228 (addr_hi << (2 + cs_num * 4)),
229 0x3 << (2 + cs_num * 4)));
/* Bit 2 of the size encoding lands at bit (20 + cs_num) */
231 data_high = (mem_size_config[mem_index] & 0x4) >> 2;
232 CHECK_STATUS(ddr3_tip_if_write
233 (dev_num, ACCESS_TYPE_UNICAST, if_id,
234 SDRAM_ACCESS_CONTROL_REG,
235 data_high << (20 + cs_num), 1 << (20 + cs_num)));
237 /* Enable Address Select Mode */
238 CHECK_STATUS(ddr3_tip_if_write
239 (dev_num, ACCESS_TYPE_UNICAST, if_id,
240 SDRAM_ACCESS_CONTROL_REG, 1 << (16 + cs_num),
241 1 << (16 + cs_num)));
/* Per-CS enable bit in DDR_CONTROL_LOW_REG (bits 11..) */
247 CHECK_STATUS(ddr3_tip_if_write
248 (dev_num, ACCESS_TYPE_UNICAST, if_id,
249 DDR_CONTROL_LOW_REG, (enable << (cs_num + 11)),
250 1 << (cs_num + 11)));
/* Global enable bit 15 follows the same 'enable' flag */
253 CHECK_STATUS(ddr3_tip_if_write
254 (dev_num, ACCESS_TYPE_UNICAST, if_id,
255 DDR_CONTROL_LOW_REG, (enable << 15), 1 << 15));
263 * Calculate number of CS
/*
 * Count chip-selects per bus from the topology map and verify that every
 * active bus of the interface reports the same count; on mismatch return
 * MV_NOT_SUPPORTED.  Result is written through *cs_num.
 * NOTE(review): the declarations of cs_bitmask/cs_count/curr_cs_num and
 * several statements are missing from this excerpt.
 */
265 static int calc_cs_num(u32 dev_num, u32 if_id, u32 *cs_num)
272 struct hws_topology_map *tm = ddr3_get_topology_map();
274 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
275 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
277 cs_bitmask = tm->interface_params[if_id].
278 as_bus_params[bus_cnt].cs_bitmask;
279 for (cs = 0; cs < MAX_CS_NUM; cs++) {
280 if ((cs_bitmask >> cs) & 1)
/* First active bus establishes the expected CS count */
284 if (curr_cs_num == 0) {
285 curr_cs_num = cs_count;
286 } else if (cs_count != curr_cs_num) {
287 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
288 ("CS number is different per bus (IF %d BUS %d cs_num %d curr_cs_num %d)\n",
289 if_id, bus_cnt, cs_count,
291 return MV_NOT_SUPPORTED;
294 *cs_num = curr_cs_num;
300 * Init Controller Flow
/*
 * Bring up the DDR controller: optionally initialize the PHY, then for
 * every active interface program refresh interval, bus width, PHY
 * configuration, calibration control, per-CS address mapping, mode
 * registers MR0-MR2 (when do_mrs_phy is set), ODT, timing, 2T mode and
 * ZQC timing.  Finally apply rank control, pad inversion and run the
 * init sequence.
 * NOTE(review): this excerpt drops many lines (loop headers, some
 * assignments and closing braces); comments below describe only what is
 * visible.
 */
302 int hws_ddr3_tip_init_controller(u32 dev_num, struct init_cntr_param *init_cntr_prm)
306 u32 t_refi = 0, t_hclk = 0, t_ckclk = 0, t_faw = 0, t_pd = 0,
307 t_wr = 0, t2t = 0, txpdll = 0;
308 u32 data_value = 0, bus_width = 0, page_size = 0, cs_cnt = 0,
309 mem_mask = 0, bus_index = 0;
310 enum hws_speed_bin speed_bin_index = SPEED_BIN_DDR_2133N;
311 enum hws_mem_size memory_size = MEM_2G;
312 enum hws_ddr_freq freq = init_freq;
313 enum hws_timing timing;
315 u32 cl_value = 0, cwl_val = 0;
316 u32 refresh_interval_cnt = 0, bus_cnt = 0, adll_tap = 0;
317 enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
318 u32 data_read[MAX_INTERFACE_NUM];
319 struct hws_topology_map *tm = ddr3_get_topology_map();
320 u32 odt_config = g_odt_config_2cs;
322 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
323 ("Init_controller, do_mrs_phy=%d, is_ctrl64_bit=%d\n",
324 init_cntr_prm->do_mrs_phy,
325 init_cntr_prm->is_ctrl64_bit));
327 if (init_cntr_prm->init_phy == 1) {
328 CHECK_STATUS(ddr3_tip_configure_phy(dev_num));
331 if (generic_init_controller == 1) {
332 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
333 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
334 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
335 ("active IF %d\n", if_id));
/* Accumulate per-bus mirror bits into mem_mask (loop header dropped) */
338 bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
340 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
342 tm->interface_params[if_id].
343 as_bus_params[bus_index].mirror_enable_bitmask;
347 CHECK_STATUS(ddr3_tip_if_write
348 (dev_num, ACCESS_TYPE_MULTICAST,
349 if_id, CS_ENABLE_REG, 0,
/* Pull speed bin / memory size from the topology map */
354 tm->interface_params[if_id].
357 tm->interface_params[if_id].
/* Refresh interval depends on operating temperature */
361 (tm->interface_params[if_id].
363 HWS_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
364 t_refi *= 1000; /* psec */
365 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
366 ("memy_size %d speed_bin_ind %d freq %d t_refi %d\n",
367 memory_size, speed_bin_index, freq,
369 /* HCLK & CK CLK in 2:1[ps] */
370 /* t_ckclk is external clock */
371 t_ckclk = (MEGA / freq_val[freq]);
372 /* t_hclk is internal clock */
373 t_hclk = 2 * t_ckclk;
374 refresh_interval_cnt = t_refi / t_hclk; /* no units */
376 (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask)
379 if (init_cntr_prm->is_ctrl64_bit)
/* Compose SDRAM_CONFIGURATION_REG value (partially visible) */
383 (refresh_interval_cnt | 0x4000 |
385 32) ? 0x8000 : 0) | 0x1000000) & ~(1 << 26);
387 /* Interface Bus Width */
389 CHECK_STATUS(ddr3_tip_if_write
390 (dev_num, access_type, if_id,
391 SDRAM_CONFIGURATION_REG, data_value,
394 /* Interleave first command pre-charge enable (TBD) */
395 CHECK_STATUS(ddr3_tip_if_write
396 (dev_num, access_type, if_id,
397 SDRAM_OPEN_PAGE_CONTROL_REG, (1 << 10),
400 /* PHY configuration */
402 * Postamble Length = 1.5cc, Addresscntl to clk skew
403 * 1/2, Preamble length normal, parallel ADLL enable
405 CHECK_STATUS(ddr3_tip_if_write
406 (dev_num, access_type, if_id,
407 DRAM_PHY_CONFIGURATION, 0x28, 0x3e));
408 if (init_cntr_prm->is_ctrl64_bit) {
410 CHECK_STATUS(ddr3_tip_if_write
411 (dev_num, access_type, if_id,
412 DRAM_PHY_CONFIGURATION, 0x0,
416 /* calibration block disable */
417 /* Xbar Read buffer select (for Internal access) */
418 CHECK_STATUS(ddr3_tip_if_write
419 (dev_num, access_type, if_id,
420 CALIB_MACHINE_CTRL_REG, 0x1200c,
422 CHECK_STATUS(ddr3_tip_if_write
423 (dev_num, access_type, if_id,
424 CALIB_MACHINE_CTRL_REG,
425 calibration_update_control << 3, 0x3 << 3));
427 /* Pad calibration control - enable */
428 CHECK_STATUS(ddr3_tip_if_write
429 (dev_num, access_type, if_id,
430 CALIB_MACHINE_CTRL_REG, 0x1, 0x1));
435 * Address ctrl - Part of the Generic code
436 * The next configuration is done:
442 * Per Dunit get from the Map_topology the parameters:
444 * t_faw is per Dunit not per CS
/* Select 8-bit or 16-bit page size for this memory size */
447 (tm->interface_params[if_id].
449 BUS_WIDTH_8) ? page_param[memory_size].
450 page_size_8bit : page_param[memory_size].
/* tFAW from the speed-bin table depends on the page size */
454 (page_size == 1) ? speed_bin_table(speed_bin_index,
456 : speed_bin_table(speed_bin_index,
/* tFAW occupies bits 31:24 of SDRAM_ACCESS_CONTROL_REG */
459 data_value = TIME_2_CLOCK_CYCLES(t_faw, t_ckclk);
460 data_value = data_value << 24;
461 CHECK_STATUS(ddr3_tip_if_write
462 (dev_num, access_type, if_id,
463 SDRAM_ACCESS_CONTROL_REG, data_value,
467 (tm->interface_params[if_id].
468 bus_width == BUS_WIDTH_8) ? 0 : 1;
470 /* create merge cs mask for all cs available in dunit */
472 bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
474 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
476 tm->interface_params[if_id].
477 as_bus_params[bus_cnt].cs_bitmask;
479 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
480 ("Init_controller IF %d cs_mask %d\n",
483 * Configure the next upon the Map Topology - If the
484 * Dunit is CS0 Configure CS0 if it is multi CS
485 * configure them both: The bus width is the
486 * Memory Bus width - x8 or x16
488 for (cs_cnt = 0; cs_cnt < NUM_OF_CS; cs_cnt++) {
489 ddr3_tip_configure_cs(dev_num, if_id, cs_cnt,
490 ((cs_mask & (1 << cs_cnt)) ? 1
494 if (init_cntr_prm->do_mrs_phy) {
496 * MR0 - Part of the Generic code
497 * The next configuration is done:
500 * get for each dunit what is it Speed_bin &
501 * Target Frequency. From those both parameters
502 * get the appropriate Cas_l from the CL table
505 tm->interface_params[if_id].
508 tm->interface_params[if_id].
510 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
511 ("cl_value 0x%x cwl_val 0x%x\n",
/* tWR in clock cycles -> MR0 WR field via twr_mask_table */
513 t_wr = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
517 ((cl_mask_table[cl_value] & 0x1) << 2) |
518 ((cl_mask_table[cl_value] & 0xe) << 3);
519 CHECK_STATUS(ddr3_tip_if_write
520 (dev_num, access_type, if_id,
522 (0x7 << 4) | (1 << 2)));
523 CHECK_STATUS(ddr3_tip_if_write
524 (dev_num, access_type, if_id,
525 MR0_REG, twr_mask_table[t_wr + 1] << 9,
530 * MR1: Set RTT and DIC Design GL values
533 CHECK_STATUS(ddr3_tip_if_write
534 (dev_num, ACCESS_TYPE_MULTICAST,
535 PARAM_NOT_CARE, MR1_REG,
536 g_dic | g_rtt_nom, 0x266));
538 /* MR2 - Part of the Generic code */
540 * The next configuration is done:
542 * 2) CAS Write Latency
544 data_value = (cwl_mask_table[cwl_val] << 3);
/* Extended temperature range bit (bit 7) when running hot */
546 ((tm->interface_params[if_id].
548 HWS_TEMP_HIGH) ? (1 << 7) : 0);
549 CHECK_STATUS(ddr3_tip_if_write
550 (dev_num, access_type, if_id,
552 (0x7 << 3) | (0x1 << 7) | (0x3 <<
556 ddr3_tip_write_odt(dev_num, access_type, if_id,
558 ddr3_tip_set_timing(dev_num, access_type, if_id, freq);
560 CHECK_STATUS(ddr3_tip_if_write
561 (dev_num, access_type, if_id,
562 DUNIT_CONTROL_HIGH_REG, 0x177,
565 if (init_cntr_prm->is_ctrl64_bit) {
566 /* disable 0.25 cc delay */
567 CHECK_STATUS(ddr3_tip_if_write
568 (dev_num, access_type, if_id,
569 DUNIT_CONTROL_HIGH_REG, 0x0,
574 CHECK_STATUS(ddr3_tip_if_write
575 (dev_num, access_type, if_id,
576 DUNIT_CONTROL_HIGH_REG,
577 (init_cntr_prm->msys_init << 7), (1 << 7)));
579 /* calculate number of CS (per interface) */
580 CHECK_STATUS(calc_cs_num
581 (dev_num, if_id, &cs_num));
582 timing = tm->interface_params[if_id].timing;
/* 2T timing: explicit override wins, then topology, then CS count */
584 if (mode2_t != 0xff) {
586 } else if (timing != HWS_TIM_DEFAULT) {
587 /* Board topology map is forcing timing */
588 t2t = (timing == HWS_TIM_2T) ? 1 : 0;
590 t2t = (cs_num == 1) ? 0 : 1;
593 CHECK_STATUS(ddr3_tip_if_write
594 (dev_num, access_type, if_id,
595 DDR_CONTROL_LOW_REG, t2t << 3,
597 /* move the block to ddr3_tip_set_timing - start */
598 t_pd = TIMES_9_TREFI_CYCLES;
599 txpdll = GET_MAX_VALUE(t_ckclk * 10,
600 speed_bin_table(speed_bin_index,
602 txpdll = CEIL_DIVIDE((txpdll - 1), t_ckclk);
603 CHECK_STATUS(ddr3_tip_if_write
604 (dev_num, access_type, if_id,
605 DDR_TIMING_REG, txpdll << 4 | t_pd,
607 CHECK_STATUS(ddr3_tip_if_write
608 (dev_num, access_type, if_id,
609 DDR_TIMING_REG, 0x28 << 9, 0x3f << 9));
610 CHECK_STATUS(ddr3_tip_if_write
611 (dev_num, access_type, if_id,
612 DDR_TIMING_REG, 0xa << 21, 0xff << 21));
614 /* move the block to ddr3_tip_set_timing - end */
615 /* AUTO_ZQC_TIMING */
616 CHECK_STATUS(ddr3_tip_if_write
617 (dev_num, access_type, if_id,
618 TIMING_REG, (AUTO_ZQC_TIMING | (2 << 20)),
620 CHECK_STATUS(ddr3_tip_if_read
621 (dev_num, access_type, if_id,
622 DRAM_PHY_CONFIGURATION, data_read, 0x30));
624 (data_read[if_id] == 0) ? (1 << 11) : 0;
625 CHECK_STATUS(ddr3_tip_if_write
626 (dev_num, access_type, if_id,
627 DUNIT_CONTROL_HIGH_REG, data_value,
630 /* Set Active control for ODT write transactions */
/* Single-CS systems use the 1cs ODT configuration */
632 odt_config = g_odt_config_1cs;
633 CHECK_STATUS(ddr3_tip_if_write
634 (dev_num, ACCESS_TYPE_MULTICAST,
635 PARAM_NOT_CARE, 0x1494, odt_config,
639 #ifdef STATIC_ALGO_SUPPORT
640 CHECK_STATUS(ddr3_tip_static_init_controller(dev_num));
641 #if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X)
642 CHECK_STATUS(ddr3_tip_static_phy_init_controller(dev_num));
644 #endif /* STATIC_ALGO_SUPPORT */
647 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
648 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
649 CHECK_STATUS(ddr3_tip_rank_control(dev_num, if_id));
651 if (init_cntr_prm->do_mrs_phy) {
652 CHECK_STATUS(ddr3_tip_pad_inv(dev_num, if_id));
655 /* Pad calibration control - disable */
656 CHECK_STATUS(ddr3_tip_if_write
657 (dev_num, access_type, if_id,
658 CALIB_MACHINE_CTRL_REG, 0x0, 0x1));
659 CHECK_STATUS(ddr3_tip_if_write
660 (dev_num, access_type, if_id,
661 CALIB_MACHINE_CTRL_REG,
662 calibration_update_control << 3, 0x3 << 3));
665 CHECK_STATUS(ddr3_tip_enable_init_sequence(dev_num));
667 if (delay_enable != 0) {
/* One ADLL tap in ps at the current frequency (64 taps per cycle) */
668 adll_tap = MEGA / (freq_val[freq] * 64);
669 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
/*
 * Load/normalize the board topology map: resolve the first active
 * interface and, where the map leaves CAS latency / CAS write latency as
 * zero, fill them from the speed-bin tables for the target frequency.
 * NOTE(review): the 'tm' parameter is immediately overwritten by
 * ddr3_get_topology_map() below - presumably intentional, but worth
 * confirming against the upstream driver.
 */
680 int hws_ddr3_tip_load_topology_map(u32 dev_num, struct hws_topology_map *tm)
682 enum hws_speed_bin speed_bin_index;
683 enum hws_ddr_freq freq = DDR_FREQ_LIMIT;
/* DFS low frequency is SoC-dependent (see dfs_low_freq above) */
684 freq_val[DDR_FREQ_LOW_FREQ] = dfs_low_freq;
685 tm = ddr3_get_topology_map();
686 CHECK_STATUS(ddr3_tip_get_first_active_if
687 ((u8)dev_num, tm->if_act_mask,
689 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
690 ("board IF_Mask=0x%x num_of_bus_per_interface=0x%x\n",
692 tm->num_of_bus_per_interface));
695 * if CL, CWL values are missing in topology map, then fill them
696 * according to speedbin tables
698 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
699 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
701 tm->interface_params[if_id].speed_bin_index;
702 /* TBD memory frequency of interface 0 only is used ! */
703 freq = tm->interface_params[first_active_if].memory_freq;
705 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
706 ("speed_bin_index =%d freq=%d cl=%d cwl=%d\n",
707 speed_bin_index, freq_val[freq],
708 tm->interface_params[if_id].
710 tm->interface_params[if_id].
713 if (tm->interface_params[if_id].cas_l == 0) {
714 tm->interface_params[if_id].cas_l =
715 cas_latency_table[speed_bin_index].cl_val[freq];
718 if (tm->interface_params[if_id].cas_wl == 0) {
719 tm->interface_params[if_id].cas_wl =
720 cas_write_latency_table[speed_bin_index].cl_val[freq];
/*
 * Program RANK_CTRL_REG from bus 0's CS bitmask and mirror bitmask.
 * Warns (does not fail) if other buses disagree with bus 0, since the
 * rank control register is shared across all buses of the interface.
 */
732 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id)
734 u32 data_value = 0, bus_cnt;
735 struct hws_topology_map *tm = ddr3_get_topology_map();
/* Compare every other active bus against bus 0's configuration */
737 for (bus_cnt = 1; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
738 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
739 if ((tm->interface_params[if_id].
740 as_bus_params[0].cs_bitmask !=
741 tm->interface_params[if_id].
742 as_bus_params[bus_cnt].cs_bitmask) ||
743 (tm->interface_params[if_id].
744 as_bus_params[0].mirror_enable_bitmask !=
745 tm->interface_params[if_id].
746 as_bus_params[bus_cnt].mirror_enable_bitmask))
747 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
748 ("WARNING:Wrong configuration for pup #%d CS mask and CS mirroring for all pups should be the same\n",
/* CS mask in bits 3:0, mirror mask shifted to bits 7:4 */
752 data_value |= tm->interface_params[if_id].
753 as_bus_params[0].cs_bitmask;
754 data_value |= tm->interface_params[if_id].
755 as_bus_params[0].mirror_enable_bitmask << 4;
757 CHECK_STATUS(ddr3_tip_if_write
758 (dev_num, ACCESS_TYPE_UNICAST, if_id, RANK_CTRL_REG,
/*
 * Apply per-bus pad inversion: invert DQS pads for buses flagged with
 * is_dqs_swap, and program CK swap on the control PHY for buses flagged
 * with is_ck_swap.
 */
767 static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id)
769 u32 bus_cnt, data_value, ck_swap_pup_ctrl;
770 struct hws_topology_map *tm = ddr3_get_topology_map();
772 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
773 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
774 if (tm->interface_params[if_id].
775 as_bus_params[bus_cnt].is_dqs_swap == 1) {
/* dqs swap: invert the DQS pad pair (bits 7:6) */
777 ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST,
780 PHY_CONTROL_PHY_REG, 0xc0,
784 if (tm->interface_params[if_id].
785 as_bus_params[bus_cnt].is_ck_swap == 1) {
/* NOTE(review): selection between 0x5/0xa not visible here */
787 data_value = 0x5 << 2;
789 data_value = 0xa << 2;
791 /* mask equals data */
792 /* ck swap pup is only control pup #0 ! */
793 ck_swap_pup_ctrl = 0;
794 ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST,
795 if_id, ck_swap_pup_ctrl,
798 data_value, data_value);
/*
 * Run the training algorithm: the ODT finger-test sweep when enabled,
 * otherwise the dynamic auto-tune flow or (when compiled in) the static
 * algorithm.
 */
808 int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type)
810 int ret = MV_OK, ret_tune = MV_OK;
812 #ifdef ODT_TEST_SUPPORT
813 if (finger_test == 1)
814 return odt_test(dev_num, algo_type);
817 if (algo_type == ALGO_TYPE_DYNAMIC) {
818 ret = ddr3_tip_ddr3_auto_tune(dev_num);
820 #ifdef STATIC_ALGO_SUPPORT
822 enum hws_ddr_freq freq;
/* Optional ADLL calibration before the static algorithm runs */
826 if (is_adll_calib_before_init != 0) {
827 printf("with adll calib before init\n");
828 adll_calibration(dev_num, ACCESS_TYPE_MULTICAST,
832 * Frequency per interface is not relevant,
835 ret = ddr3_tip_run_static_alg(dev_num,
842 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
843 ("Run_alg: tuning failed %d\n", ret_tune));
847 #ifdef ODT_TEST_SUPPORT
/*
 * Sweep the p/n finger (ODT driver strength) settings over the globally
 * configured [start, end, step] ranges and rerun the full tuning flow at
 * each point, logging failures.
 */
853 static int odt_test(u32 dev_num, enum hws_algo_type algo_type)
855 int ret = MV_OK, ret_tune = MV_OK;
856 int pfinger_val = 0, nfinger_val;
858 for (pfinger_val = p_finger_start; pfinger_val <= p_finger_end;
859 pfinger_val += p_finger_step) {
860 for (nfinger_val = n_finger_start; nfinger_val <= n_finger_end;
861 nfinger_val += n_finger_step) {
862 if (finger_test != 0) {
863 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
864 ("pfinger_val %d nfinger_val %d\n",
865 pfinger_val, nfinger_val));
/* Publish the current sweep point for the tuning flow */
866 p_finger = pfinger_val;
867 n_finger = nfinger_val;
870 if (algo_type == ALGO_TYPE_DYNAMIC) {
871 ret = ddr3_tip_ddr3_auto_tune(dev_num);
874 * Frequency per interface is not relevant,
877 ret = ddr3_tip_run_static_alg(dev_num,
883 if (ret_tune != MV_OK) {
884 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
885 ("Run_alg: tuning failed %d\n", ret_tune));
886 ret = (ret == MV_OK) ? ret_tune : ret;
/*
 * Select/deselect the DDR controller mux via the board-registered
 * callback (if one was installed in config_func_info).
 */
896 int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable)
898 if (config_func_info[dev_num].tip_dunit_mux_select_func != NULL) {
899 return config_func_info[dev_num].
900 tip_dunit_mux_select_func((u8)dev_num, enable);
905 * Dunit Register Write
/* Masked write to a Dunit register through the registered callback */
907 int ddr3_tip_if_write(u32 dev_num, enum hws_access_type interface_access,
908 u32 if_id, u32 reg_addr, u32 data_value, u32 mask)
910 if (config_func_info[dev_num].tip_dunit_write_func != NULL) {
911 return config_func_info[dev_num].
912 tip_dunit_write_func((u8)dev_num, interface_access,
921 * Dunit Register Read
/* Masked read of a Dunit register through the registered callback */
923 int ddr3_tip_if_read(u32 dev_num, enum hws_access_type interface_access,
924 u32 if_id, u32 reg_addr, u32 *data, u32 mask)
926 if (config_func_info[dev_num].tip_dunit_read_func != NULL) {
927 return config_func_info[dev_num].
928 tip_dunit_read_func((u8)dev_num, interface_access,
937 * Dunit Register Polling
/*
 * Poll a Dunit register on one or all active interfaces until the masked
 * read equals exp_value, up to poll_tries attempts per interface.
 * Records TEST_FAILED/TEST_SUCCESS per interface in training_result and
 * returns MV_FAIL if any interface timed out.
 */
939 int ddr3_tip_if_polling(u32 dev_num, enum hws_access_type access_type,
940 u32 if_id, u32 exp_value, u32 mask, u32 offset,
943 u32 poll_cnt = 0, interface_num = 0, start_if, end_if;
944 u32 read_data[MAX_INTERFACE_NUM];
946 int is_fail = 0, is_if_fail;
947 struct hws_topology_map *tm = ddr3_get_topology_map();
/* Multicast polls every interface; unicast only if_id */
949 if (access_type == ACCESS_TYPE_MULTICAST) {
951 end_if = MAX_INTERFACE_NUM - 1;
957 for (interface_num = start_if; interface_num <= end_if; interface_num++) {
958 /* polling bit 3 for n times */
959 VALIDATE_ACTIVE(tm->if_act_mask, interface_num);
962 for (poll_cnt = 0; poll_cnt < poll_tries; poll_cnt++) {
964 ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST,
965 interface_num, offset, read_data,
970 if (read_data[interface_num] == exp_value)
974 if (poll_cnt >= poll_tries) {
975 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
976 ("max poll IF #%d\n", interface_num));
981 training_result[training_stage][interface_num] =
982 (is_if_fail == 1) ? TEST_FAILED : TEST_SUCCESS;
985 return (is_fail == 0) ? MV_OK : MV_FAIL;
/*
 * Read a PHY register: issue a read transaction via ddr3_tip_bus_access,
 * then fetch the result from PHY_REG_FILE_ACCESS.  For multicast PHY
 * access, reads every active bus into data[]; otherwise a single value
 * into *data.  Only the low 16 bits of the register file are valid.
 */
991 int ddr3_tip_bus_read(u32 dev_num, u32 if_id,
992 enum hws_access_type phy_access, u32 phy_id,
993 enum hws_ddr_phy phy_type, u32 reg_addr, u32 *data)
996 u32 data_read[MAX_INTERFACE_NUM];
997 struct hws_topology_map *tm = ddr3_get_topology_map();
999 if (phy_access == ACCESS_TYPE_MULTICAST) {
1000 for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
1002 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
1003 CHECK_STATUS(ddr3_tip_bus_access
1004 (dev_num, ACCESS_TYPE_UNICAST,
1005 if_id, ACCESS_TYPE_UNICAST,
1006 bus_index, phy_type, reg_addr, 0,
1008 CHECK_STATUS(ddr3_tip_if_read
1009 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1010 PHY_REG_FILE_ACCESS, data_read,
1012 data[bus_index] = (data_read[if_id] & 0xffff);
1015 CHECK_STATUS(ddr3_tip_bus_access
1016 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1017 phy_access, phy_id, phy_type, reg_addr, 0,
1019 CHECK_STATUS(ddr3_tip_if_read
1020 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1021 PHY_REG_FILE_ACCESS, data_read, MASK_ALL_BITS));
1024 * only 16 lsb bit are valid in Phy (each register is different,
1025 * some can actually be less than 16 bits)
1027 *data = (data_read[if_id] & 0xffff);
/* Write a PHY register: thin wrapper over ddr3_tip_bus_access() */
1036 int ddr3_tip_bus_write(u32 dev_num, enum hws_access_type interface_access,
1037 u32 if_id, enum hws_access_type phy_access,
1038 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
1041 CHECK_STATUS(ddr3_tip_bus_access
1042 (dev_num, interface_access, if_id, phy_access,
1043 phy_id, phy_type, reg_addr, data_value, OPERATION_WRITE));
1049 * Bus access routine (relevant for both read & write)
/*
 * Encode a PHY register transaction into PHY_REG_FILE_ACCESS: the
 * operation, address (split into high 2 / low 6 bits), access mode, PHY
 * id/type and 16-bit data are packed into one word; the write is issued
 * twice, the second time with the trigger bit (bit 31) set, then each
 * addressed interface is polled until the transaction completes.
 */
1051 static int ddr3_tip_bus_access(u32 dev_num, enum hws_access_type interface_access,
1052 u32 if_id, enum hws_access_type phy_access,
1053 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
1054 u32 data_value, enum hws_operation oper_type)
1056 u32 addr_low = 0x3f & reg_addr;
1057 u32 addr_hi = ((0xc0 & reg_addr) >> 6);
1059 (oper_type << 30) + (addr_hi << 28) + (phy_access << 27) +
1060 (phy_type << 26) + (phy_id << 22) + (addr_low << 16) +
1061 (data_value & 0xffff);
/* Same payload with bit 31 set acts as the 'go' trigger */
1062 u32 data_p2 = data_p1 + (1 << 31);
1063 u32 start_if, end_if;
1064 struct hws_topology_map *tm = ddr3_get_topology_map();
1066 CHECK_STATUS(ddr3_tip_if_write
1067 (dev_num, interface_access, if_id, PHY_REG_FILE_ACCESS,
1068 data_p1, MASK_ALL_BITS));
1069 CHECK_STATUS(ddr3_tip_if_write
1070 (dev_num, interface_access, if_id, PHY_REG_FILE_ACCESS,
1071 data_p2, MASK_ALL_BITS));
1073 if (interface_access == ACCESS_TYPE_UNICAST) {
1078 end_if = MAX_INTERFACE_NUM - 1;
1081 /* polling for read/write execution done */
1082 for (if_id = start_if; if_id <= end_if; if_id++) {
1083 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1084 CHECK_STATUS(is_bus_access_done
1085 (dev_num, if_id, PHY_REG_FILE_ACCESS, 31));
1092 * Check bus access done
/*
 * Poll 'bit' of a Dunit register until it clears or
 * MAX_POLLING_ITERATIONS is exceeded; the bit being clear means the bus
 * transaction has completed.
 * NOTE(review): the declarations of rd_data/cnt are missing from this
 * excerpt.
 */
1094 static int is_bus_access_done(u32 dev_num, u32 if_id, u32 dunit_reg_adrr,
1099 u32 data_read[MAX_INTERFACE_NUM];
1101 CHECK_STATUS(ddr3_tip_if_read
1102 (dev_num, ACCESS_TYPE_UNICAST, if_id, dunit_reg_adrr,
1103 data_read, MASK_ALL_BITS));
1104 rd_data = data_read[if_id];
1105 rd_data &= (1 << bit);
1107 while (rd_data != 0) {
1108 if (cnt++ >= MAX_POLLING_ITERATIONS)
1111 CHECK_STATUS(ddr3_tip_if_read
1112 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1113 dunit_reg_adrr, data_read, MASK_ALL_BITS));
1114 rd_data = data_read[if_id];
1115 rd_data &= (1 << bit);
1118 if (cnt < MAX_POLLING_ITERATIONS)
1125 * Phy read-modify-write
/*
 * Read-modify-write a PHY register on one or all active interfaces:
 * preserve the bits outside reg_mask, replace the bits inside it with
 * data_value, and write the merged value back.
 */
1127 int ddr3_tip_bus_read_modify_write(u32 dev_num, enum hws_access_type access_type,
1128 u32 interface_id, u32 phy_id,
1129 enum hws_ddr_phy phy_type, u32 reg_addr,
1130 u32 data_value, u32 reg_mask)
1132 u32 data_val = 0, if_id, start_if, end_if;
1133 struct hws_topology_map *tm = ddr3_get_topology_map();
1135 if (access_type == ACCESS_TYPE_MULTICAST) {
1137 end_if = MAX_INTERFACE_NUM - 1;
1139 start_if = interface_id;
1140 end_if = interface_id;
1143 for (if_id = start_if; if_id <= end_if; if_id++) {
1144 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1145 CHECK_STATUS(ddr3_tip_bus_read
1146 (dev_num, if_id, ACCESS_TYPE_UNICAST, phy_id,
1147 phy_type, reg_addr, &data_val));
/* Merge: keep old bits outside the mask, new bits inside it */
1148 data_value = (data_val & (~reg_mask)) | (data_value & reg_mask);
1149 CHECK_STATUS(ddr3_tip_bus_write
1150 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1151 ACCESS_TYPE_UNICAST, phy_id, phy_type, reg_addr,
/*
 * Run ADLL calibration for the given target frequency: pulse the driver
 * reset in SDRAM_CONFIGURATION_REG, program the per-bus bandwidth/rate
 * values from the board's frequency-config callback, pulse the ADLL
 * reset in DRAM_PHY_CONFIGURATION, then poll PHY_LOCK_STATUS_REG for
 * lock and finally pulse the data-pup reset.
 */
1161 int adll_calibration(u32 dev_num, enum hws_access_type access_type,
1162 u32 if_id, enum hws_ddr_freq frequency)
1164 struct hws_tip_freq_config_info freq_config_info;
1166 struct hws_topology_map *tm = ddr3_get_topology_map();
1168 /* Reset Diver_b assert -> de-assert */
1169 CHECK_STATUS(ddr3_tip_if_write
1170 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
1173 CHECK_STATUS(ddr3_tip_if_write
1174 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
1175 0x10000000, 0x10000000));
1177 if (config_func_info[dev_num].tip_get_freq_config_info_func != NULL) {
1178 CHECK_STATUS(config_func_info[dev_num].
1179 tip_get_freq_config_info_func((u8)dev_num, frequency,
1180 &freq_config_info));
1182 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1183 ("tip_get_freq_config_info_func is NULL"));
1184 return MV_NOT_INITIALIZED;
/* Program per-bus bandwidth and rate for the new frequency */
1187 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
1188 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
1189 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1190 (dev_num, access_type, if_id, bus_cnt,
1191 DDR_PHY_DATA, BW_PHY_REG,
1192 freq_config_info.bw_per_freq << 8, 0x700));
1193 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1194 (dev_num, access_type, if_id, bus_cnt,
1195 DDR_PHY_DATA, RATE_PHY_REG,
1196 freq_config_info.rate_per_freq, 0x7));
1199 /* DUnit to Phy drive post edge, ADLL reset assert de-assert */
1200 CHECK_STATUS(ddr3_tip_if_write
1201 (dev_num, access_type, if_id, DRAM_PHY_CONFIGURATION,
1202 0, (0x80000000 | 0x40000000)));
/* Delay scales inversely with frequency relative to the DFS low point */
1203 mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
1204 CHECK_STATUS(ddr3_tip_if_write
1205 (dev_num, access_type, if_id, DRAM_PHY_CONFIGURATION,
1206 (0x80000000 | 0x40000000), (0x80000000 | 0x40000000)));
1208 /* polling for ADLL Done */
1209 if (ddr3_tip_if_polling(dev_num, access_type, if_id,
1210 0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG,
1211 MAX_POLLING_ITERATIONS) != MV_OK) {
1212 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1213 ("Freq_set: DDR3 poll failed(1)"));
1216 /* pup data_pup reset assert-> deassert */
1217 CHECK_STATUS(ddr3_tip_if_write
1218 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
1221 CHECK_STATUS(ddr3_tip_if_write
1222 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
1223 0x60000000, 0x60000000));
/*
 * ddr3_tip_freq_set() - switch the DDR interface(s) to a new operating
 * frequency using the DFS (Dynamic Frequency Scaling) register flow:
 * block transactions, enter self-refresh, reprogram PLL divider and
 * refresh/CL/CWL/tWR parameters, reset the ADLLs and data-path FIFOs,
 * exit self-refresh, then restore MR0/MR2 and ODT timing.
 *
 * NOTE(review): this chunk is a fragmentary extraction (the embedded
 * source line numbers are non-contiguous), so some statements and
 * closing braces are missing from view.
 */
1228 int ddr3_tip_freq_set(u32 dev_num, enum hws_access_type access_type,
1229 u32 if_id, enum hws_ddr_freq frequency)
1231 u32 cl_value = 0, cwl_value = 0, mem_mask = 0, val = 0,
1232 bus_cnt = 0, t_hclk = 0, t_wr = 0,
1233 refresh_interval_cnt = 0, cnt_id;
1235 u32 t_refi = 0, end_if, start_if;
1238 enum hws_speed_bin speed_bin_index = 0;
1239 struct hws_tip_freq_config_info freq_config_info;
/* flow_result aliases the row of the global result matrix for the
 * currently-running training stage */
1240 enum hws_result *flow_result = training_result[training_stage];
1242 u32 cs_mask[MAX_INTERFACE_NUM];
1243 struct hws_topology_map *tm = ddr3_get_topology_map();
1245 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1246 ("dev %d access %d IF %d freq %d\n", dev_num,
1247 access_type, if_id, frequency));
1249 if (frequency == DDR_FREQ_LOW_FREQ)
/* multicast access iterates over every interface; unicast (not fully
 * visible here) presumably restricts start_if/end_if to one IF */
1251 if (access_type == ACCESS_TYPE_MULTICAST) {
1253 end_if = MAX_INTERFACE_NUM - 1;
1259 /* calculate interface cs mask - Oferb 4/11 */
1260 /* speed bin can be different for each interface */
1261 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1262 /* cs enable is active low */
1263 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1264 cs_mask[if_id] = CS_BIT_MASK;
1265 training_result[training_stage][if_id] = TEST_SUCCESS;
1266 ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
1270 /* speed bin can be different for each interface */
1272 * moti b - need to remove the loop for multicas access functions
1273 * and loop the unicast access functions
1275 for (if_id = start_if; if_id <= end_if; if_id++) {
1276 if (IS_ACTIVE(tm->if_act_mask, if_id) == 0)
1279 flow_result[if_id] = TEST_SUCCESS;
/* CL/CWL either come from the topology map (when the requested
 * frequency matches the interface's configured memory_freq) or are
 * looked up per speed-bin for the new frequency */
1281 tm->interface_params[if_id].speed_bin_index;
1282 if (tm->interface_params[if_id].memory_freq ==
1285 tm->interface_params[if_id].cas_l;
1287 tm->interface_params[if_id].cas_wl;
1290 cas_latency_table[speed_bin_index].cl_val[frequency];
1292 cas_write_latency_table[speed_bin_index].
1296 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1297 ("Freq_set dev 0x%x access 0x%x if 0x%x freq 0x%x speed %d:\n\t",
1298 dev_num, access_type, if_id,
1299 frequency, speed_bin_index));
/* trace-dump the CL table for all supported frequencies */
1301 for (cnt_id = 0; cnt_id < DDR_FREQ_LIMIT; cnt_id++) {
1302 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1304 cas_latency_table[speed_bin_index].
1308 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, ("\n"));
/* accumulate the per-bus address-mirroring mask for this interface */
1310 for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
1312 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
1314 tm->interface_params[if_id].
1315 as_bus_params[bus_index].mirror_enable_bitmask;
1318 if (mem_mask != 0) {
1319 /* motib redundant in KW28 */
1320 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1322 CS_ENABLE_REG, 0, 0x8));
1325 /* dll state after exiting SR */
1326 if (is_dll_off == 1) {
1327 CHECK_STATUS(ddr3_tip_if_write
1328 (dev_num, access_type, if_id,
1329 DFS_REG, 0x1, 0x1));
1331 CHECK_STATUS(ddr3_tip_if_write
1332 (dev_num, access_type, if_id,
/* Controller-to-MBUS retry off while DFS is in progress */
1336 CHECK_STATUS(ddr3_tip_if_write
1337 (dev_num, access_type, if_id,
1338 DUNIT_MMASK_REG, 0, 0x1));
1339 /* DFS - block transactions */
1340 CHECK_STATUS(ddr3_tip_if_write
1341 (dev_num, access_type, if_id,
1342 DFS_REG, 0x2, 0x2));
1344 /* disable ODT in case of dll off */
1345 if (is_dll_off == 1) {
1346 CHECK_STATUS(ddr3_tip_if_write
1347 (dev_num, access_type, if_id,
1349 CHECK_STATUS(ddr3_tip_if_write
1350 (dev_num, access_type, if_id,
1352 CHECK_STATUS(ddr3_tip_if_write
1353 (dev_num, access_type, if_id,
1355 CHECK_STATUS(ddr3_tip_if_write
1356 (dev_num, access_type, if_id,
1360 /* DFS - Enter Self-Refresh */
1361 CHECK_STATUS(ddr3_tip_if_write
1362 (dev_num, access_type, if_id, DFS_REG, 0x4,
1364 /* polling on self refresh entry */
1365 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST,
1366 if_id, 0x8, 0x8, DFS_REG,
1367 MAX_POLLING_ITERATIONS) != MV_OK) {
1368 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1369 ("Freq_set: DDR3 poll failed on SR entry\n"));
1372 /* PLL configuration */
1373 if (config_func_info[dev_num].tip_set_freq_divider_func != NULL) {
1374 config_func_info[dev_num].
1375 tip_set_freq_divider_func(dev_num, if_id,
1379 /* PLL configuration End */
1381 /* adjust t_refi to new frequency */
1382 t_refi = (tm->interface_params[if_id].interface_temp ==
1383 HWS_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
1384 t_refi *= 1000; /*psec */
/* t_hclk is the DUnit (half-rate) clock period in ps; the refresh
 * counter is the ratio tREFI / t_hclk */
1387 t_hclk = MEGA / (freq_val[frequency] / 2);
1388 refresh_interval_cnt = t_refi / t_hclk; /* no units */
1389 val = 0x4000 | refresh_interval_cnt;
1390 CHECK_STATUS(ddr3_tip_if_write
1391 (dev_num, access_type, if_id,
1392 SDRAM_CONFIGURATION_REG, val, 0x7fff));
1394 /* DFS - CL/CWL/WR parameters after exiting SR */
1395 CHECK_STATUS(ddr3_tip_if_write
1396 (dev_num, access_type, if_id, DFS_REG,
1397 (cl_mask_table[cl_value] << 8), 0xf00));
1398 CHECK_STATUS(ddr3_tip_if_write
1399 (dev_num, access_type, if_id, DFS_REG,
1400 (cwl_mask_table[cwl_value] << 12), 0x7000));
1402 t_ckclk = MEGA / freq_val[frequency];
1403 t_wr = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1407 CHECK_STATUS(ddr3_tip_if_write
1408 (dev_num, access_type, if_id, DFS_REG,
1409 (twr_mask_table[t_wr + 1] << 16), 0x70000));
1411 /* Restore original RTT values if returning from DLL OFF mode */
1412 if (is_dll_off == 1) {
1413 CHECK_STATUS(ddr3_tip_if_write
1414 (dev_num, access_type, if_id, 0x1874,
1415 g_dic | g_rtt_nom, 0x266));
1416 CHECK_STATUS(ddr3_tip_if_write
1417 (dev_num, access_type, if_id, 0x1884,
1418 g_dic | g_rtt_nom, 0x266));
1419 CHECK_STATUS(ddr3_tip_if_write
1420 (dev_num, access_type, if_id, 0x1894,
1421 g_dic | g_rtt_nom, 0x266));
1422 CHECK_STATUS(ddr3_tip_if_write
1423 (dev_num, access_type, if_id, 0x18a4,
1424 g_dic | g_rtt_nom, 0x266));
1427 /* Reset Diver_b assert -> de-assert */
1428 CHECK_STATUS(ddr3_tip_if_write
1429 (dev_num, access_type, if_id,
1430 SDRAM_CONFIGURATION_REG, 0, 0x10000000));
1432 CHECK_STATUS(ddr3_tip_if_write
1433 (dev_num, access_type, if_id,
1434 SDRAM_CONFIGURATION_REG, 0x10000000, 0x10000000));
1436 /* Adll configuration function of process and Frequency */
1437 if (config_func_info[dev_num].tip_get_freq_config_info_func != NULL) {
1438 CHECK_STATUS(config_func_info[dev_num].
1439 tip_get_freq_config_info_func(dev_num, frequency,
1440 &freq_config_info));
1442 /* TBD check milo5 using device ID ? */
1443 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
1445 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
1446 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1447 (dev_num, ACCESS_TYPE_UNICAST,
1448 if_id, bus_cnt, DDR_PHY_DATA,
1452 /*freq_mask[dev_num][frequency] << 8 */
1454 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1455 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1456 bus_cnt, DDR_PHY_DATA, 0x94,
1457 freq_config_info.rate_per_freq, 0x7));
1460 /* DUnit to Phy drive post edge, ADLL reset assert de-assert */
1461 CHECK_STATUS(ddr3_tip_if_write
1462 (dev_num, access_type, if_id,
1463 DRAM_PHY_CONFIGURATION, 0,
1464 (0x80000000 | 0x40000000)));
/* delay scaled so that lower target frequencies wait longer */
1465 mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
1466 CHECK_STATUS(ddr3_tip_if_write
1467 (dev_num, access_type, if_id,
1468 DRAM_PHY_CONFIGURATION, (0x80000000 | 0x40000000),
1469 (0x80000000 | 0x40000000)));
1471 /* polling for ADLL Done */
1472 if (ddr3_tip_if_polling
1473 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ff03ff,
1474 0x3ff03ff, PHY_LOCK_STATUS_REG,
1475 MAX_POLLING_ITERATIONS) != MV_OK) {
1476 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1477 ("Freq_set: DDR3 poll failed(1)\n"));
1480 /* pup data_pup reset assert-> deassert */
1481 CHECK_STATUS(ddr3_tip_if_write
1482 (dev_num, access_type, if_id,
1483 SDRAM_CONFIGURATION_REG, 0, 0x60000000));
1485 CHECK_STATUS(ddr3_tip_if_write
1486 (dev_num, access_type, if_id,
1487 SDRAM_CONFIGURATION_REG, 0x60000000, 0x60000000));
1489 /* Set proper timing params before existing Self-Refresh */
1490 ddr3_tip_set_timing(dev_num, access_type, if_id, frequency);
1491 if (delay_enable != 0) {
/* one ADLL tap = clock period / 64, in ps */
1492 adll_tap = MEGA / (freq_val[frequency] * 64);
1493 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
/* DFS - exit self-refresh and wait for completion */
1497 CHECK_STATUS(ddr3_tip_if_write
1498 (dev_num, access_type, if_id, DFS_REG, 0,
1500 if (ddr3_tip_if_polling
1501 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x8, DFS_REG,
1502 MAX_POLLING_ITERATIONS) != MV_OK) {
1503 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1504 ("Freq_set: DDR3 poll failed(2)"));
1507 /* Refresh Command */
1508 CHECK_STATUS(ddr3_tip_if_write
1509 (dev_num, access_type, if_id,
1510 SDRAM_OPERATION_REG, 0x2, 0xf1f));
1511 if (ddr3_tip_if_polling
1512 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
1513 SDRAM_OPERATION_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
1514 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1515 ("Freq_set: DDR3 poll failed(3)"));
1518 /* Release DFS Block */
1519 CHECK_STATUS(ddr3_tip_if_write
1520 (dev_num, access_type, if_id, DFS_REG, 0,
1522 /* Controller to MBUS Retry - normal */
1523 CHECK_STATUS(ddr3_tip_if_write
1524 (dev_num, access_type, if_id, DUNIT_MMASK_REG,
1527 /* MRO: Burst Length 8, CL , Auto_precharge 0x16cc */
/* CL is split in MR0: bit0 of the encoded CL lands in bit 2,
 * bits 3:1 land in bits 6:4 (hence the two-part shift/mask) */
1529 ((cl_mask_table[cl_value] & 0x1) << 2) |
1530 ((cl_mask_table[cl_value] & 0xe) << 3);
1531 CHECK_STATUS(ddr3_tip_if_write
1532 (dev_num, access_type, if_id, MR0_REG,
1533 val, (0x7 << 4) | (1 << 2)));
1534 /* MR2: CWL = 10 , Auto Self-Refresh - disable */
1535 val = (cwl_mask_table[cwl_value] << 3);
1537 * nklein 24.10.13 - should not be here - leave value as set in
1538 * the init configuration val |= (1 << 9);
1539 * val |= ((tm->interface_params[if_id].
1540 * interface_temp == HWS_TEMP_HIGH) ? (1 << 7) : 0);
1542 /* nklein 24.10.13 - see above comment */
1543 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
/* ODT timing: the four nibbles encode windows derived from CL-CWL */
1548 val = ((cl_value - cwl_value + 1) << 4) |
1549 ((cl_value - cwl_value + 6) << 8) |
1550 ((cl_value - 1) << 12) | ((cl_value + 6) << 16);
1551 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1552 if_id, ODT_TIMING_LOW,
1554 val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
1555 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1556 if_id, ODT_TIMING_HI_REG,
1560 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1562 DUNIT_ODT_CONTROL_REG,
/* re-issue MR0 (CL) and MR2 (CWL) to the DRAM via MRS commands */
1566 val = ((cl_mask_table[cl_value] & 0x1) << 2) |
1567 ((cl_mask_table[cl_value] & 0xe) << 3);
1568 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1570 (0x7 << 4) | (1 << 2)));
1573 val = (cwl_mask_table[cwl_value] << 3);
1574 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MRS2_CMD,
1576 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1577 0, MR2_REG, val, (0x7 << 3)));
1579 if (mem_mask != 0) {
1580 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
/*
 * ddr3_tip_write_odt() - program the ODT (On-Die Termination) timing
 * windows, derived from the CAS latency (cl_value) and CAS write
 * latency (cwl_value), into ODT_TIMING_LOW/HI and the DUnit ODT
 * control register.  Returns MV_OK, or fails early via CHECK_STATUS.
 */
1593 static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
1594 u32 if_id, u32 cl_value, u32 cwl_value)
/* val starts as the read-ODT de-assert point (CL - CWL + 6) and is
 * then split into 4-bit fields plus overflow bits 21..23 */
1597 u32 val = (cl_value - cwl_value + 6);
1599 val = ((cl_value - cwl_value + 1) << 4) | ((val & 0xf) << 8) |
1600 (((cl_value - 1) & 0xf) << 12) |
1601 (((cl_value + 6) & 0xf) << 16) | (((val & 0x10) >> 4) << 21);
1602 val |= (((cl_value - 1) >> 4) << 22) | (((cl_value + 6) >> 4) << 23);
1604 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1605 ODT_TIMING_LOW, val, 0xffff0));
1606 val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
1607 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1608 ODT_TIMING_HI_REG, val, 0xffff));
/* odt_additional is a file-scope tunable (default 1, see head) */
1609 if (odt_additional == 1) {
1610 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1612 SDRAM_ODT_CONTROL_HIGH_REG,
/* enable ODT on all four CS lines */
1617 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1618 DUNIT_ODT_CONTROL_REG, 0xf, 0xf));
1624 * Set Timing values for training
/*
 * ddr3_tip_set_timing() - compute the JEDEC DDR3 timing parameters
 * (tRAS, tRCD, tRP, tWR, tWTR, tRRD, tRTP, tRFC, tMOD) for the given
 * frequency from the interface's speed-bin table, convert them from
 * picoseconds to clock cycles, and program SDRAM_TIMING_LOW_REG and
 * SDRAM_TIMING_HIGH_REG.
 */
1626 static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
1627 u32 if_id, enum hws_ddr_freq frequency)
1629 u32 t_ckclk = 0, t_ras = 0;
1630 u32 t_rcd = 0, t_rp = 0, t_wr = 0, t_wtr = 0, t_rrd = 0, t_rtp = 0,
1631 t_rfc = 0, t_mod = 0;
1632 u32 val = 0, page_size = 0;
1633 enum hws_speed_bin speed_bin_index;
1634 enum hws_mem_size memory_size = MEM_2G;
1635 struct hws_topology_map *tm = ddr3_get_topology_map();
1637 speed_bin_index = tm->interface_params[if_id].speed_bin_index;
1638 memory_size = tm->interface_params[if_id].memory_size;
/* page size depends on device width (x8 vs x16) and density */
1640 (tm->interface_params[if_id].bus_width ==
1641 BUS_WIDTH_8) ? page_param[memory_size].
1642 page_size_8bit : page_param[memory_size].page_size_16bit;
/* clock period in ps (freq_val[] is in MHz, MEGA = 1e6) */
1643 t_ckclk = (MEGA / freq_val[frequency]);
1644 t_rrd = (page_size == 1) ? speed_bin_table(speed_bin_index,
1646 speed_bin_table(speed_bin_index, SPEED_BIN_TRRD2K);
/* tRRD/tRTP/tWTR have a floor of 4 clock cycles per DDR3 spec */
1647 t_rrd = GET_MAX_VALUE(t_ckclk * 4, t_rrd);
1648 t_rtp = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index,
1650 t_wtr = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index,
1652 t_ras = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1655 t_rcd = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1658 t_rp = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1661 t_wr = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1664 t_wtr = TIME_2_CLOCK_CYCLES(t_wtr, t_ckclk);
1665 t_rrd = TIME_2_CLOCK_CYCLES(t_rrd, t_ckclk);
1666 t_rtp = TIME_2_CLOCK_CYCLES(t_rtp, t_ckclk);
/* rfc_table[] is in ns, hence the *1000 to get ps */
1667 t_rfc = TIME_2_CLOCK_CYCLES(rfc_table[memory_size] * 1000, t_ckclk);
/* tMOD = max(24 nCK, 15 ns) per DDR3 spec */
1668 t_mod = GET_MAX_VALUE(t_ckclk * 24, 15000);
1669 t_mod = TIME_2_CLOCK_CYCLES(t_mod, t_ckclk);
1671 /* SDRAM Timing Low */
1672 val = (t_ras & 0xf) | (t_rcd << 4) | (t_rp << 8) | (t_wr << 12) |
1673 (t_wtr << 16) | (((t_ras & 0x30) >> 4) << 20) | (t_rrd << 24) |
1675 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1676 SDRAM_TIMING_LOW_REG, val, 0xff3fffff));
1678 /* SDRAM Timing High */
/* tRFC is split: low 7 bits here, bits 9:7 go to field at bit 16 */
1679 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1680 SDRAM_TIMING_HIGH_REG,
1681 t_rfc & 0x7f, 0x7f));
1682 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1683 SDRAM_TIMING_HIGH_REG,
1685 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1686 SDRAM_TIMING_HIGH_REG,
1688 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1689 SDRAM_TIMING_HIGH_REG,
1691 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1692 SDRAM_TIMING_HIGH_REG,
1693 ((t_rfc & 0x380) >> 7) << 16, 0x70000));
1694 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1695 SDRAM_TIMING_HIGH_REG, 0,
/* tMOD is likewise split across bit 25 (low nibble) and bit 30 */
1697 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1698 SDRAM_TIMING_HIGH_REG,
1699 (t_mod & 0xf) << 25, 0x1e00000));
1700 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1701 SDRAM_TIMING_HIGH_REG,
1702 (t_mod >> 4) << 30, 0xc0000000));
/* NOTE(review): the following two fixed-value writes overlap the
 * mask ranges of the t_mod writes above — presumably intentional
 * overrides, but worth confirming against the register spec */
1703 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1704 SDRAM_TIMING_HIGH_REG,
1705 0x16000000, 0x1e000000));
1706 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1707 SDRAM_TIMING_HIGH_REG,
1708 0x40000000, 0xc0000000));
/*
 * hws_ddr3_tip_mode_read() - snapshot the controller's mode registers
 * (MR0..MR3) and the read-data sample/ready delay registers into the
 * caller-supplied mode_info structure (multicast read over all
 * interfaces).  Each read's return code is checked (the checks fall
 * in lines missing from this fragmentary view).
 */
1716 int hws_ddr3_tip_mode_read(u32 dev_num, struct mode_info *mode_info)
1720 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1721 MR0_REG, mode_info->reg_mr0, MASK_ALL_BITS);
1725 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1726 MR1_REG, mode_info->reg_mr1, MASK_ALL_BITS);
1730 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1731 MR2_REG, mode_info->reg_mr2, MASK_ALL_BITS);
/* NOTE(review): MR3_REG is read into reg_mr2, overwriting the MR2
 * snapshot taken just above — looks like a copy-paste bug; should
 * presumably target a reg_mr3 field if struct mode_info has one.
 * Confirm against the mode_info declaration before changing. */
1735 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1736 MR3_REG, mode_info->reg_mr2, MASK_ALL_BITS);
1740 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1741 READ_DATA_SAMPLE_DELAY, mode_info->read_data_sample,
1746 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1747 READ_DATA_READY_DELAY, mode_info->read_data_ready,
1756 * Get first active IF
/*
 * ddr3_tip_get_first_active_if() - scan interfaces in ascending order
 * and store in *interface_id the lowest one that is both active in
 * the topology map and set in interface_mask.
 */
1758 int ddr3_tip_get_first_active_if(u8 dev_num, u32 interface_mask,
1762 struct hws_topology_map *tm = ddr3_get_topology_map();
1764 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1765 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1766 if (interface_mask & (1 << if_id)) {
1767 *interface_id = if_id;
/*
 * ddr3_tip_write_cs_result() - propagate per-CS training results: for
 * every bus whose configured cs_bitmask differs from the CS that was
 * just trained (effective_cs), read the trained PHY value at
 * offset + CS_REG_VALUE(effective_cs) and write it back at that bus's
 * own CS register offset.
 */
1778 int ddr3_tip_write_cs_result(u32 dev_num, u32 offset)
1780 u32 if_id, bus_num, cs_bitmask, data_val, cs_num;
1781 struct hws_topology_map *tm = ddr3_get_topology_map();
1783 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1784 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1785 for (bus_num = 0; bus_num < tm->num_of_bus_per_interface;
1787 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
1789 tm->interface_params[if_id].
1790 as_bus_params[bus_num].cs_bitmask;
1791 if (cs_bitmask != effective_cs) {
1792 cs_num = GET_CS_FROM_MASK(cs_bitmask);
1793 ddr3_tip_bus_read(dev_num, if_id,
1794 ACCESS_TYPE_UNICAST, bus_num,
1797 CS_REG_VALUE(effective_cs),
1799 ddr3_tip_bus_write(dev_num,
1800 ACCESS_TYPE_UNICAST,
1802 ACCESS_TYPE_UNICAST,
1803 bus_num, DDR_PHY_DATA,
1805 CS_REG_VALUE(cs_num),
/*
 * ddr3_tip_write_mrs_cmd() - update the shadow mode register (MR1 or
 * MR2 depending on cmd), then issue the MRS command through
 * SDRAM_OPERATION_REG on every active interface with that interface's
 * CS mask, and poll until the operation field returns to idle.
 */
1817 int ddr3_tip_write_mrs_cmd(u32 dev_num, u32 *cs_mask_arr, u32 cmd,
1821 struct hws_topology_map *tm = ddr3_get_topology_map();
1823 reg = (cmd == MRS1_CMD) ? MR1_REG : MR2_REG;
1824 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1825 PARAM_NOT_CARE, reg, data, mask));
1826 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1827 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
/* CS mask in bits 11:8, command code in bits 4:0 */
1828 CHECK_STATUS(ddr3_tip_if_write
1829 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1830 SDRAM_OPERATION_REG,
1831 (cs_mask_arr[if_id] << 8) | cmd, 0xf1f));
1834 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1835 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1836 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
1837 0x1f, SDRAM_OPERATION_REG,
1838 MAX_POLLING_ITERATIONS) != MV_OK) {
1839 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1840 ("write_mrs_cmd: Poll cmd fail"));
1848 * Reset XSB Read FIFO
/*
 * ddr3_tip_reset_fifo_ptr() - flush the XSB read FIFO: temporarily put
 * the PHY into RL (read-leveling) mode, pulse the read-FIFO reset bit
 * (assert then de-assert), return the PHY to functional mode, and
 * stop the training state machine.  All writes are multicast.
 */
1850 int ddr3_tip_reset_fifo_ptr(u32 dev_num)
1854 /* Configure PHY reset value to 0 in order to "clean" the FIFO */
1855 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1856 if_id, 0x15c8, 0, 0xff000000));
1858 * Move PHY to RL mode (only in RL mode the PHY overrides FIFO values
1859 * during FIFO reset)
1861 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1862 if_id, TRAINING_SW_2_REG,
1864 /* In order that above configuration will influence the PHY */
1865 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1867 0x80000000, 0x80000000));
1868 /* Reset read fifo assertion */
1869 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1870 if_id, 0x1400, 0, 0x40000000));
1871 /* Reset read fifo deassertion */
1872 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1874 0x40000000, 0x40000000));
1875 /* Move PHY back to functional mode */
1876 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1877 if_id, TRAINING_SW_2_REG,
1879 /* Stop training machine */
1880 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1881 if_id, 0x15b4, 0x10000, 0x10000));
1887 * Reset Phy registers
/*
 * ddr3_tip_ddr3_reset_phy_regs() - restore the per-PHY training
 * registers (write-leveling, read-leveling, read/write centralization)
 * of the current effective_cs to their defaults on every active
 * bus/interface, then set the receiver calibration (CSN_IOB_VREF_REG)
 * of every CS to its maximum (63).
 */
1889 int ddr3_tip_ddr3_reset_phy_regs(u32 dev_num)
1891 u32 if_id, phy_id, cs;
1892 struct hws_topology_map *tm = ddr3_get_topology_map();
1894 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1895 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1896 for (phy_id = 0; phy_id < tm->num_of_bus_per_interface;
1898 VALIDATE_ACTIVE(tm->bus_act_mask, phy_id);
/* write-leveling PHY register (target register offset is on a
 * line missing from this fragmentary view) */
1899 CHECK_STATUS(ddr3_tip_bus_write
1900 (dev_num, ACCESS_TYPE_UNICAST,
1901 if_id, ACCESS_TYPE_UNICAST,
1902 phy_id, DDR_PHY_DATA,
1904 CS_REG_VALUE(effective_cs),
/* read-leveling PHY register */
1906 CHECK_STATUS(ddr3_tip_bus_write
1907 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1908 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1909 RL_PHY_REG + CS_REG_VALUE(effective_cs),
/* read/write centralization both reset to phy_reg3_val (0xa,
 * see file head) */
1911 CHECK_STATUS(ddr3_tip_bus_write
1912 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1913 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1914 READ_CENTRALIZATION_PHY_REG +
1915 CS_REG_VALUE(effective_cs), phy_reg3_val));
1916 CHECK_STATUS(ddr3_tip_bus_write
1917 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1918 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1919 WRITE_CENTRALIZATION_PHY_REG +
1920 CS_REG_VALUE(effective_cs), phy_reg3_val));
1924 /* Set Receiver Calibration value */
1925 for (cs = 0; cs < MAX_CS_NUM; cs++) {
1926 /* PHY register 0xdb bits[5:0] - configure to 63 */
1927 CHECK_STATUS(ddr3_tip_bus_write
1928 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1929 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1930 DDR_PHY_DATA, CSN_IOB_VREF_REG(cs), 63));
1937 * Restore Dunit registers
/*
 * ddr3_tip_restore_dunit_regs() - after training completes, restore
 * DUnit registers to their run-time values: re-program the calibration
 * machine control (using the user-selected calibration_update_control,
 * shifted to bit 3), disable the ODPG write/read mode, and replay the
 * table of default ODPG register values.
 */
1939 int ddr3_tip_restore_dunit_regs(u32 dev_num)
1943 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1944 PARAM_NOT_CARE, CALIB_MACHINE_CTRL_REG,
1946 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1947 PARAM_NOT_CARE, CALIB_MACHINE_CTRL_REG,
1948 calibration_update_control << 3,
1950 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1952 ODPG_WRITE_READ_MODE_ENABLE_REG,
1953 0xffff, MASK_ALL_BITS));
1955 for (index_cnt = 0; index_cnt < ARRAY_SIZE(odpg_default_value);
1957 CHECK_STATUS(ddr3_tip_if_write
1958 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1959 odpg_default_value[index_cnt].reg_addr,
1960 odpg_default_value[index_cnt].reg_data,
1961 odpg_default_value[index_cnt].reg_mask));
1968 * Auto tune main flow
/*
 * ddr3_tip_ddr3_training_main_flow() - the full DDR3 auto-tune state
 * machine.  Runs each training stage that is enabled in the global
 * mask_tune_func bitmask, in a fixed order: init controller, (static
 * leveling), low freq, load pattern, medium freq, write leveling, read
 * leveling, PBS RX/TX, target freq, TF leveling, vref calibration, RX/TX
 * centralization, then restores DUnit registers.  Per-CS stages iterate
 * effective_cs over [0, max_cs).  On a stage failure the error is
 * logged and, unless debug_mode is set, the flow aborts (the early
 * return lines fall in gaps of this fragmentary extraction).
 */
1970 static int ddr3_tip_ddr3_training_main_flow(u32 dev_num)
1972 enum hws_ddr_freq freq = init_freq;
1973 struct init_cntr_param init_cntr_prm;
1976 u32 max_cs = hws_ddr3_tip_max_cs_get();
1977 struct hws_topology_map *tm = ddr3_get_topology_map();
1979 #ifndef EXCLUDE_SWITCH_DEBUG
1980 if (debug_training == DEBUG_LEVEL_TRACE) {
1981 CHECK_STATUS(print_device_info((u8)dev_num));
/* start from clean PHY training registers for every CS */
1985 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
1986 CHECK_STATUS(ddr3_tip_ddr3_reset_phy_regs(dev_num));
1988 /* Set to 0 after each loop to avoid illegal value may be used */
1992 if (is_pll_before_init != 0) {
1993 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
1994 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1995 config_func_info[dev_num].tip_set_freq_divider_func(
1996 (u8)dev_num, if_id, freq);
2000 if (is_adll_calib_before_init != 0) {
2001 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2002 ("with adll calib before init\n"));
2003 adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
2006 if (is_reg_dump != 0) {
2007 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2008 ("Dump before init controller\n"));
2009 ddr3_tip_reg_dump(dev_num);
/* Stage: controller initialization */
2012 if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
2013 training_stage = INIT_CONTROLLER;
2014 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2015 ("INIT_CONTROLLER_MASK_BIT\n"));
2016 init_cntr_prm.do_mrs_phy = 1;
2017 init_cntr_prm.is_ctrl64_bit = 0;
2018 init_cntr_prm.init_phy = 1;
2019 init_cntr_prm.msys_init = 0;
2020 ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
2021 if (is_reg_dump != 0)
2022 ddr3_tip_reg_dump(dev_num);
2024 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2025 ("hws_ddr3_tip_init_controller failure\n"));
2026 if (debug_mode == 0)
2031 #ifdef STATIC_ALGO_SUPPORT
/* Stage: static leveling (compile-time optional algorithm) */
2032 if (mask_tune_func & STATIC_LEVELING_MASK_BIT) {
2033 training_stage = STATIC_LEVELING;
2034 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2035 ("STATIC_LEVELING_MASK_BIT\n"));
2036 ret = ddr3_tip_run_static_alg(dev_num, freq);
2037 if (is_reg_dump != 0)
2038 ddr3_tip_reg_dump(dev_num);
2040 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2041 ("ddr3_tip_run_static_alg failure\n"));
2042 if (debug_mode == 0)
/* Stage: drop to low frequency for initial training */
2048 if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
2049 training_stage = SET_LOW_FREQ;
2050 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2051 ("SET_LOW_FREQ_MASK_BIT %d\n",
2052 freq_val[low_freq]));
2053 ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2054 PARAM_NOT_CARE, low_freq);
2055 if (is_reg_dump != 0)
2056 ddr3_tip_reg_dump(dev_num);
2058 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2059 ("ddr3_tip_freq_set failure\n"));
2060 if (debug_mode == 0)
/* Stage: load training patterns to memory, per CS */
2065 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2066 if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
2067 training_stage = LOAD_PATTERN;
2068 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2069 ("LOAD_PATTERN_MASK_BIT #%d\n",
2071 ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2072 if (is_reg_dump != 0)
2073 ddr3_tip_reg_dump(dev_num);
2075 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2076 ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
2078 if (debug_mode == 0)
2083 /* Set to 0 after each loop to avoid illegal value may be used */
/* Stage: raise to medium frequency */
2086 if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
2087 training_stage = SET_MEDIUM_FREQ;
2088 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2089 ("SET_MEDIUM_FREQ_MASK_BIT %d\n",
2090 freq_val[medium_freq]));
2092 ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2093 PARAM_NOT_CARE, medium_freq);
2094 if (is_reg_dump != 0)
2095 ddr3_tip_reg_dump(dev_num);
2097 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2098 ("ddr3_tip_freq_set failure\n"));
2099 if (debug_mode == 0)
/* Stage: dynamic write leveling (legacy path used when the
 * rl_mid_freq_wa workaround applies and medium freq != 533) */
2104 if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
2105 training_stage = WRITE_LEVELING;
2106 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2107 ("WRITE_LEVELING_MASK_BIT\n"));
2108 if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) {
2109 ret = ddr3_tip_dynamic_write_leveling(dev_num);
2112 ret = ddr3_tip_legacy_dynamic_write_leveling(dev_num);
2115 if (is_reg_dump != 0)
2116 ddr3_tip_reg_dump(dev_num);
2118 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2119 ("ddr3_tip_dynamic_write_leveling failure\n"));
2120 if (debug_mode == 0)
/* Stage: reload patterns after write leveling, per CS */
2125 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2126 if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
2127 training_stage = LOAD_PATTERN_2;
2128 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2129 ("LOAD_PATTERN_2_MASK_BIT CS #%d\n",
2131 ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2132 if (is_reg_dump != 0)
2133 ddr3_tip_reg_dump(dev_num);
2135 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2136 ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
2138 if (debug_mode == 0)
2143 /* Set to 0 after each loop to avoid illegal value may be used */
/* Stage: dynamic read leveling at medium frequency */
2146 if (mask_tune_func & READ_LEVELING_MASK_BIT) {
2147 training_stage = READ_LEVELING;
2148 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2149 ("READ_LEVELING_MASK_BIT\n"));
2150 if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) {
2151 ret = ddr3_tip_dynamic_read_leveling(dev_num, medium_freq);
2154 ret = ddr3_tip_legacy_dynamic_read_leveling(dev_num);
2157 if (is_reg_dump != 0)
2158 ddr3_tip_reg_dump(dev_num);
2160 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2161 ("ddr3_tip_dynamic_read_leveling failure\n"));
2162 if (debug_mode == 0)
/* Stage: write-leveling supplementary pass */
2167 if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
2168 training_stage = WRITE_LEVELING_SUPP;
2169 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2170 ("WRITE_LEVELING_SUPP_MASK_BIT\n"));
2171 ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
2172 if (is_reg_dump != 0)
2173 ddr3_tip_reg_dump(dev_num);
2175 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2176 ("ddr3_tip_dynamic_write_leveling_supp failure\n"));
2177 if (debug_mode == 0)
/* Stage: per-bit skew (PBS) receive training, per CS */
2182 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2183 if (mask_tune_func & PBS_RX_MASK_BIT) {
2184 training_stage = PBS_RX;
2185 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2186 ("PBS_RX_MASK_BIT CS #%d\n",
2188 ret = ddr3_tip_pbs_rx(dev_num);
2189 if (is_reg_dump != 0)
2190 ddr3_tip_reg_dump(dev_num);
2192 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2193 ("ddr3_tip_pbs_rx failure CS #%d\n",
2195 if (debug_mode == 0)
/* Stage: per-bit skew (PBS) transmit training, per CS */
2201 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2202 if (mask_tune_func & PBS_TX_MASK_BIT) {
2203 training_stage = PBS_TX;
2204 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2205 ("PBS_TX_MASK_BIT CS #%d\n",
2207 ret = ddr3_tip_pbs_tx(dev_num);
2208 if (is_reg_dump != 0)
2209 ddr3_tip_reg_dump(dev_num);
2211 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2212 ("ddr3_tip_pbs_tx failure CS #%d\n",
2214 if (debug_mode == 0)
2219 /* Set to 0 after each loop to avoid illegal value may be used */
/* Stage: jump to the final target frequency from the topology map */
2222 if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
2223 training_stage = SET_TARGET_FREQ;
2224 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2225 ("SET_TARGET_FREQ_MASK_BIT %d\n",
2227 interface_params[first_active_if].
2229 ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2231 tm->interface_params[first_active_if].
2233 if (is_reg_dump != 0)
2234 ddr3_tip_reg_dump(dev_num);
2236 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2237 ("ddr3_tip_freq_set failure\n"));
2238 if (debug_mode == 0)
/* Stage: write leveling again at target frequency (TF) */
2243 if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
2244 training_stage = WRITE_LEVELING_TF;
2245 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2246 ("WRITE_LEVELING_TF_MASK_BIT\n"));
2247 ret = ddr3_tip_dynamic_write_leveling(dev_num);
2248 if (is_reg_dump != 0)
2249 ddr3_tip_reg_dump(dev_num);
2251 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2252 ("ddr3_tip_dynamic_write_leveling TF failure\n"));
2253 if (debug_mode == 0)
/* Stage: reload patterns at the (higher) target frequency */
2258 if (mask_tune_func & LOAD_PATTERN_HIGH_MASK_BIT) {
2259 training_stage = LOAD_PATTERN_HIGH;
2260 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("LOAD_PATTERN_HIGH\n"));
2261 ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2262 if (is_reg_dump != 0)
2263 ddr3_tip_reg_dump(dev_num);
2265 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2266 ("ddr3_tip_load_all_pattern_to_mem failure\n"));
2267 if (debug_mode == 0)
/* Stage: read leveling at target frequency */
2272 if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
2273 training_stage = READ_LEVELING_TF;
2274 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2275 ("READ_LEVELING_TF_MASK_BIT\n"));
2276 ret = ddr3_tip_dynamic_read_leveling(dev_num, tm->
2277 interface_params[first_active_if].
2279 if (is_reg_dump != 0)
2280 ddr3_tip_reg_dump(dev_num);
2282 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2283 ("ddr3_tip_dynamic_read_leveling TF failure\n"));
2284 if (debug_mode == 0)
/* DM PBS TX is logged but has no implementation visible here */
2289 if (mask_tune_func & DM_PBS_TX_MASK_BIT) {
2290 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_PBS_TX_MASK_BIT\n"));
/* Stage: vref calibration, per CS */
2293 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2294 if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
2295 training_stage = VREF_CALIBRATION;
2296 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("VREF\n"));
2297 ret = ddr3_tip_vref(dev_num);
2298 if (is_reg_dump != 0) {
2299 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2301 ddr3_tip_reg_dump(dev_num);
2304 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2305 ("ddr3_tip_vref failure\n"));
2306 if (debug_mode == 0)
2311 /* Set to 0 after each loop to avoid illegal value may be used */
/* Stage: receive-path centralization, per CS */
2314 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2315 if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
2316 training_stage = CENTRALIZATION_RX;
2317 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2318 ("CENTRALIZATION_RX_MASK_BIT CS #%d\n",
2320 ret = ddr3_tip_centralization_rx(dev_num);
2321 if (is_reg_dump != 0)
2322 ddr3_tip_reg_dump(dev_num);
2324 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2325 ("ddr3_tip_centralization_rx failure CS #%d\n",
2327 if (debug_mode == 0)
2332 /* Set to 0 after each loop to avoid illegal value may be used */
/* Stage: write-leveling supplementary at target freq, per CS */
2335 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2336 if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
2337 training_stage = WRITE_LEVELING_SUPP_TF;
2338 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2339 ("WRITE_LEVELING_SUPP_TF_MASK_BIT CS #%d\n",
2341 ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
2342 if (is_reg_dump != 0)
2343 ddr3_tip_reg_dump(dev_num);
2345 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2346 ("ddr3_tip_dynamic_write_leveling_supp TF failure CS #%d\n",
2348 if (debug_mode == 0)
2353 /* Set to 0 after each loop to avoid illegal value may be used */
/* Stage: transmit-path centralization, per CS */
2356 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2357 if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
2358 training_stage = CENTRALIZATION_TX;
2359 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2360 ("CENTRALIZATION_TX_MASK_BIT CS #%d\n",
2362 ret = ddr3_tip_centralization_tx(dev_num);
2363 if (is_reg_dump != 0)
2364 ddr3_tip_reg_dump(dev_num);
2366 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2367 ("ddr3_tip_centralization_tx failure CS #%d\n",
2369 if (debug_mode == 0)
2374 /* Set to 0 after each loop to avoid illegal value may be used */
2377 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("restore registers to default\n"));
2378 /* restore register values */
2379 CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
2381 if (is_reg_dump != 0)
2382 ddr3_tip_reg_dump(dev_num);
2388 * DDR3 Dynamic training flow
/*
 * ddr3_tip_ddr3_auto_tune() - top-level auto-tune wrapper: clear the
 * per-stage result matrix, run the main training flow, optionally run
 * the XSB validation test and register/log dumps, then scan the
 * result matrix and fail if any stage on any interface failed.
 */
2390 static int ddr3_tip_ddr3_auto_tune(u32 dev_num)
2392 u32 if_id, stage, ret;
2393 int is_if_fail = 0, is_auto_tune_fail = 0;
2395 training_stage = INIT_CONTROLLER;
/* reset all recorded stage results before the run */
2397 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2398 for (stage = 0; stage < MAX_STAGE_LIMIT; stage++)
2399 training_result[stage][if_id] = NO_TEST_DONE;
2402 ret = ddr3_tip_ddr3_training_main_flow(dev_num);
2404 /* activate XSB test */
2405 if (xsb_validate_type != 0) {
2406 run_xsb_test(dev_num, xsb_validation_base_address, 1, 1,
2410 if (is_reg_dump != 0)
2411 ddr3_tip_reg_dump(dev_num);
2414 CHECK_STATUS(ddr3_tip_print_log(dev_num, window_mem_addr));
2417 CHECK_STATUS(ddr3_tip_print_stability_log(dev_num));
/* any TEST_FAILED entry in the result matrix marks the whole tune
 * as failed for that interface */
2420 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2422 for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) {
2423 if (training_result[stage][if_id] == TEST_FAILED)
2426 if (is_if_fail == 1) {
2427 is_auto_tune_fail = 1;
2428 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2429 ("Auto Tune failed for IF %d\n",
2434 if ((ret == MV_FAIL) || (is_auto_tune_fail == 1))
2441 * Enable init sequence
/*
 * ddr3_tip_enable_init_sequence() - kick off the SDRAM hardware init
 * sequence (bit 0 of SDRAM_INIT_CONTROL_REG), poll each active
 * interface until the bit self-clears, then for interfaces with
 * address mirroring configured, disable multi-CS in CS_ENABLE_REG.
 * Returns MV_OK unless a poll timed out (is_fail set).
 */
2443 int ddr3_tip_enable_init_sequence(u32 dev_num)
2446 u32 if_id = 0, mem_mask = 0, bus_index = 0;
2447 struct hws_topology_map *tm = ddr3_get_topology_map();
2449 /* Enable init sequence */
2450 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 0,
2451 SDRAM_INIT_CONTROL_REG, 0x1, 0x1));
2453 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2454 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
/* wait for the init-done indication (bit 0 back to 0) */
2456 if (ddr3_tip_if_polling
2457 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1,
2458 SDRAM_INIT_CONTROL_REG,
2459 MAX_POLLING_ITERATIONS) != MV_OK) {
2460 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2461 ("polling failed IF %d\n",
/* OR together the per-bus mirror masks for this interface */
2468 for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
2470 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
2472 tm->interface_params[if_id].
2473 as_bus_params[bus_index].mirror_enable_bitmask;
2476 if (mem_mask != 0) {
2477 /* Disable Multi CS */
2478 CHECK_STATUS(ddr3_tip_if_write
2479 (dev_num, ACCESS_TYPE_MULTICAST,
2480 if_id, CS_ENABLE_REG, 1 << 3,
2485 return (is_fail == 0) ? MV_OK : MV_FAIL;
2488 int ddr3_tip_register_dq_table(u32 dev_num, u32 *table)
2490 dq_map_table = table;
2496 * Check if pup search is locked
2498 int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode)
2500 u32 bit_start = 0, bit_end = 0, bit_id;
2502 if (read_mode == RESULT_PER_BIT) {
2504 bit_end = BUS_WIDTH_IN_BITS - 1;
2510 for (bit_id = bit_start; bit_id <= bit_end; bit_id++) {
2511 if (GET_LOCK_RESULT(pup_buf[bit_id]) == 0)
2519 * Get minimum buffer value
2521 u8 ddr3_tip_get_buf_min(u8 *buf_ptr)
2526 for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
2527 if (buf_ptr[cnt] < min_val)
2528 min_val = buf_ptr[cnt];
2535 * Get maximum buffer value
2537 u8 ddr3_tip_get_buf_max(u8 *buf_ptr)
2542 for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
2543 if (buf_ptr[cnt] > max_val)
2544 max_val = buf_ptr[cnt];
2551 * The following functions return memory parameters:
2552 * bus and device width, device size
2555 u32 hws_ddr3_get_bus_width(void)
2557 struct hws_topology_map *tm = ddr3_get_topology_map();
2559 return (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) ==
2563 u32 hws_ddr3_get_device_width(u32 if_id)
2565 struct hws_topology_map *tm = ddr3_get_topology_map();
2567 return (tm->interface_params[if_id].bus_width ==
2568 BUS_WIDTH_8) ? 8 : 16;
2571 u32 hws_ddr3_get_device_size(u32 if_id)
2573 struct hws_topology_map *tm = ddr3_get_topology_map();
2575 if (tm->interface_params[if_id].memory_size >=
2577 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2578 ("Error: Wrong device size of Cs: %d",
2579 tm->interface_params[if_id].memory_size));
2582 return 1 << tm->interface_params[if_id].memory_size;
2586 int hws_ddr3_calc_mem_cs_size(u32 if_id, u32 cs, u32 *cs_size)
2588 u32 cs_mem_size, dev_size;
2590 dev_size = hws_ddr3_get_device_size(if_id);
2591 if (dev_size != 0) {
2592 cs_mem_size = ((hws_ddr3_get_bus_width() /
2593 hws_ddr3_get_device_width(if_id)) * dev_size);
2595 /* the calculated result in Gbytex16 to avoid float using */
2597 if (cs_mem_size == 2) {
2599 } else if (cs_mem_size == 4) {
2601 } else if (cs_mem_size == 8) {
2603 } else if (cs_mem_size == 16) {
2605 } else if (cs_mem_size == 32) {
2608 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2609 ("Error: Wrong Memory size of Cs: %d", cs));
2618 int hws_ddr3_cs_base_adr_calc(u32 if_id, u32 cs, u32 *cs_base_addr)
2620 u32 cs_mem_size = 0;
2621 #ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
2622 u32 physical_mem_size;
2623 u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
2626 if (hws_ddr3_calc_mem_cs_size(if_id, cs, &cs_mem_size) != MV_OK)
2629 #ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
2630 struct hws_topology_map *tm = ddr3_get_topology_map();
2632 * if number of address pins doesn't allow to use max mem size that
2633 * is defined in topology mem size is defined by
2634 * DEVICE_MAX_DRAM_ADDRESS_SIZE
2637 mv_hwsmem_size[tm->interface_params[0].memory_size];
2639 if (hws_ddr3_get_device_width(cs) == 16) {
2641 * 16bit mem device can be twice more - no need in less
2644 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
2647 if (physical_mem_size > max_mem_size) {
2648 cs_mem_size = max_mem_size *
2649 (hws_ddr3_get_bus_width() /
2650 hws_ddr3_get_device_width(if_id));
2651 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2652 ("Updated Physical Mem size is from 0x%x to %x\n",
2654 DEVICE_MAX_DRAM_ADDRESS_SIZE));
2658 /* calculate CS base addr */
2659 *cs_base_addr = ((cs_mem_size) * cs) & 0xffff0000;