1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) Marvell International Ltd. and its affiliates
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
15 #define A38X_NUMBER_OF_INTERFACES 5
17 #define SAR_DEV_ID_OFFS 27
18 #define SAR_DEV_ID_MASK 0x7
20 /* Thermal Sensor Registers */
21 #define TSEN_STATE_REG 0xe4070
22 #define TSEN_STATE_OFFSET 31
23 #define TSEN_STATE_MASK (0x1 << TSEN_STATE_OFFSET)
24 #define TSEN_CONF_REG 0xe4074
25 #define TSEN_CONF_RST_OFFSET 8
26 #define TSEN_CONF_RST_MASK (0x1 << TSEN_CONF_RST_OFFSET)
27 #define TSEN_STATUS_REG 0xe4078
28 #define TSEN_STATUS_READOUT_VALID_OFFSET 10
29 #define TSEN_STATUS_READOUT_VALID_MASK (0x1 << \
30 TSEN_STATUS_READOUT_VALID_OFFSET)
31 #define TSEN_STATUS_TEMP_OUT_OFFSET 0
32 #define TSEN_STATUS_TEMP_OUT_MASK (0x3ff << TSEN_STATUS_TEMP_OUT_OFFSET)
34 static struct dfx_access interface_map[] = {
50 /* This array hold the board round trip delay (DQ and CK) per <interface,bus> */
51 struct trip_delay_element a38x_board_round_trip_delay_array[] = {
53 /* Interface bus DQS-delay CK-delay */
58 { 4282, 6086 }, /* ECC PUP */
63 { 4282, 6160 }, /* ECC PUP */
66 /* Interface bus DQS-delay CK-delay */
71 { 4282, 6086 }, /* ECC PUP */
76 { 4282, 6160 } /* ECC PUP */
79 #ifdef STATIC_ALGO_SUPPORT
81 static struct trip_delay_element a38x_package_round_trip_delay_array[] = {
82 /* IF BUS DQ_DELAY CK_DELAY */
105 static int a38x_silicon_delay_offset[] = {
/*
 * Per-frequency DDR bus-width setting, indexed by enum hws_ddr_freq.
 * Consumed by ddr3_tip_a38x_get_freq_config(); an entry of 0xff marks
 * an unsupported frequency there.
 */
115 static u8 a38x_bw_per_freq[DDR_FREQ_LIMIT] = {
116 0x3, /* DDR_FREQ_100 */
117 0x4, /* DDR_FREQ_400 */
118 0x4, /* DDR_FREQ_533 */
119 0x5, /* DDR_FREQ_667 */
120 0x5, /* DDR_FREQ_800 */
121 0x5, /* DDR_FREQ_933 */
122 0x5, /* DDR_FREQ_1066 */
123 0x3, /* DDR_FREQ_311 */
124 0x3, /* DDR_FREQ_333 */
125 0x4, /* DDR_FREQ_467 */
126 0x5, /* DDR_FREQ_850 */
127 0x5, /* DDR_FREQ_600 */
128 0x3, /* DDR_FREQ_300 */
129 0x5, /* DDR_FREQ_900 */
130 0x3, /* DDR_FREQ_360 */
131 0x5 /* DDR_FREQ_1000 */
/*
 * Per-frequency rate setting, indexed by enum hws_ddr_freq; paired with
 * a38x_bw_per_freq and returned via ddr3_tip_a38x_get_freq_config().
 */
134 static u8 a38x_rate_per_freq[DDR_FREQ_LIMIT] = {
135 /*TBD*/ 0x1, /* DDR_FREQ_100 */
136 0x2, /* DDR_FREQ_400 */
137 0x2, /* DDR_FREQ_533 */
138 0x2, /* DDR_FREQ_667 */
139 0x2, /* DDR_FREQ_800 */
140 0x3, /* DDR_FREQ_933 */
141 0x3, /* DDR_FREQ_1066 */
142 0x1, /* DDR_FREQ_311 */
143 0x1, /* DDR_FREQ_333 */
144 0x2, /* DDR_FREQ_467 */
145 0x2, /* DDR_FREQ_850 */
146 0x2, /* DDR_FREQ_600 */
147 0x1, /* DDR_FREQ_300 */
148 0x2, /* DDR_FREQ_900 */
149 0x1, /* DDR_FREQ_360 */
150 0x2 /* DDR_FREQ_1000 */
153 static u16 a38x_vco_freq_per_sar[] = {
187 u32 pipe_multicast_mask;
189 u32 dq_bit_map_2_phy_pin[] = {
190 1, 0, 2, 6, 9, 8, 3, 7, /* 0 */
191 8, 9, 1, 7, 2, 6, 3, 0, /* 1 */
192 3, 9, 7, 8, 1, 0, 2, 6, /* 2 */
193 1, 0, 6, 2, 8, 3, 7, 9, /* 3 */
194 0, 1, 2, 9, 7, 8, 3, 6, /* 4 */
197 static int ddr3_tip_a38x_set_divider(u8 dev_num, u32 if_id,
198 enum hws_ddr_freq freq);
201 * Read temperature TJ value
/*
 * Read the junction temperature (TJ) from the on-die thermal sensor
 * (TSEN): release the sensor from reset on first use, verify that the
 * readout field is valid, then convert the raw 10-bit reading to
 * degrees Celsius using integer fixed-point math.
 */
203 u32 ddr3_ctrl_get_junc_temp(u8 dev_num)
207 /* Initiates TSEN hardware reset once */
208 if ((reg_read(TSEN_CONF_REG) & TSEN_CONF_RST_MASK) == 0)
209 reg_bit_set(TSEN_CONF_REG, TSEN_CONF_RST_MASK);
212 /* Check if the readout field is valid */
213 if ((reg_read(TSEN_STATUS_REG) & TSEN_STATUS_READOUT_VALID_MASK) == 0) {
214 printf("%s: TSEN not ready\n", __func__);
/* Extract the raw 10-bit temperature field from the status register */
218 reg = reg_read(TSEN_STATUS_REG);
219 reg = (reg & TSEN_STATUS_TEMP_OUT_MASK) >> TSEN_STATUS_TEMP_OUT_OFFSET;
/*
 * Raw-to-Celsius conversion kept in integer math (intermediate values
 * scaled by 1000). NOTE(review): the calibration coefficients
 * (10000/21445, -272.674) are assumed from this code only — confirm
 * against the A38x thermal sensor specification.
 */
221 return ((((10000 * reg) / 21445) * 1000) - 272674) / 1000;
225 * Name: ddr3_tip_a38x_get_freq_config.
229 * Returns: MV_OK if success, other error code if fail.
231 int ddr3_tip_a38x_get_freq_config(u8 dev_num, enum hws_ddr_freq freq,
232 struct hws_tip_freq_config_info
235 if (a38x_bw_per_freq[freq] == 0xff)
236 return MV_NOT_SUPPORTED;
238 if (freq_config_info == NULL)
241 freq_config_info->bw_per_freq = a38x_bw_per_freq[freq];
242 freq_config_info->rate_per_freq = a38x_rate_per_freq[freq];
243 freq_config_info->is_supported = 1;
249 * Name: ddr3_tip_a38x_pipe_enable.
253 * Returns: MV_OK if success, other error code if fail.
/*
 * Enable or disable the DFX pipe(s) used to reach an interface: build a
 * pipe-enable bit mask (all multicast pipes, or the single pipe mapped
 * to this interface for unicast), then read-modify-write the low byte
 * of PIPE_ENABLE_ADDR.
 */
255 int ddr3_tip_a38x_pipe_enable(u8 dev_num, enum hws_access_type interface_access,
256 u32 if_id, int enable)
258 u32 data_value, pipe_enable_mask = 0;
261 pipe_enable_mask = 0;
/* Multicast access enables every pipe in the global multicast mask... */
263 if (interface_access == ACCESS_TYPE_MULTICAST)
264 pipe_enable_mask = pipe_multicast_mask;
/* ...unicast enables only the pipe mapped to this interface */
266 pipe_enable_mask = (1 << interface_map[if_id].pipe);
/* Read-modify-write: replace only the low pipe-enable byte */
269 CHECK_STATUS(ddr3_tip_reg_read
270 (dev_num, PIPE_ENABLE_ADDR, &data_value, MASK_ALL_BITS));
271 data_value = (data_value & (~0xff)) | pipe_enable_mask;
272 CHECK_STATUS(ddr3_tip_reg_write(dev_num, PIPE_ENABLE_ADDR, data_value));
278 * Name: ddr3_tip_a38x_if_write.
282 * Returns: MV_OK if success, other error code if fail.
/*
 * Write a D-unit register. For a partial-mask write, first read the
 * current register value and merge the new bits under 'mask' so the
 * unmasked bits are preserved; a MASK_ALL_BITS write goes straight
 * through without the read.
 */
284 int ddr3_tip_a38x_if_write(u8 dev_num, enum hws_access_type interface_access,
285 u32 if_id, u32 reg_addr, u32 data_value,
290 if (mask != MASK_ALL_BITS) {
291 CHECK_STATUS(ddr3_tip_a38x_if_read
292 (dev_num, ACCESS_TYPE_UNICAST, if_id, reg_addr,
293 &ui_data_read, MASK_ALL_BITS));
/* Merge: keep bits outside 'mask', take new bits inside it */
294 data_value = (ui_data_read & (~mask)) | (data_value & mask);
297 reg_write(reg_addr, data_value);
303 * Name: ddr3_tip_a38x_if_read.
307 * Returns: MV_OK if success, other error code if fail.
309 int ddr3_tip_a38x_if_read(u8 dev_num, enum hws_access_type interface_access,
310 u32 if_id, u32 reg_addr, u32 *data, u32 mask)
312 *data = reg_read(reg_addr) & mask;
318 * Name: ddr3_tip_a38x_select_ddr_controller.
319 * Desc: Enable/Disable access to Marvell's server.
320 * Args: dev_num - device number
321 * enable - whether to enable or disable the server
323 * Returns: MV_OK if success, other error code if fail.
/*
 * Enable/disable access to Marvell's training server via a
 * read-modify-write of CS_ENABLE_REG.
 * NOTE(review): the bit manipulation between the read and the write is
 * outside this view — confirm which CS_ENABLE_REG bit 'enable' toggles.
 */
325 int ddr3_tip_a38x_select_ddr_controller(u8 dev_num, int enable)
329 reg = reg_read(CS_ENABLE_REG);
336 reg_write(CS_ENABLE_REG, reg);
342 * Name: ddr3_tip_init_a38x_silicon.
343 * Desc: init Training SW DB.
346 * Returns: MV_OK if success, other error code if fail.
/*
 * Init the Training SW DB for the A38x silicon: register the per-SoC
 * callback table and the DQ-to-PHY-pin map, optionally load the
 * static-algorithm delay tables, then derive the tuning-stage mask and
 * global frequency settings from the sample-at-reset frequency.
 */
348 static int ddr3_tip_init_a38x_silicon(u32 dev_num, u32 board_id)
350 struct hws_tip_config_func_db config_func;
351 enum hws_ddr_freq ddr_freq;
353 struct hws_topology_map *tm = ddr3_get_topology_map();
/* Register the A38x-specific access/config callbacks with the TIP core */
355 /* new read leveling version */
356 config_func.tip_dunit_read_func = ddr3_tip_a38x_if_read;
357 config_func.tip_dunit_write_func = ddr3_tip_a38x_if_write;
358 config_func.tip_dunit_mux_select_func =
359 ddr3_tip_a38x_select_ddr_controller;
360 config_func.tip_get_freq_config_info_func =
361 ddr3_tip_a38x_get_freq_config;
362 config_func.tip_set_freq_divider_func = ddr3_tip_a38x_set_divider;
363 config_func.tip_get_device_info_func = ddr3_tip_a38x_get_device_info;
364 config_func.tip_get_temperature = ddr3_ctrl_get_junc_temp;
366 ddr3_tip_init_config_func(dev_num, &config_func);
/* Register the board's DQ-bit to PHY-pin swap table */
368 ddr3_tip_register_dq_table(dev_num, dq_bit_map_2_phy_pin);
/* Static-algorithm builds also load the per-board delay tables */
370 #ifdef STATIC_ALGO_SUPPORT
372 struct hws_tip_static_config_info static_config;
/* Board delay table is indexed per board and per interface/bus */
374 board_id * A38X_NUMBER_OF_INTERFACES *
375 tm->num_of_bus_per_interface;
377 static_config.silicon_delay =
378 a38x_silicon_delay_offset[board_id];
379 static_config.package_trace_arr =
380 a38x_package_round_trip_delay_array;
381 static_config.board_trace_arr =
382 &a38x_board_round_trip_delay_array[board_offset];
383 ddr3_tip_init_static_config_db(dev_num, &static_config);
/* Determine the target (sample-at-reset) frequency */
386 status = ddr3_tip_a38x_get_init_freq(dev_num, &ddr_freq);
387 if (MV_OK != status) {
388 DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
389 ("DDR3 silicon get target frequency - FAILED 0x%x\n",
/* Default set of training/tuning stages to run */
395 mask_tune_func = (SET_LOW_FREQ_MASK_BIT |
396 LOAD_PATTERN_MASK_BIT |
397 SET_MEDIUM_FREQ_MASK_BIT | WRITE_LEVELING_MASK_BIT |
398 /* LOAD_PATTERN_2_MASK_BIT | */
399 WRITE_LEVELING_SUPP_MASK_BIT |
400 READ_LEVELING_MASK_BIT |
403 SET_TARGET_FREQ_MASK_BIT |
404 WRITE_LEVELING_TF_MASK_BIT |
405 WRITE_LEVELING_SUPP_TF_MASK_BIT |
406 READ_LEVELING_TF_MASK_BIT |
407 CENTRALIZATION_RX_MASK_BIT |
408 CENTRALIZATION_TX_MASK_BIT);
/* Reduced stage set when the target frequency is already low (333/400) */
411 if ((ddr_freq == DDR_FREQ_333) || (ddr_freq == DDR_FREQ_400)) {
412 mask_tune_func = (WRITE_LEVELING_MASK_BIT |
413 LOAD_PATTERN_2_MASK_BIT |
414 WRITE_LEVELING_SUPP_MASK_BIT |
415 READ_LEVELING_MASK_BIT |
418 CENTRALIZATION_RX_MASK_BIT |
419 CENTRALIZATION_TX_MASK_BIT);
420 rl_mid_freq_wa = 0; /* WA not needed if 333/400 is TF */
423 /* Supplementary not supported for ECC modes */
424 if (1 == ddr3_if_ecc_enabled()) {
425 mask_tune_func &= ~WRITE_LEVELING_SUPP_TF_MASK_BIT;
426 mask_tune_func &= ~WRITE_LEVELING_SUPP_MASK_BIT;
427 mask_tune_func &= ~PBS_TX_MASK_BIT;
428 mask_tune_func &= ~PBS_RX_MASK_BIT;
/* Global training knobs derived from the topology and SAR frequency */
433 if (ck_delay_16 == -1)
438 calibration_update_control = 1;
440 init_freq = tm->interface_params[first_active_if].memory_freq;
442 ddr3_tip_a38x_get_medium_freq(dev_num, &medium_freq);
/*
 * Update the topology map with the sample-at-reset DDR frequency, then
 * reload it so the derived topology parameters are re-calculated.
 */
447 int ddr3_a38x_update_topology_map(u32 dev_num, struct hws_topology_map *tm)
450 enum hws_ddr_freq freq;
/* Use the boot-strap (sample at reset) frequency for the interface */
452 ddr3_tip_a38x_get_init_freq(dev_num, &freq);
453 tm->interface_params[if_id].memory_freq = freq;
456 * re-calc topology parameters according to topology updates
459 CHECK_STATUS(hws_ddr3_tip_load_topology_map(dev_num, tm));
/*
 * Top-level A38x TIP init: refresh the topology map with the boot-time
 * frequency, then init the silicon-specific training database.
 */
464 int ddr3_tip_init_a38x(u32 dev_num, u32 board_id)
466 struct hws_topology_map *tm = ddr3_get_topology_map();
471 ddr3_a38x_update_topology_map(dev_num, tm);
472 ddr3_tip_init_a38x_silicon(dev_num, board_id);
/*
 * Decode the boot-time ("sample at reset") DDR clock selection field of
 * REG_DEVICE_SAR1_ADDR into a hws_ddr_freq value. Field values with no
 * mapping yield MV_NOT_SUPPORTED.
 */
477 int ddr3_tip_a38x_get_init_freq(int dev_num, enum hws_ddr_freq *freq)
481 /* Read sample at reset setting */
482 reg = (reg_read(REG_DEVICE_SAR1_ADDR) >>
483 RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
484 RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;
/* Map the SAR field value to the matching DDR frequency enum */
488 *freq = DDR_FREQ_333;
492 *freq = DDR_FREQ_400;
496 *freq = DDR_FREQ_533;
499 *freq = DDR_FREQ_600;
504 *freq = DDR_FREQ_667;
509 *freq = DDR_FREQ_800;
512 *freq = DDR_FREQ_933;
515 *freq = DDR_FREQ_900;
518 *freq = DDR_FREQ_900;
522 return MV_NOT_SUPPORTED;
/*
 * Derive the intermediate ("medium") training frequency from the same
 * sample-at-reset clock-select field: a lower frequency the training
 * flow uses before switching to the target frequency.
 */
528 int ddr3_tip_a38x_get_medium_freq(int dev_num, enum hws_ddr_freq *freq)
532 /* Read sample at reset setting */
533 reg = (reg_read(REG_DEVICE_SAR1_ADDR) >>
534 RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
535 RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;
539 /* Medium is same as TF to run PBS in this freq */
540 *freq = DDR_FREQ_333;
544 /* Medium is same as TF to run PBS in this freq */
545 *freq = DDR_FREQ_400;
549 *freq = DDR_FREQ_533;
554 *freq = DDR_FREQ_333;
559 *freq = DDR_FREQ_400;
562 *freq = DDR_FREQ_300;
565 *freq = DDR_FREQ_360;
568 *freq = DDR_FREQ_400;
572 return MV_NOT_SUPPORTED;
/*
 * Convenience wrapper: return the sample-at-reset DDR frequency for
 * device 0 as a plain u32.
 */
578 u32 ddr3_tip_get_init_freq(void)
580 enum hws_ddr_freq freq;
582 ddr3_tip_a38x_get_init_freq(0, &freq);
/*
 * Program the CPU PLL clock divider so the DDR interface runs at the
 * requested frequency: derive the divider from the sample-at-reset VCO
 * frequency, sequence the clkdiv reset/reload/relax controls around the
 * ratio write, then select the Dunit training-clock mode (1:1 for low
 * frequencies) based on the resulting frequency.
 */
587 static int ddr3_tip_a38x_set_divider(u8 dev_num, u32 if_id,
588 enum hws_ddr_freq frequency)
/* Only the interfaces present in interface_map are reachable on A38x */
594 DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
595 ("A38x does not support interface 0x%x\n",
600 /* get VCO freq index */
601 sar_val = (reg_read(REG_DEVICE_SAR1_ADDR) >>
602 RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
603 RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;
/* divider = VCO frequency (from SAR) / requested DDR frequency */
604 divider = a38x_vco_freq_per_sar[sar_val] / freq_val[frequency];
/* NOTE(review): purpose of the 0x20220/0xe42f4 pre-writes is not
 * documented here — confirm against the A38x register spec. */
607 CHECK_STATUS(ddr3_tip_a38x_if_write
608 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x20220, 0x0,
610 CHECK_STATUS(ddr3_tip_a38x_if_write
611 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe42f4, 0x0,
614 /* cpupll_clkdiv_reset_mask */
615 CHECK_STATUS(ddr3_tip_a38x_if_write
616 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4264, 0x1f,
619 /* cpupll_clkdiv_reload_smooth */
620 CHECK_STATUS(ddr3_tip_a38x_if_write
621 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4260,
622 (0x2 << 8), (0xff << 8)));
624 /* cpupll_clkdiv_relax_en */
625 CHECK_STATUS(ddr3_tip_a38x_if_write
626 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4260,
627 (0x2 << 24), (0xff << 24)));
629 /* write the divider */
630 CHECK_STATUS(ddr3_tip_a38x_if_write
631 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4268,
632 (divider << 8), (0x3f << 8)));
634 /* set cpupll_clkdiv_reload_ratio */
635 CHECK_STATUS(ddr3_tip_a38x_if_write
636 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4264,
637 (1 << 8), (1 << 8)));
639 /* unset cpupll_clkdiv_reload_ratio */
640 CHECK_STATUS(ddr3_tip_a38x_if_write
641 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4264, 0,
644 /* clear cpupll_clkdiv_reload_force */
645 CHECK_STATUS(ddr3_tip_a38x_if_write
646 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4260, 0,
649 /* clear cpupll_clkdiv_relax_en */
650 CHECK_STATUS(ddr3_tip_a38x_if_write
651 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4260, 0,
654 /* clear cpupll_clkdiv_reset_mask */
655 CHECK_STATUS(ddr3_tip_a38x_if_write
656 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4264, 0,
659 /* Dunit training clock + 1:1 mode */
660 if ((frequency == DDR_FREQ_LOW_FREQ) || (freq_val[frequency] <= 400)) {
661 CHECK_STATUS(ddr3_tip_a38x_if_write
662 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x18488,
663 (1 << 16), (1 << 16)));
664 CHECK_STATUS(ddr3_tip_a38x_if_write
665 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1524,
666 (0 << 15), (1 << 15)));
/* Above 400MHz: presumably the 2:1 clock mode — confirm with spec */
668 CHECK_STATUS(ddr3_tip_a38x_if_write
669 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x18488,
671 CHECK_STATUS(ddr3_tip_a38x_if_write
672 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1524,
673 (1 << 15), (1 << 15)));
680 * external read from memory
682 int ddr3_tip_ext_read(u32 dev_num, u32 if_id, u32 reg_addr,
683 u32 num_of_bursts, u32 *data)
687 for (burst_num = 0; burst_num < num_of_bursts * 8; burst_num++)
688 data[burst_num] = readl(reg_addr + 4 * burst_num);
694 * external write to memory
696 int ddr3_tip_ext_write(u32 dev_num, u32 if_id, u32 reg_addr,
697 u32 num_of_bursts, u32 *data) {
700 for (burst_num = 0; burst_num < num_of_bursts * 8; burst_num++)
701 writel(data[burst_num], reg_addr + 4 * burst_num);
/* Run the common DDR3 silicon init sequence before training starts. */
706 int ddr3_silicon_pre_init(void)
708 return ddr3_silicon_init();
/* Hook invoked after the main training algorithm has run. */
711 int ddr3_post_run_alg(void)
/*
 * Post-init fixups after silicon init: when the topology indicates
 * 16-bit DRAM mode, clear bit 15 of the SDRAM config register (write
 * 0x0 under mask 0x8000) to select half bus width.
 */
716 int ddr3_silicon_post_init(void)
718 struct hws_topology_map *tm = ddr3_get_topology_map();
720 /* Set half bus width */
721 if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask)) {
722 CHECK_STATUS(ddr3_tip_if_write
723 (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
724 REG_SDRAM_CONFIG_ADDR, 0x0, 0x8000));
730 int ddr3_tip_a38x_get_device_info(u8 dev_num, struct ddr3_device_info *info_ptr)
732 info_ptr->device_id = 0x6800;
733 info_ptr->ck_delay = ck_delay;