1 // SPDX-License-Identifier: GPL-2.0+
3 * DDR3 mem setup file for board based on EXYNOS5
5 * Copyright (C) 2012 Samsung Electronics
11 #include <asm/arch/clock.h>
12 #include <asm/arch/cpu.h>
13 #include <asm/arch/dmc.h>
14 #include <asm/arch/power.h>
15 #include "common_setup.h"
16 #include "exynos5_setup.h"
17 #include "clock_init.h"
/* Generic polling timeout and the number of DQS byte lanes per channel */
#define TIMEOUT_US		10000
#define NUM_BYTE_LANES		4
/*
 * Replicate DEFAULT_DQS into all four byte-lane fields of the read
 * offset register (one byte per lane).
 *
 * Bug fix: the original used logical OR (||), which evaluates the whole
 * expression to 1 instead of building the per-lane mask. Bitwise OR (|)
 * is required here.
 */
#define DEFAULT_DQS_X4		((DEFAULT_DQS << 24) | (DEFAULT_DQS << 16) \
				 | (DEFAULT_DQS << 8) | (DEFAULT_DQS << 0))
25 #ifdef CONFIG_EXYNOS5250
/*
 * Pulse the LPDDR3 PHY reset control to reset the DRAM PHYs.
 *
 * NOTE(review): this listing embeds original line numbers and elides
 * braces/blank lines; code lines are kept byte-identical.
 */
26 static void reset_phy_ctrl(void)
28 struct exynos5_clock *clk =
29 (struct exynos5_clock *)samsung_get_base_clock();
/* Toggle the reset bit: de-assert, then re-assert */
31 writel(DDR3PHY_CTRL_PHY_RESET_OFF, &clk->lpddr3phy_ctrl);
32 writel(DDR3PHY_CTRL_PHY_RESET, &clk->lpddr3phy_ctrl);
/*
 * Initialise the DDR3 memory controller and both DRAM PHYs (Exynos5250).
 *
 * Visible sequence: impedance/drive strength, read latency + burst
 * length, ZQ calibration, DQS pull-down, DLL locking, memory map and
 * AC timing registers, direct commands (PALL, NOP/MRS/ZQINIT), then
 * optional hardware gate/read leveling.
 *
 * @param mem	memory timing parameters to program into DMC/PHY
 * @param reset	presumably selects a PHY reset via reset_phy_ctrl() —
 *		TODO confirm; its use is elided from this listing
 * @return SETUP_ERR_* on ZQ-calibration or read-leveling failure;
 *	   success return elided from this listing
 *
 * NOTE(review): listing elides braces, blank lines and some statements
 * (e.g. several writel() destination arguments such as &dmc->concontrol,
 * and the poll-loop delay/decrement). Code lines kept byte-identical.
 */
35 int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
38 struct exynos5_phy_control *phy0_ctrl, *phy1_ctrl;
39 struct exynos5_dmc *dmc;
/* Map PHY0/PHY1 and DMC register blocks from the SoC base addresses */
42 phy0_ctrl = (struct exynos5_phy_control *)samsung_get_base_dmc_phy();
43 phy1_ctrl = (struct exynos5_phy_control *)(samsung_get_base_dmc_phy()
45 dmc = (struct exynos5_dmc *)samsung_get_base_dmc_ctrl();
50 /* Set Impedance Output Driver */
51 val = (mem->impedance << CA_CK_DRVR_DS_OFFSET) |
52 (mem->impedance << CA_CKE_DRVR_DS_OFFSET) |
53 (mem->impedance << CA_CS_DRVR_DS_OFFSET) |
54 (mem->impedance << CA_ADR_DRVR_DS_OFFSET);
55 writel(val, &phy0_ctrl->phy_con39);
56 writel(val, &phy1_ctrl->phy_con39);
58 /* Set Read Latency and Burst Length for PHY0 and PHY1 */
59 val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
60 (mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
61 writel(val, &phy0_ctrl->phy_con42);
62 writel(val, &phy1_ctrl->phy_con42);
/* ZQ calibration for both PHYs; abort init on failure */
65 if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
66 &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
67 return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
/* Program DQS pull-down configuration per PHY */
70 writel(mem->phy0_pulld_dqs, &phy0_ctrl->phy_con14);
71 writel(mem->phy1_pulld_dqs, &phy1_ctrl->phy_con14);
/* Start DFI initialisation (destination register elided in listing) */
73 writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
74 | (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT),
77 update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
/* Per-PHY DQS/DQ delay and tFS settings */
80 writel(mem->phy0_dqs, &phy0_ctrl->phy_con4);
81 writel(mem->phy1_dqs, &phy1_ctrl->phy_con4);
83 writel(mem->phy0_dq, &phy0_ctrl->phy_con6);
84 writel(mem->phy1_dq, &phy1_ctrl->phy_con6);
86 writel(mem->phy0_tFS, &phy0_ctrl->phy_con10);
87 writel(mem->phy1_tFS, &phy1_ctrl->phy_con10);
/* DLL control: start point, increment, DLL on, ref — without START yet */
89 val = (mem->ctrl_start_point << PHY_CON12_CTRL_START_POINT_SHIFT) |
90 (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
91 (mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
92 (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
93 writel(val, &phy0_ctrl->phy_con12);
94 writel(val, &phy1_ctrl->phy_con12);
96 /* Start DLL locking */
97 writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
98 &phy0_ctrl->phy_con12);
99 writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
100 &phy1_ctrl->phy_con12);
102 update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
/* Clear dfi_init_start; destination register elided in listing */
104 writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
107 /* Memory Channel Interleaving Size */
108 writel(mem->iv_size, &dmc->ivcontrol);
/* Chip configuration and base/mask for both memory chips */
110 writel(mem->memconfig, &dmc->memconfig0);
111 writel(mem->memconfig, &dmc->memconfig1);
112 writel(mem->membaseconfig0, &dmc->membaseconfig0);
113 writel(mem->membaseconfig1, &dmc->membaseconfig1);
115 /* Precharge Configuration */
116 writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
119 /* Power Down mode Configuration */
120 writel(mem->dpwrdn_cyc << PWRDNCONFIG_DPWRDN_CYC_SHIFT |
121 mem->dsref_cyc << PWRDNCONFIG_DSREF_CYC_SHIFT,
124 /* TimingRow, TimingData, TimingPower and Timingaref
125 * values as per Memory AC parameters
127 writel(mem->timing_ref, &dmc->timingref);
128 writel(mem->timing_row, &dmc->timingrow);
129 writel(mem->timing_data, &dmc->timingdata);
130 writel(mem->timing_power, &dmc->timingpower);
132 /* Send PALL command */
133 dmc_config_prech(mem, &dmc->directcmd);
135 /* Send NOP, MRS and ZQINIT commands */
136 dmc_config_mrs(mem, &dmc->directcmd);
/* Optional hardware gate/read leveling sequence */
138 if (mem->gate_leveling_enable) {
139 val = PHY_CON0_RESET_VAL;
141 writel(val, &phy0_ctrl->phy_con0);
142 writel(val, &phy1_ctrl->phy_con0);
144 val = PHY_CON2_RESET_VAL;
145 val |= INIT_DESKEW_EN;
146 writel(val, &phy0_ctrl->phy_con2);
147 writel(val, &phy1_ctrl->phy_con2);
149 val = PHY_CON0_RESET_VAL;
151 val |= BYTE_RDLVL_EN;
152 writel(val, &phy0_ctrl->phy_con0);
153 writel(val, &phy1_ctrl->phy_con0);
/* For leveling: FORCE+START instead of DLL_ON */
155 val = (mem->ctrl_start_point <<
156 PHY_CON12_CTRL_START_POINT_SHIFT) |
157 (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
158 (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
159 (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
160 (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
161 writel(val, &phy0_ctrl->phy_con12);
162 writel(val, &phy1_ctrl->phy_con12);
164 val = PHY_CON2_RESET_VAL;
165 val |= INIT_DESKEW_EN;
166 val |= RDLVL_GATE_EN;
167 writel(val, &phy0_ctrl->phy_con2);
168 writel(val, &phy1_ctrl->phy_con2);
170 val = PHY_CON0_RESET_VAL;
172 val |= BYTE_RDLVL_EN;
174 writel(val, &phy0_ctrl->phy_con0);
175 writel(val, &phy1_ctrl->phy_con0);
177 val = PHY_CON1_RESET_VAL;
178 val &= ~(CTRL_GATEDURADJ_MASK);
179 writel(val, &phy0_ctrl->phy_con1);
180 writel(val, &phy1_ctrl->phy_con1);
182 writel(CTRL_RDLVL_GATE_ENABLE, &dmc->rdlvl_config);
/*
 * Poll until both channels report read-leveling complete; 'i' counts
 * down (delay/decrement elided in this listing) until timeout below.
 */
184 while ((readl(&dmc->phystatus) &
185 (RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1)) !=
186 (RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1) && i > 0) {
188 * TODO(waihong): Comment on how long this takes to
195 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
196 writel(CTRL_RDLVL_GATE_DISABLE, &dmc->rdlvl_config);
/* Release DQS pull-downs now that leveling is done */
198 writel(0, &phy0_ctrl->phy_con14);
199 writel(0, &phy1_ctrl->phy_con14);
/* Restore normal DLL-on operating mode */
201 val = (mem->ctrl_start_point <<
202 PHY_CON12_CTRL_START_POINT_SHIFT) |
203 (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
204 (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
205 (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
206 (mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
207 (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
208 writel(val, &phy0_ctrl->phy_con12);
209 writel(val, &phy1_ctrl->phy_con12);
211 update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
214 /* Send PALL command */
215 dmc_config_prech(mem, &dmc->directcmd);
217 writel(mem->memcontrol, &dmc->memcontrol);
219 /* Set DMC Concontrol and enable auto-refresh counter */
220 writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
221 | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT), &dmc->concontrol);
226 #ifdef CONFIG_EXYNOS5420
228 * RAM address to use in the test.
230 * We'll use 4 words at this address and 4 at this address + 0x80 (Ares
231 * interleaves channels every 128 bytes). This will allow us to evaluate all of
232 * the chips in a 1 chip per channel (2GB) system and half the chips in a 2
233 * chip per channel (4GB) system. We can't test the 2nd chip since we need to
234 * do tests before the 2nd chip is enabled. Looking at the 2nd chip isn't
235 * critical because the 1st and 2nd chip have very similar timings (they'd
236 * better have similar timings, since there's only a single adjustment that is
237 * shared by both chips).
/*
 * Scratch address for the SW read-leveling RAM test; the block comment
 * above explains why words at +0 and +0x80 cover both channels.
 */
239 const unsigned int test_addr = CONFIG_SYS_SDRAM_BASE;
241 /* Test pattern with which RAM will be tested */
/* NOTE(review): the pattern initialiser values are elided from this listing */
242 static const unsigned int test_pattern[] = {
250 * This function is a test vector for sw read leveling,
251 * it compares the read data with the written data.
253 * @param ch DMC channel number
254 * @param byte_lane which DQS byte offset,
255 * possible values are 0,1,2,3
256 * @return TRUE if memory was good, FALSE if not.
/*
 * Read the test words back for one channel and compare one byte lane
 * against test_pattern[], masking off the other lanes.
 *
 * @param ch		DMC channel number (channels interleave every 0x80)
 * @param byte_lane	DQS byte lane, 0..3
 * @return true if every word matched in this lane, false otherwise
 *	   (the return statements are elided from this listing)
 */
258 static bool dmc_valid_window_test_vector(int ch, int byte_lane)
260 unsigned int read_data;
/* NOTE(review): declarations of 'i' and 'mask' elided from this listing */
264 mask = 0xFF << (8 * byte_lane);
266 for (i = 0; i < ARRAY_SIZE(test_pattern); i++) {
267 read_data = readl(test_addr + i * 4 + ch * 0x80);
268 if ((read_data & mask) != (test_pattern[i] & mask))
276 * This function returns current read offset value.
278 * @param phy_ctrl pointer to the current phy controller
/*
 * Return the current read offset value, read from PHY_CON4.
 * (Parameter line 'phy_ctrl' continuation elided from this listing.)
 */
280 static unsigned int dmc_get_read_offset_value(struct exynos5420_phy_control
283 return readl(&phy_ctrl->phy_con4);
287 * This function performs resync, so that slave DLL is updated.
289 * @param phy_ctrl pointer to the current phy controller
/*
 * Pulse CTRL_OFFSETR3 in PHY_CON10 (set then clear) to trigger a
 * resync so the slave DLL picks up a newly written offset.
 */
291 static void ddr_phy_set_do_resync(struct exynos5420_phy_control *phy_ctrl)
293 setbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
294 clrbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
298 * This function sets read offset value register with 'offset'.
300 * ...we also call ddr_phy_set_do_resync().
302 * @param phy_ctrl pointer to the current phy controller
303 * @param offset offset to read DQS
/*
 * Write 'offset' into the read-offset register (PHY_CON4) and resync
 * so the slave DLL applies it.
 * (The 'offset' parameter line is elided from this listing.)
 */
305 static void dmc_set_read_offset_value(struct exynos5420_phy_control *phy_ctrl,
308 writel(offset, &phy_ctrl->phy_con4);
309 ddr_phy_set_do_resync(phy_ctrl);
313 * Convert a 2s complement byte to a byte with a sign bit.
315 * NOTE: you shouldn't use normal math on the number returned by this function.
316 * As an example, -10 = 0xf6. After this function -10 = 0x8a. If you wanted
317 * to do math and get the average of 10 and -10 (should be 0):
318 * 0x8a + 0xa = 0x94 (-108)
319 * 0x94 / 2 = 0xca (-54)
320 * ...and 0xca = sign bit plus 0x4a, or -74
322 * Also note that you lose the ability to represent -128 since there are two
323 * representations of 0.
325 * @param b The byte to convert in two's complement.
326 * @return The 7-bit value + sign bit.
329 unsigned char make_signed_byte(signed char b)
338 * Test various shifts starting at 'start' and going to 'end'.
340 * For each byte lane, we'll walk through shift starting at 'start' and going
341 * to 'end' (inclusive). When we are finally able to read the test pattern
342 * we'll store the value in the results array.
344 * @param phy_ctrl pointer to the current phy controller
345 * @param ch channel number
346 * @param start the start shift. -127 to 127
347 * @param end the end shift. -127 to 127
348 * @param results we'll store results for each byte lane.
/*
 * For each byte lane, walk 'shift' from start to end (inclusive, in
 * either direction), program the candidate read offset for that lane
 * only, and record in results[] the shift at which the lane read the
 * test pattern (loop-exit handling elided from this listing).
 *
 * @param phy_ctrl	current PHY controller
 * @param ch		channel number
 * @param start		first shift to try, -127..127
 * @param end		last shift to try, -127..127
 * @param results	per-lane result; defaults to DEFAULT_DQS if no
 *			shift passed
 */
351 void test_shifts(struct exynos5420_phy_control *phy_ctrl, int ch,
352 int start, int end, int results[NUM_BYTE_LANES])
353 /* walk direction depends on whether start is below or above end */
354 int incr = (start < end) ? 1 : -1;
357 for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
/* Reset all lanes to the default offset before probing this lane */
360 dmc_set_read_offset_value(phy_ctrl, DEFAULT_DQS_X4);
361 results[byte_lane] = DEFAULT_DQS;
363 for (shift = start; shift != (end + incr); shift += incr) {
364 unsigned int byte_offsetr;
365 unsigned int offsetr;
/* Convert to the sign-bit+magnitude format the register expects */
367 byte_offsetr = make_signed_byte(shift);
/* Replace only this lane's byte in the current offset value */
369 offsetr = dmc_get_read_offset_value(phy_ctrl);
370 offsetr &= ~(0xFF << (8 * byte_lane));
371 offsetr |= (byte_offsetr << (8 * byte_lane));
372 dmc_set_read_offset_value(phy_ctrl, offsetr);
374 if (dmc_valid_window_test_vector(ch, byte_lane)) {
375 results[byte_lane] = shift;
383 * This function performs SW read leveling to compensate DQ-DQS skew at
384 * receiver it first finds the optimal read offset value on each DQS
385 * then applies the value to PHY.
387 * Read offset value has its min margin and max margin. If read offset
388 * value exceeds its min or max margin, read data will have corruption.
389 * To avoid this we are doing sw read leveling.
391 * SW read leveling is:
392 * 1> Finding offset value's left_limit and right_limit
393 * 2> and calculate its center value
394 * 3> finally programs that center value to PHY
395 * 4> then PHY gets its optimal offset value.
397 * @param phy_ctrl pointer to the current phy controller
398 * @param ch channel number
399 * @param coarse_lock_val The coarse lock value read from PHY_CON13.
/*
 * SW read leveling: find the left and right limits of the valid read
 * window per byte lane, then program the center of the window as the
 * read offset (see the block comment above for the full rationale).
 *
 * @param phy_ctrl		current PHY controller
 * @param ch			channel number
 * @param coarse_lock_val	coarse lock value read from PHY_CON13,
 *				used to bound the search around DEFAULT_DQS
 *
 * NOTE(review): several lines are elided from this listing (left_limit
 * clamping branch body, offsetr_cent initialisation, local decls).
 */
402 static void software_find_read_offset(struct exynos5420_phy_control *phy_ctrl,
403 int ch, unsigned int coarse_lock_val)
405 unsigned int offsetr_cent;
409 int left[NUM_BYTE_LANES];
410 int right[NUM_BYTE_LANES];
413 /* Fill the memory with test patterns */
414 for (i = 0; i < ARRAY_SIZE(test_pattern); i++)
415 writel(test_pattern[i], test_addr + i * 4 + ch * 0x80);
417 /* Figure out the limits we'll test with; keep -127 < limit < 127 */
418 left_limit = DEFAULT_DQS - coarse_lock_val;
419 right_limit = DEFAULT_DQS + coarse_lock_val;
420 if (right_limit > 127)
423 /* Fill in the location where reads were OK from left and right */
424 test_shifts(phy_ctrl, ch, left_limit, right_limit, left);
425 test_shifts(phy_ctrl, ch, right_limit, left_limit, right);
427 /* Make a final value by taking the center between the left and right */
429 for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
433 temp_center = (left[byte_lane] + right[byte_lane]) / 2;
/* Convert center to sign-bit format and pack into this lane's byte */
434 vmwc = make_signed_byte(temp_center);
435 offsetr_cent |= vmwc << (8 * byte_lane);
437 dmc_set_read_offset_value(phy_ctrl, offsetr_cent);
440 int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
442 struct exynos5420_clock *clk =
443 (struct exynos5420_clock *)samsung_get_base_clock();
444 struct exynos5420_power *power =
445 (struct exynos5420_power *)samsung_get_base_power();
446 struct exynos5420_phy_control *phy0_ctrl, *phy1_ctrl;
447 struct exynos5420_dmc *drex0, *drex1;
448 struct exynos5420_tzasc *tzasc0, *tzasc1;
449 struct exynos5_power *pmu;
450 uint32_t val, n_lock_r, n_lock_w_phy0, n_lock_w_phy1;
451 uint32_t lock0_info, lock1_info;
455 phy0_ctrl = (struct exynos5420_phy_control *)samsung_get_base_dmc_phy();
456 phy1_ctrl = (struct exynos5420_phy_control *)(samsung_get_base_dmc_phy()
458 drex0 = (struct exynos5420_dmc *)samsung_get_base_dmc_ctrl();
459 drex1 = (struct exynos5420_dmc *)(samsung_get_base_dmc_ctrl()
461 tzasc0 = (struct exynos5420_tzasc *)samsung_get_base_dmc_tzasc();
462 tzasc1 = (struct exynos5420_tzasc *)(samsung_get_base_dmc_tzasc()
464 pmu = (struct exynos5_power *)EXYNOS5420_POWER_BASE;
466 if (CONFIG_NR_DRAM_BANKS > 4) {
467 /* Need both controllers. */
468 mem->memcontrol |= DMC_MEMCONTROL_NUM_CHIP_2;
469 mem->chips_per_channel = 2;
470 mem->chips_to_configure = 2;
472 /* 2GB requires a single controller */
473 mem->memcontrol |= DMC_MEMCONTROL_NUM_CHIP_1;
476 /* Enable PAUSE for DREX */
477 setbits_le32(&clk->pause, ENABLE_BIT);
479 /* Enable BYPASS mode */
480 setbits_le32(&clk->bpll_con1, BYPASS_EN);
482 writel(MUX_BPLL_SEL_FOUTBPLL, &clk->src_cdrex);
484 val = readl(&clk->mux_stat_cdrex);
485 val &= BPLL_SEL_MASK;
486 } while (val != FOUTBPLL);
488 clrbits_le32(&clk->bpll_con1, BYPASS_EN);
490 /* Specify the DDR memory type as DDR3 */
491 val = readl(&phy0_ctrl->phy_con0);
492 val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
493 val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
494 writel(val, &phy0_ctrl->phy_con0);
496 val = readl(&phy1_ctrl->phy_con0);
497 val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
498 val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
499 writel(val, &phy1_ctrl->phy_con0);
501 /* Set Read Latency and Burst Length for PHY0 and PHY1 */
502 val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
503 (mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
504 writel(val, &phy0_ctrl->phy_con42);
505 writel(val, &phy1_ctrl->phy_con42);
507 val = readl(&phy0_ctrl->phy_con26);
508 val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
509 val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
510 writel(val, &phy0_ctrl->phy_con26);
512 val = readl(&phy1_ctrl->phy_con26);
513 val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
514 val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
515 writel(val, &phy1_ctrl->phy_con26);
518 * Set Driver strength for CK, CKE, CS & CA to 0x7
519 * Set Driver strength for Data Slice 0~3 to 0x7
521 val = (0x7 << CA_CK_DRVR_DS_OFFSET) | (0x7 << CA_CKE_DRVR_DS_OFFSET) |
522 (0x7 << CA_CS_DRVR_DS_OFFSET) | (0x7 << CA_ADR_DRVR_DS_OFFSET);
523 val |= (0x7 << DA_3_DS_OFFSET) | (0x7 << DA_2_DS_OFFSET) |
524 (0x7 << DA_1_DS_OFFSET) | (0x7 << DA_0_DS_OFFSET);
525 writel(val, &phy0_ctrl->phy_con39);
526 writel(val, &phy1_ctrl->phy_con39);
529 if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
530 &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
531 return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
533 clrbits_le32(&phy0_ctrl->phy_con16, ZQ_CLK_DIV_EN);
534 clrbits_le32(&phy1_ctrl->phy_con16, ZQ_CLK_DIV_EN);
537 val = readl(&phy0_ctrl->phy_con14);
538 val |= mem->phy0_pulld_dqs;
539 writel(val, &phy0_ctrl->phy_con14);
540 val = readl(&phy1_ctrl->phy_con14);
541 val |= mem->phy1_pulld_dqs;
542 writel(val, &phy1_ctrl->phy_con14);
544 val = MEM_TERM_EN | PHY_TERM_EN;
545 writel(val, &drex0->phycontrol0);
546 writel(val, &drex1->phycontrol0);
548 writel(mem->concontrol |
549 (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
550 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
552 writel(mem->concontrol |
553 (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
554 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
558 val = readl(&drex0->phystatus);
559 } while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
561 val = readl(&drex1->phystatus);
562 } while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
564 clrbits_le32(&drex0->concontrol, DFI_INIT_START);
565 clrbits_le32(&drex1->concontrol, DFI_INIT_START);
567 update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
568 update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);
572 * 0x2000_0000 ~ 0x5FFF_FFFF
573 * 0x6000_0000 ~ 0x9FFF_FFFF
576 val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0) |
577 DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
578 writel(val, &tzasc0->membaseconfig0);
579 writel(val, &tzasc1->membaseconfig0);
582 val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1) |
583 DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
584 writel(val, &tzasc0->membaseconfig1);
585 writel(val, &tzasc1->membaseconfig1);
588 * Memory Channel Interleaving Size
589 * Ares Channel interleaving = 128 bytes
592 writel(mem->memconfig, &tzasc0->memconfig0);
593 writel(mem->memconfig, &tzasc1->memconfig0);
594 writel(mem->memconfig, &tzasc0->memconfig1);
595 writel(mem->memconfig, &tzasc1->memconfig1);
597 /* Precharge Configuration */
598 writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
599 &drex0->prechconfig0);
600 writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
601 &drex1->prechconfig0);
604 * TimingRow, TimingData, TimingPower and Timingaref
605 * values as per Memory AC parameters
607 writel(mem->timing_ref, &drex0->timingref);
608 writel(mem->timing_ref, &drex1->timingref);
609 writel(mem->timing_row, &drex0->timingrow0);
610 writel(mem->timing_row, &drex1->timingrow0);
611 writel(mem->timing_data, &drex0->timingdata0);
612 writel(mem->timing_data, &drex1->timingdata0);
613 writel(mem->timing_power, &drex0->timingpower0);
614 writel(mem->timing_power, &drex1->timingpower0);
618 * Send NOP, MRS and ZQINIT commands
619 * Sending MRS command will reset the DRAM. We should not be
620 * resetting the DRAM after resume, this will lead to memory
621 * corruption as DRAM content is lost after DRAM reset
623 dmc_config_mrs(mem, &drex0->directcmd);
624 dmc_config_mrs(mem, &drex1->directcmd);
628 * Get PHY_CON13 from both phys. Gate CLKM around reading since
629 * PHY_CON13 is glitchy when CLKM is running. We're paranoid and
630 * wait until we get a "fine lock", though a coarse lock is probably
631 * OK (we only use the coarse numbers below). We try to gate the
632 * clock for as short a time as possible in case SDRAM is somehow
633 * sensitive. sdelay(10) in the loop is arbitrary to make sure
634 * there is some time for PHY_CON13 to get updated. In practice
635 * no delay appears to be needed.
637 val = readl(&clk->gate_bus_cdrex);
639 writel(val & ~0x1, &clk->gate_bus_cdrex);
640 lock0_info = readl(&phy0_ctrl->phy_con13);
641 writel(val, &clk->gate_bus_cdrex);
643 if ((lock0_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
649 writel(val & ~0x2, &clk->gate_bus_cdrex);
650 lock1_info = readl(&phy1_ctrl->phy_con13);
651 writel(val, &clk->gate_bus_cdrex);
653 if ((lock1_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
661 * During Suspend-Resume & S/W-Reset, as soon as PMU releases
662 * pad retention, CKE goes high. This causes memory contents
663 * not to be retained during DRAM initialization. Therefore,
664 * there is a new control register(0x100431e8[28]) which lets us
665 * release pad retention and retain the memory content until the
666 * initialization is complete.
668 writel(PAD_RETENTION_DRAM_COREBLK_VAL,
669 &power->pad_retention_dram_coreblk_option);
671 val = readl(&power->pad_retention_dram_status);
672 } while (val != 0x1);
675 * CKE PAD retention disables DRAM self-refresh mode.
676 * Send auto refresh command for DRAM refresh.
678 for (i = 0; i < 128; i++) {
679 for (chip = 0; chip < mem->chips_to_configure; chip++) {
680 writel(DIRECT_CMD_REFA |
681 (chip << DIRECT_CMD_CHIP_SHIFT),
683 writel(DIRECT_CMD_REFA |
684 (chip << DIRECT_CMD_CHIP_SHIFT),
690 if (mem->gate_leveling_enable) {
691 writel(PHY_CON0_RESET_VAL, &phy0_ctrl->phy_con0);
692 writel(PHY_CON0_RESET_VAL, &phy1_ctrl->phy_con0);
694 setbits_le32(&phy0_ctrl->phy_con0, P0_CMD_EN);
695 setbits_le32(&phy1_ctrl->phy_con0, P0_CMD_EN);
697 val = PHY_CON2_RESET_VAL;
698 val |= INIT_DESKEW_EN;
699 writel(val, &phy0_ctrl->phy_con2);
700 writel(val, &phy1_ctrl->phy_con2);
702 val = readl(&phy0_ctrl->phy_con1);
703 val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
704 writel(val, &phy0_ctrl->phy_con1);
706 val = readl(&phy1_ctrl->phy_con1);
707 val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
708 writel(val, &phy1_ctrl->phy_con1);
710 n_lock_w_phy0 = (lock0_info & CTRL_LOCK_COARSE_MASK) >> 2;
711 n_lock_r = readl(&phy0_ctrl->phy_con12);
712 n_lock_r &= ~CTRL_DLL_ON;
713 n_lock_r |= n_lock_w_phy0;
714 writel(n_lock_r, &phy0_ctrl->phy_con12);
716 n_lock_w_phy1 = (lock1_info & CTRL_LOCK_COARSE_MASK) >> 2;
717 n_lock_r = readl(&phy1_ctrl->phy_con12);
718 n_lock_r &= ~CTRL_DLL_ON;
719 n_lock_r |= n_lock_w_phy1;
720 writel(n_lock_r, &phy1_ctrl->phy_con12);
722 val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
723 for (chip = 0; chip < mem->chips_to_configure; chip++) {
724 writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
726 writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
730 setbits_le32(&phy0_ctrl->phy_con2, RDLVL_GATE_EN);
731 setbits_le32(&phy1_ctrl->phy_con2, RDLVL_GATE_EN);
733 setbits_le32(&phy0_ctrl->phy_con0, CTRL_SHGATE);
734 setbits_le32(&phy1_ctrl->phy_con0, CTRL_SHGATE);
736 val = readl(&phy0_ctrl->phy_con1);
737 val &= ~(CTRL_GATEDURADJ_MASK);
738 writel(val, &phy0_ctrl->phy_con1);
740 val = readl(&phy1_ctrl->phy_con1);
741 val &= ~(CTRL_GATEDURADJ_MASK);
742 writel(val, &phy1_ctrl->phy_con1);
744 writel(CTRL_RDLVL_GATE_ENABLE, &drex0->rdlvl_config);
746 while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO) !=
747 RDLVL_COMPLETE_CHO) && (i > 0)) {
749 * TODO(waihong): Comment on how long this take to
756 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
757 writel(CTRL_RDLVL_GATE_DISABLE, &drex0->rdlvl_config);
759 writel(CTRL_RDLVL_GATE_ENABLE, &drex1->rdlvl_config);
761 while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO) !=
762 RDLVL_COMPLETE_CHO) && (i > 0)) {
764 * TODO(waihong): Comment on how long this take to
771 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
772 writel(CTRL_RDLVL_GATE_DISABLE, &drex1->rdlvl_config);
774 writel(0, &phy0_ctrl->phy_con14);
775 writel(0, &phy1_ctrl->phy_con14);
777 val = (0x3 << DIRECT_CMD_BANK_SHIFT);
778 for (chip = 0; chip < mem->chips_to_configure; chip++) {
779 writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
781 writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
785 /* Common Settings for Leveling */
786 val = PHY_CON12_RESET_VAL;
787 writel((val + n_lock_w_phy0), &phy0_ctrl->phy_con12);
788 writel((val + n_lock_w_phy1), &phy1_ctrl->phy_con12);
790 setbits_le32(&phy0_ctrl->phy_con2, DLL_DESKEW_EN);
791 setbits_le32(&phy1_ctrl->phy_con2, DLL_DESKEW_EN);
795 * Do software read leveling
797 * Do this before we turn on auto refresh since the auto refresh can
798 * be in conflict with the resync operation that's part of setting
802 /* restore calibrated value after resume */
803 dmc_set_read_offset_value(phy0_ctrl, readl(&pmu->pmu_spare1));
804 dmc_set_read_offset_value(phy1_ctrl, readl(&pmu->pmu_spare2));
806 software_find_read_offset(phy0_ctrl, 0,
807 CTRL_LOCK_COARSE(lock0_info));
808 software_find_read_offset(phy1_ctrl, 1,
809 CTRL_LOCK_COARSE(lock1_info));
810 /* save calibrated value to restore after resume */
811 writel(dmc_get_read_offset_value(phy0_ctrl), &pmu->pmu_spare1);
812 writel(dmc_get_read_offset_value(phy1_ctrl), &pmu->pmu_spare2);
815 /* Send PALL command */
816 dmc_config_prech(mem, &drex0->directcmd);
817 dmc_config_prech(mem, &drex1->directcmd);
819 writel(mem->memcontrol, &drex0->memcontrol);
820 writel(mem->memcontrol, &drex1->memcontrol);
823 * Set DMC Concontrol: Enable auto-refresh counter, provide
824 * read data fetch cycles and enable DREX auto set powerdown
825 * for input buffer of I/O in none read memory state.
827 writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
828 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
829 DMC_CONCONTROL_IO_PD_CON(0x2),
831 writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
832 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
833 DMC_CONCONTROL_IO_PD_CON(0x2),
837 * Enable Clock Gating Control for DMC
838 * this saves around 25 mw dmc power as compared to the power
839 * consumption without these bits enabled
841 setbits_le32(&drex0->cgcontrol, DMC_INTERNAL_CG);
842 setbits_le32(&drex1->cgcontrol, DMC_INTERNAL_CG);
845 * As per Exynos5800 UM ver 0.00 section 17.13.2.1
846 * CONCONTROL register bit 3 [update_mode], Exynos5800 does not
847 * support the PHY initiated update. And it is recommended to set
848 * this field to 1'b1 during initialization
850 * When we apply PHY-initiated mode, DLL lock value is determined
851 * once at DMC init time and not updated later when we change the MIF
852 * voltage based on ASV group in kernel. Applying MC-initiated mode
853 * makes sure that DLL tracing is ON so that silicon is able to
854 * compensate the voltage variation.
856 val = readl(&drex0->concontrol);
857 val |= CONCONTROL_UPDATE_MODE;
858 writel(val, &drex0->concontrol);
859 val = readl(&drex1->concontrol);
860 val |= CONCONTROL_UPDATE_MODE;
861 writel(val, &drex1->concontrol);