2 * sunxi DRAM controller initialization
3 * (C) Copyright 2012 Henrik Nordstrom <henrik@henriknordstrom.net>
4 * (C) Copyright 2013 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
6 * Based on sun4i Linux kernel sources mach-sunxi/pm/standby/dram*.c
 * and earlier U-Boot Allwinner A10 SPL work
9 * (C) Copyright 2007-2012
10 * Allwinner Technology Co., Ltd. <www.allwinnertech.com>
11 * Berg Xing <bergxing@allwinnertech.com>
12 * Tom Cubie <tangliang@allwinnertech.com>
14 * SPDX-License-Identifier: GPL-2.0+
18 * Unfortunately the only documentation we have on the sun7i DRAM
19 * controller is Allwinner boot0 + boot1 code, and that code uses
20 * magic numbers & shifts with no explanations. Hence this code is
21 * rather undocumented and full of magic.
26 #include <asm/arch/clock.h>
27 #include <asm/arch/dram.h>
28 #include <asm/arch/timer.h>
29 #include <asm/arch/sys_proto.h>
/* The A10 (sun4i) chip revision is a 2-bit field at bits [7:6] of the timer
 * CPU_CFG register; mctl_ddr3_reset() below reads it to pick the DRAM RESET
 * pin polarity. */
#define CPU_CFG_CHIP_VER(n) ((n) << 6)
#define CPU_CFG_CHIP_VER_MASK CPU_CFG_CHIP_VER(0x3)
#define CPU_CFG_CHIP_REV_A 0x0
#define CPU_CFG_CHIP_REV_C1 0x1
#define CPU_CFG_CHIP_REV_C2 0x2
#define CPU_CFG_CHIP_REV_B 0x3
39 * Wait up to 1s for mask to be clear in given reg.
41 static inline void await_bits_clear(u32 *reg, u32 mask)
43 mctl_await_completion(reg, mask, 0);
47 * Wait up to 1s for mask to be set in given reg.
49 static inline void await_bits_set(u32 *reg, u32 mask)
51 mctl_await_completion(reg, mask, mask);
55 * This performs the external DRAM reset by driving the RESET pin low and
56 * then high again. According to the DDR3 spec, the RESET pin needs to be
57 * kept low for at least 200 us.
59 static void mctl_ddr3_reset(void)
61 struct sunxi_dram_reg *dram =
62 (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
64 #ifdef CONFIG_MACH_SUN4I
65 struct sunxi_timer_reg *timer =
66 (struct sunxi_timer_reg *)SUNXI_TIMER_BASE;
69 writel(0, &timer->cpu_cfg);
70 reg_val = readl(&timer->cpu_cfg);
72 if ((reg_val & CPU_CFG_CHIP_VER_MASK) !=
73 CPU_CFG_CHIP_VER(CPU_CFG_CHIP_REV_A)) {
74 setbits_le32(&dram->mcr, DRAM_MCR_RESET);
76 clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
80 clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
82 setbits_le32(&dram->mcr, DRAM_MCR_RESET);
84 /* After the RESET pin is de-asserted, the DDR3 spec requires to wait
85 * for additional 500 us before driving the CKE pin (Clock Enable)
86 * high. The duration of this delay can be configured in the SDR_IDCR
87 * (Initialization Delay Configuration Register) and applied
88 * automatically by the DRAM controller during the DDR3 initialization
89 * step. But SDR_IDCR has limited range on sun4i/sun5i hardware and
90 * can't provide sufficient delay at DRAM clock frequencies higher than
91 * 524 MHz (while Allwinner A13 supports DRAM clock frequency up to
92 * 533 MHz according to the datasheet). Additionally, there is no
93 * official documentation for the SDR_IDCR register anywhere, and
94 * there is always a chance that we are interpreting it wrong.
95 * Better be safe than sorry, so add an explicit delay here. */
/*
 * Configure the DRAM pad mode/drive bits in SDR_MCR.
 * NOTE(review): this chunk looks truncated - the #else/#endif around the
 * two clrsetbits_le32() openers and the final set-mask argument(s) are not
 * visible here; verify against the full source before relying on it.
 */
static void mctl_set_drive(void)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN7I
	/* sun7i additionally clears bits [29:28] (undocumented, boot0-derived) */
	clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3) | (0x3 << 28),
	clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3),
			DRAM_MCR_MODE_EN(0x3) |
112 static void mctl_itm_disable(void)
114 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
116 clrsetbits_le32(&dram->ccr, DRAM_CCR_INIT, DRAM_CCR_ITM_OFF);
119 static void mctl_itm_enable(void)
121 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
123 clrbits_le32(&dram->ccr, DRAM_CCR_ITM_OFF);
/* Pulse the ITM off and back on, with a short settling delay after each step. */
static void mctl_itm_reset(void)
{
	mctl_itm_disable();
	udelay(1); /* ITM reset needs a bit of delay */
	mctl_itm_enable();
	udelay(1);
}
/*
 * Set up and (re)start DLL 0 (the command lane DLL).
 * Bits [21:16] of 'phase' select the DLL0 phase adjustment, written to
 * bits [11:6] of DLLCR0.
 * NOTE(review): the settling delays between the three steps below are
 * missing from this excerpt - confirm their values against the full source.
 */
static void mctl_enable_dll0(u32 phase)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	/* program the phase adjust field */
	clrsetbits_le32(&dram->dllcr[0], 0x3f << 6,
			((phase >> 16) & 0x3f) << 6);
	/* step 1: clear NRESET, set DISABLE (DLL disabled, held in reset) */
	clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET, DRAM_DLLCR_DISABLE);
	/* step 2: drive both NRESET and DISABLE low */
	clrbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET | DRAM_DLLCR_DISABLE);
	/* step 3: release reset (set NRESET) with DISABLE cleared - DLL runs */
	clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_DISABLE, DRAM_DLLCR_NRESET);
150 /* Get the number of DDR byte lanes */
151 static u32 mctl_get_number_of_lanes(void)
153 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
154 if ((readl(&dram->dcr) & DRAM_DCR_BUS_WIDTH_MASK) ==
155 DRAM_DCR_BUS_WIDTH(DRAM_DCR_BUS_WIDTH_32BIT))
/*
 * Set up and (re)start the per-byte-lane DLLs (dllcr[1..number_of_lanes]),
 * using the same disable/reset/enable sequence as mctl_enable_dll0().
 * The low nibble(s) of 'phase' select the per-lane phase adjustment,
 * written to bits [17:14] of each lane's DLLCR.
 * Note: This differs from pm/standby in that it checks the bus width
 * NOTE(review): this excerpt is truncated - the set-mask arguments of the
 * second/third per-lane calls, the per-lane shift of 'phase' and the
 * settling delays are not visible; verify against the full source.
 */
static void mctl_enable_dllx(u32 phase)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 i, number_of_lanes;

	number_of_lanes = mctl_get_number_of_lanes();

	/* program per-lane phase, then disable each lane DLL */
	for (i = 1; i <= number_of_lanes; i++) {
		clrsetbits_le32(&dram->dllcr[i], 0xf << 14,
				(phase & 0xf) << 14);
		clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET,
	/* drive NRESET and DISABLE low on each lane */
	for (i = 1; i <= number_of_lanes; i++)
		clrbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET |
	/* release reset and leave each lane DLL enabled */
	for (i = 1; i <= number_of_lanes; i++)
		clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_DISABLE,
/*
 * Host port control register (HPCR) values, one per host port (32 entries),
 * selected per SoC and written verbatim to dram->hpcr[0..31] by
 * mctl_configure_hostport(). The values originate from the Allwinner boot0
 * sources and are otherwise undocumented.
 * NOTE(review): the #endif markers and some leading table rows are missing
 * from this excerpt.
 */
static u32 hpcr_value[32] = {
#ifdef CONFIG_MACH_SUN5I
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0,
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0
#ifdef CONFIG_MACH_SUN4I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0, 0,
	0x1031, 0x1031, 0x0735, 0x5031,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x1031, 0x0301, 0x0301, 0x0731
#ifdef CONFIG_MACH_SUN7I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0x0301,
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x0001, 0x1031, 0, 0x1031
	/* last row differs from boot0 source table
	 * 0x1031, 0x0301, 0x0301, 0x0731
	 * but boot0 code skips #28 and #30, and sets #29 and #31 to the
	 * value from #28 entry (0x1031)
	 */
229 static void mctl_configure_hostport(void)
231 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
234 for (i = 0; i < 32; i++)
235 writel(hpcr_value[i], &dram->hpcr[i]);
/*
 * Configure PLL5 to produce the requested DRAM clock ('clk', in MHz),
 * select and divide a source (PLL6 or PLL5P) for the MBUS clock, and open
 * the DRAMC AHB (and, where present, DLL) register clock gates.
 * NOTE(review): this excerpt is truncated - the 'u32 reg_val;' declaration,
 * several #else/#endif markers, settling delays and the DRAMC reset release
 * are not visible here; verify against the full source.
 */
static void mctl_setup_dram_clock(u32 clk, u32 mbus_clk)
	struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
	u32 pll5p_clk, pll6x_clk;
	u32 pll5p_div, pll6x_div;
	u32 pll5p_rate, pll6x_rate;

	/* PLL5 output = 24 MHz * N * K / M (P additionally divides PLL5P) */
	reg_val = readl(&ccm->pll5_cfg);
	reg_val &= ~CCM_PLL5_CTRL_M_MASK; /* set M to 0 (x1) */
	reg_val &= ~CCM_PLL5_CTRL_K_MASK; /* set K to 0 (x1) */
	reg_val &= ~CCM_PLL5_CTRL_N_MASK; /* set N to 0 (x0) */
	reg_val &= ~CCM_PLL5_CTRL_P_MASK; /* set P to 0 (x1) */
#ifdef CONFIG_OLD_SUNXI_KERNEL_COMPAT
	/* Old kernels are hardcoded to P=1 (divide by 2) */
	reg_val |= CCM_PLL5_CTRL_P(1);
	if (clk >= 540 && clk < 552) {
		/* 24 MHz * 15 * 3 / 2 = 540 MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(15));
	} else if (clk >= 512 && clk < 528) {
		/* 24 MHz * 16 * 4 / 3 = 512 MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(4));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(16));
	} else if (clk >= 496 && clk < 504) {
		/* 24 MHz * 31 * 2 / 3 = 496 MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(31));
	} else if (clk >= 468 && clk < 480) {
		/* 24 MHz * 13 * 3 / 2 = 468 MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(13));
	} else if (clk >= 396 && clk < 408) {
		/* 24 MHz * 11 * 3 / 2 = 396 MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(11));
		/* any other frequency that is a multiple of 24 */
		/* 24 MHz * (clk / 24) * 2 / 2 = clk */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(clk / 24));
	reg_val &= ~CCM_PLL5_CTRL_VCO_GAIN; /* PLL VCO Gain off */
	reg_val |= CCM_PLL5_CTRL_EN; /* PLL On */
	writel(reg_val, &ccm->pll5_cfg);

	/* route the PLL5 output to the DDR clock */
	setbits_le32(&ccm->pll5_cfg, CCM_PLL5_CTRL_DDR_CLK);

#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN7I)
	/* hold the (unused) GPS block in reset/gated, cycling its AHB gate */
	clrbits_le32(&ccm->gps_clk_cfg, CCM_GPS_CTRL_RESET | CCM_GPS_CTRL_GATE);
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);

	/* setup MBUS clock */
	/* PLL5P and PLL6 are the potential clock sources for MBUS */
	pll6x_clk = clock_get_pll6() / 1000000;
#ifdef CONFIG_MACH_SUN7I
	pll6x_clk *= 2; /* sun7i uses PLL6*2, sun5i uses just PLL6 */
	pll5p_clk = clock_get_pll5p() / 1000000;
	/* smallest dividers that do not overshoot the requested mbus_clk */
	pll6x_div = DIV_ROUND_UP(pll6x_clk, mbus_clk);
	pll5p_div = DIV_ROUND_UP(pll5p_clk, mbus_clk);
	pll6x_rate = pll6x_clk / pll6x_div;
	pll5p_rate = pll5p_clk / pll5p_div;

	/* pick whichever source reaches the higher achievable MBUS rate */
	if (pll6x_div <= 16 && pll6x_rate > pll5p_rate) {
		/* use PLL6 as the MBUS clock source */
		reg_val = CCM_MBUS_CTRL_GATE |
			  CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL6) |
			  CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
			  CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll6x_div));
	} else if (pll5p_div <= 16) {
		/* use PLL5P as the MBUS clock source */
		reg_val = CCM_MBUS_CTRL_GATE |
			  CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL5) |
			  CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
			  CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll5p_div));
		/* neither source can be divided down to mbus_clk (else branch) */
		panic("Bad mbus_clk\n");
	writel(reg_val, &ccm->mbus_clk_cfg);

	/*
	 * open DRAMC AHB & DLL register clock
	 * (gates are cycled off and back on)
	 */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);

#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
355 * The data from rslrX and rdgrX registers (X=rank) is stored
356 * in a single 32-bit value using the following format:
357 * bits [31:26] - DQS gating system latency for byte lane 3
358 * bits [25:24] - DQS gating phase select for byte lane 3
359 * bits [23:18] - DQS gating system latency for byte lane 2
360 * bits [17:16] - DQS gating phase select for byte lane 2
361 * bits [15:10] - DQS gating system latency for byte lane 1
362 * bits [ 9:8 ] - DQS gating phase select for byte lane 1
363 * bits [ 7:2 ] - DQS gating system latency for byte lane 0
364 * bits [ 1:0 ] - DQS gating phase select for byte lane 0
366 static void mctl_set_dqs_gating_delay(int rank, u32 dqs_gating_delay)
368 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
369 u32 lane, number_of_lanes = mctl_get_number_of_lanes();
370 /* rank0 gating system latency (3 bits per lane: cycles) */
371 u32 slr = readl(rank == 0 ? &dram->rslr0 : &dram->rslr1);
372 /* rank0 gating phase select (2 bits per lane: 90, 180, 270, 360) */
373 u32 dgr = readl(rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
374 for (lane = 0; lane < number_of_lanes; lane++) {
375 u32 tmp = dqs_gating_delay >> (lane * 8);
376 slr &= ~(7 << (lane * 3));
377 slr |= ((tmp >> 2) & 7) << (lane * 3);
378 dgr &= ~(3 << (lane * 2));
379 dgr |= (tmp & 3) << (lane * 2);
381 writel(slr, rank == 0 ? &dram->rslr0 : &dram->rslr1);
382 writel(dgr, rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
385 static int dramc_scan_readpipe(void)
387 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
390 /* data training trigger */
391 clrbits_le32(&dram->csr, DRAM_CSR_FAILED);
392 setbits_le32(&dram->ccr, DRAM_CCR_DATA_TRAINING);
394 /* check whether data training process has completed */
395 await_bits_clear(&dram->ccr, DRAM_CCR_DATA_TRAINING);
397 /* check data training result */
398 reg_val = readl(&dram->csr);
399 if (reg_val & DRAM_CSR_FAILED)
405 static void dramc_clock_output_en(u32 on)
407 #if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
408 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
411 setbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
413 clrbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
415 #ifdef CONFIG_MACH_SUN4I
416 struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
418 setbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
420 clrbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
/* tRFC in nanoseconds for different densities (from the DDR3 spec) */
/* indexed by the DRAM_DCR_CHIP_DENSITY_* code (see dramc_init_helper) */
static const u16 tRFC_DDR3_table[6] = {
	/* 256Mb 512Mb 1Gb 2Gb 4Gb 8Gb */
	90, 90, 110, 160, 300, 350
430 static void dramc_set_autorefresh_cycle(u32 clk, u32 density)
432 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
435 tRFC = (tRFC_DDR3_table[density] * clk + 999) / 1000;
436 tREFI = (7987 * clk) >> 10; /* <= 7.8us */
438 writel(DRAM_DRR_TREFI(tREFI) | DRAM_DRR_TRFC(tRFC), &dram->drr);
441 /* Calculate the value for A11, A10, A9 bits in MR0 (write recovery) */
442 static u32 ddr3_write_recovery(u32 clk)
444 u32 twr_ns = 15; /* DDR3 spec says that it is 15ns for all speed bins */
445 u32 twr_ck = (twr_ns * clk + 999) / 1000;
448 else if (twr_ck <= 8)
450 else if (twr_ck <= 10)
457 * If the dram->ppwrsctl (SDR_DPCR) register has the lowest bit set to 1, this
458 * means that DRAM is currently in self-refresh mode and retaining the old
459 * data. Since we have no idea what to do in this situation yet, just set this
460 * register to 0 and initialize DRAM in the same way as on any normal reboot
461 * (discarding whatever was stored there).
463 * Note: on sun7i hardware, the highest 16 bits need to be set to 0x1651 magic
 * value for this write operation to have any effect. On sun5i hardware this
465 * magic value is not necessary. And on sun4i hardware the writes to this
466 * register seem to have no effect at all.
468 static void mctl_disable_power_save(void)
470 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
471 writel(0x16510000, &dram->ppwrsctl);
475 * After the DRAM is powered up or reset, the DDR3 spec requires to wait at
476 * least 500 us before driving the CKE pin (Clock Enable) high. The dram->idct
477 * (SDR_IDCR) register appears to configure this delay, which gets applied
478 * right at the time when the DRAM initialization is activated in the
479 * 'mctl_ddr3_initialize' function.
481 static void mctl_set_cke_delay(void)
483 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
485 /* The CKE delay is represented in DRAM clock cycles, multiplied by N
486 * (where N=2 for sun4i/sun5i and N=3 for sun7i). Here it is set to
487 * the maximum possible value 0x1ffff, just like in the Allwinner's
488 * boot0 bootloader. The resulting delay value is somewhere between
489 * ~0.4 ms (sun5i with 648 MHz DRAM clock speed) and ~1.1 ms (sun7i
490 * with 360 MHz DRAM clock speed). */
491 setbits_le32(&dram->idcr, 0x1ffff);
495 * This triggers the DRAM initialization. It performs sending the mode registers
496 * to the DRAM among other things. Very likely the ZQCL command is also getting
497 * executed (to do the initial impedance calibration on the DRAM side of the
498 * wire). The memory controller and the PHY must be already configured before
499 * calling this function.
501 static void mctl_ddr3_initialize(void)
503 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
504 setbits_le32(&dram->ccr, DRAM_CCR_INIT);
505 await_bits_clear(&dram->ccr, DRAM_CCR_INIT);
/*
 * Perform impedance calibration on the DRAM controller side of the wire.
 * 'zq' packs the configuration: bits [7:0] = ZPROG (external resistor
 * divider selection), bits [27:8] = ZDATA (precomputed impedance data;
 * presumably 0 means "calibrate against the external resistor instead" -
 * TODO confirm against the full source).
 * NOTE(review): this excerpt is truncated - the 'u32 reg_val;' declaration,
 * the early return when !odt_en and the if/else that separates the ZDEN
 * path from the ZCAL path are not visible here.
 */
static void mctl_set_impedance(u32 zq, bool odt_en)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 zprog = zq & 0xFF, zdata = (zq >> 8) & 0xFFFFF;

#ifndef CONFIG_MACH_SUN7I
	/* Appears that some kind of automatically initiated default
	 * ZQ calibration is already in progress at this point on sun4i/sun5i
	 * hardware, but not on sun7i. So it is reasonable to wait for its
	 * completion before doing anything else. */
	await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);

	/* ZQ calibration is not really useful unless ODT is enabled */

#ifdef CONFIG_MACH_SUN7I
	/* Enabling ODT in SDR_IOCR on sun7i hardware results in a deadlock
	 * unless bit 24 is set in SDR_ZQCR1. Not much is known about the
	 * SDR_ZQCR1 register, but there are hints indicating that it might
	 * be related to periodic impedance re-calibration. This particular
	 * magic value is borrowed from the Allwinner boot0 bootloader, and
	 * using it helps to avoid troubles */
	writel((1 << 24) | (1 << 1), &dram->zqcr1);

	/* Needed at least for sun5i, because it does not self clear there */
	clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

	/* Set the user supplied impedance data */
	reg_val = DRAM_ZQCR0_ZDEN | zdata;
	writel(reg_val, &dram->zqcr0);
	/* no need to wait, this takes effect immediately */

	/* Do the calibration using the external resistor */
	reg_val = DRAM_ZQCR0_ZCAL | DRAM_ZQCR0_IMP_DIV(zprog);
	writel(reg_val, &dram->zqcr0);
	/* Wait for the new impedance configuration to settle */
	await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);

	/* Needed at least for sun5i, because it does not self clear there */
	clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

	/* Set I/O configure register */
	writel(DRAM_IOCR_ODT_EN, &dram->iocr);
/*
 * Initialize the DRAM controller and the external DRAM according to 'para',
 * then probe and return the usable memory size (0 indicates failure /
 * unsupported configuration). Only single-rank DDR3 is handled.
 * NOTE(review): this excerpt is truncated - the local declarations
 * (reg_val, density, ret_val), several delays, #endif markers and the
 * early-return statements are not visible here; verify against the full
 * source.
 */
static unsigned long dramc_init_helper(struct dram_para *para)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	/*
	 * only single rank DDR3 is supported by this code even though the
	 * hardware can theoretically support DDR2 and up to two ranks
	 */
	if (para->type != DRAM_MEMORY_TYPE_DDR3 || para->rank_num != 1)

	/* setup DRAM relative clock */
	mctl_setup_dram_clock(para->clock, para->mbus_clock);

	/* Disable any pad power save control */
	mctl_disable_power_save();

	/* stop the DRAM clock output while reconfiguring the controller */
	dramc_clock_output_en(0);

#ifdef CONFIG_MACH_SUN4I
	/* select dram controller 1 */
	writel(DRAM_CSEL_MAGIC, &dram->csel);

	/* start the command lane DLL (phase taken from tpr3) */
	mctl_enable_dll0(para->tpr3);

	/* configure external DRAM */
	reg_val = DRAM_DCR_TYPE_DDR3;
	reg_val |= DRAM_DCR_IO_WIDTH(para->io_width >> 3);

	/* map the chip density (in Mbit) to its DCR encoding */
	if (para->density == 256)
		density = DRAM_DCR_CHIP_DENSITY_256M;
	else if (para->density == 512)
		density = DRAM_DCR_CHIP_DENSITY_512M;
	else if (para->density == 1024)
		density = DRAM_DCR_CHIP_DENSITY_1024M;
	else if (para->density == 2048)
		density = DRAM_DCR_CHIP_DENSITY_2048M;
	else if (para->density == 4096)
		density = DRAM_DCR_CHIP_DENSITY_4096M;
	else if (para->density == 8192)
		density = DRAM_DCR_CHIP_DENSITY_8192M;
		density = DRAM_DCR_CHIP_DENSITY_256M; /* fallback (else branch) */

	reg_val |= DRAM_DCR_CHIP_DENSITY(density);
	reg_val |= DRAM_DCR_BUS_WIDTH((para->bus_width >> 3) - 1);
	reg_val |= DRAM_DCR_RANK_SEL(para->rank_num - 1);
	reg_val |= DRAM_DCR_CMD_RANK_ALL;
	reg_val |= DRAM_DCR_MODE(DRAM_DCR_MODE_INTERLEAVE);
	writel(reg_val, &dram->dcr);

	/* re-enable the DRAM clock output */
	dramc_clock_output_en(1);

	mctl_set_impedance(para->zq, para->odt_en);

	mctl_set_cke_delay();

	/* wait for any pending hardware init sequence to finish */
	await_bits_clear(&dram->ccr, DRAM_CCR_INIT);

	/* start the per-byte-lane DLLs (phase taken from tpr3) */
	mctl_enable_dllx(para->tpr3);

	/* set refresh period */
	dramc_set_autorefresh_cycle(para->clock, density);

	/* set timing parameters */
	writel(para->tpr0, &dram->tpr0);
	writel(para->tpr1, &dram->tpr1);
	writel(para->tpr2, &dram->tpr2);

	/* compose the DDR3 mode register MR0 */
	reg_val = DRAM_MR_BURST_LENGTH(0x0);
#if (defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I))
	reg_val |= DRAM_MR_POWER_DOWN;
	reg_val |= DRAM_MR_CAS_LAT(para->cas - 4);
	reg_val |= DRAM_MR_WRITE_RECOVERY(ddr3_write_recovery(para->clock));
	writel(reg_val, &dram->mr);

	writel(para->emr1, &dram->emr);
	writel(para->emr2, &dram->emr2);
	writel(para->emr3, &dram->emr3);

	/* disable drift compensation and set passive DQS window mode */
	clrsetbits_le32(&dram->ccr, DRAM_CCR_DQS_DRIFT_COMP, DRAM_CCR_DQS_GATE);

#ifdef CONFIG_MACH_SUN7I
	/* Command rate timing mode 2T & 1T */
	if (para->tpr4 & 0x1)
		setbits_le32(&dram->ccr, DRAM_CCR_COMMAND_RATE_1T);

	/* initialize external DRAM */
	mctl_ddr3_initialize();

	/* scan read pipe value */

	/* Hardware DQS gate training */
	ret_val = dramc_scan_readpipe();

	/* allow to override the DQS training results with a custom delay */
	if (para->dqs_gating_delay)
		mctl_set_dqs_gating_delay(0, para->dqs_gating_delay);

	/* set the DQS gating window type */
	if (para->active_windowing)
		clrbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);
		setbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE); /* else branch */

	/* configure all host port */
	mctl_configure_hostport();

	/* probe and return the actually usable memory size */
	return get_ram_size((long *)PHYS_SDRAM_0, PHYS_SDRAM_0_SIZE);
693 unsigned long dramc_init(struct dram_para *para)
695 unsigned long dram_size, actual_density;
697 /* If the dram configuration is not provided, use a default */
701 /* if everything is known, then autodetection is not necessary */
702 if (para->io_width && para->bus_width && para->density)
703 return dramc_init_helper(para);
705 /* try to autodetect the DRAM bus width and density */
707 para->bus_width = 32;
708 #if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN5I)
709 /* only A0-A14 address lines on A10/A13, limiting max density to 4096 */
710 para->density = 4096;
712 /* all A0-A15 address lines on A20, which allow density 8192 */
713 para->density = 8192;
716 dram_size = dramc_init_helper(para);
718 /* if 32-bit bus width failed, try 16-bit bus width instead */
719 para->bus_width = 16;
720 dram_size = dramc_init_helper(para);
722 /* if 16-bit bus width also failed, then bail out */
727 /* check if we need to adjust the density */
728 actual_density = (dram_size >> 17) * para->io_width / para->bus_width;
730 if (actual_density != para->density) {
731 /* update the density and re-initialize DRAM again */
732 para->density = actual_density;
733 dram_size = dramc_init_helper(para);