2 * sun8i H3 platform dram controller init
4 * (C) Copyright 2007-2015 Allwinner Technology Co.
5 * Jerry Wang <wangflord@allwinnertech.com>
6 * (C) Copyright 2015 Vishnu Patekar <vishnupatekar0510@gmail.com>
7 * (C) Copyright 2015 Hans de Goede <hdegoede@redhat.com>
8 * (C) Copyright 2015 Jens Kuske <jenskuske@gmail.com>
10 * SPDX-License-Identifier: GPL-2.0+
14 #include <asm/arch/clock.h>
15 #include <asm/arch/dram.h>
16 #include <linux/kconfig.h>
 * The delay parameters below allegedly allow specifying delay times of some
20 * unknown unit for each individual bit trace in each of the four data bytes
21 * the 32-bit wide access consists of. Also three control signals can be
22 * adjusted individually.
24 #define BITS_PER_BYTE 8
25 #define NR_OF_BYTE_LANES (32 / BITS_PER_BYTE)
26 /* The eight data lines (DQn) plus DM, DQS and DQSN */
27 #define LINES_PER_BYTE_LANE (BITS_PER_BYTE + 3)
33 const u8 dx_read_delays[NR_OF_BYTE_LANES][LINES_PER_BYTE_LANE];
34 const u8 dx_write_delays[NR_OF_BYTE_LANES][LINES_PER_BYTE_LANE];
35 const u8 ac_delays[31];
38 static inline int ns_to_t(int nanoseconds)
40 const unsigned int ctrl_freq = CONFIG_DRAM_CLK / 2;
42 return DIV_ROUND_UP(ctrl_freq * nanoseconds, 1000);
45 static u32 bin_to_mgray(int val)
47 static const u8 lookup_table[32] = {
48 0x00, 0x01, 0x02, 0x03, 0x06, 0x07, 0x04, 0x05,
49 0x0c, 0x0d, 0x0e, 0x0f, 0x0a, 0x0b, 0x08, 0x09,
50 0x18, 0x19, 0x1a, 0x1b, 0x1e, 0x1f, 0x1c, 0x1d,
51 0x14, 0x15, 0x16, 0x17, 0x12, 0x13, 0x10, 0x11,
54 return lookup_table[clamp(val, 0, 31)];
57 static int mgray_to_bin(u32 val)
59 static const u8 lookup_table[32] = {
60 0x00, 0x01, 0x02, 0x03, 0x06, 0x07, 0x04, 0x05,
61 0x0e, 0x0f, 0x0c, 0x0d, 0x08, 0x09, 0x0a, 0x0b,
62 0x1e, 0x1f, 0x1c, 0x1d, 0x18, 0x19, 0x1a, 0x1b,
63 0x10, 0x11, 0x12, 0x13, 0x16, 0x17, 0x14, 0x15,
66 return lookup_table[val & 0x1f];
69 static void mctl_phy_init(u32 val)
71 struct sunxi_mctl_ctl_reg * const mctl_ctl =
72 (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
74 writel(val | PIR_INIT, &mctl_ctl->pir);
75 mctl_await_completion(&mctl_ctl->pgsr[0], PGSR_INIT_DONE, 0x1);
78 static void mctl_set_bit_delays(struct dram_para *para)
80 struct sunxi_mctl_ctl_reg * const mctl_ctl =
81 (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
84 clrbits_le32(&mctl_ctl->pgcr[0], 1 << 26);
86 for (i = 0; i < NR_OF_BYTE_LANES; i++)
87 for (j = 0; j < LINES_PER_BYTE_LANE; j++)
88 writel(DXBDLR_WRITE_DELAY(para->dx_write_delays[i][j]) |
89 DXBDLR_READ_DELAY(para->dx_read_delays[i][j]),
90 &mctl_ctl->dx[i].bdlr[j]);
92 for (i = 0; i < 31; i++)
93 writel(ACBDLR_WRITE_DELAY(para->ac_delays[i]),
94 &mctl_ctl->acbdlr[i]);
96 setbits_le32(&mctl_ctl->pgcr[0], 1 << 26);
102 MBUS_PORT_UNUSED = 2,
111 MBUS_PORT_DE_CFD = 11,
121 inline void mbus_configure_port(u8 port,
124 u8 qos, /* MBUS_QOS_LOWEST .. MBUS_QOS_HIGEST */
125 u8 waittime, /* 0 .. 0xf */
126 u8 acs, /* 0 .. 0xff */
127 u16 bwl0, /* 0 .. 0xffff, bandwidth limit in MB/s */
131 struct sunxi_mctl_com_reg * const mctl_com =
132 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
134 const u32 cfg0 = ( (bwlimit ? (1 << 0) : 0)
135 | (priority ? (1 << 1) : 0)
137 | ((waittime & 0xf) << 4)
138 | ((acs & 0xff) << 8)
140 const u32 cfg1 = ((u32)bwl2 << 16) | (bwl1 & 0xffff);
142 debug("MBUS port %d cfg0 %08x cfg1 %08x\n", port, cfg0, cfg1);
143 writel(cfg0, &mctl_com->mcr[port][0]);
144 writel(cfg1, &mctl_com->mcr[port][1]);
147 #define MBUS_CONF(port, bwlimit, qos, acs, bwl0, bwl1, bwl2) \
148 mbus_configure_port(MBUS_PORT_ ## port, bwlimit, false, \
149 MBUS_QOS_ ## qos, 0, acs, bwl0, bwl1, bwl2)
151 static void mctl_set_master_priority(void)
153 struct sunxi_mctl_com_reg * const mctl_com =
154 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
156 /* enable bandwidth limit windows and set windows size 1us */
157 writel(0x00010190, &mctl_com->bwcr);
159 /* set cpu high priority */
160 writel(0x00000001, &mctl_com->mapr);
162 MBUS_CONF( CPU, true, HIGHEST, 0, 512, 256, 128);
163 MBUS_CONF( GPU, true, HIGH, 0, 1536, 1024, 256);
164 MBUS_CONF(UNUSED, true, HIGHEST, 0, 512, 256, 96);
165 MBUS_CONF( DMA, true, HIGHEST, 0, 256, 128, 32);
166 MBUS_CONF( VE, true, HIGH, 0, 1792, 1600, 256);
167 MBUS_CONF( CSI, true, HIGHEST, 0, 256, 128, 32);
168 MBUS_CONF( NAND, true, HIGH, 0, 256, 128, 64);
169 MBUS_CONF( SS, true, HIGHEST, 0, 256, 128, 64);
170 MBUS_CONF( TS, true, HIGHEST, 0, 256, 128, 64);
171 MBUS_CONF( DI, true, HIGH, 0, 1024, 256, 64);
172 MBUS_CONF( DE, true, HIGHEST, 3, 8192, 6120, 1024);
173 MBUS_CONF(DE_CFD, true, HIGH, 0, 1024, 288, 64);
176 static void mctl_set_timing_params(struct dram_para *para)
178 struct sunxi_mctl_ctl_reg * const mctl_ctl =
179 (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
182 u8 tfaw = ns_to_t(50);
183 u8 trrd = max(ns_to_t(10), 4);
184 u8 trcd = ns_to_t(15);
185 u8 trc = ns_to_t(53);
186 u8 txp = max(ns_to_t(8), 3);
187 u8 twtr = max(ns_to_t(8), 4);
188 u8 trtp = max(ns_to_t(8), 4);
189 u8 twr = max(ns_to_t(15), 3);
190 u8 trp = ns_to_t(15);
191 u8 tras = ns_to_t(38);
192 u16 trefi = ns_to_t(7800) / 32;
193 u16 trfc = ns_to_t(350);
204 u8 tcl = 6; /* CL 12 */
205 u8 tcwl = 4; /* CWL 8 */
209 u32 tdinit0 = (500 * CONFIG_DRAM_CLK) + 1; /* 500us */
210 u32 tdinit1 = (360 * CONFIG_DRAM_CLK) / 1000 + 1; /* 360ns */
211 u32 tdinit2 = (200 * CONFIG_DRAM_CLK) + 1; /* 200us */
212 u32 tdinit3 = (1 * CONFIG_DRAM_CLK) + 1; /* 1us */
214 u8 twtp = tcwl + 2 + twr; /* WL + BL / 2 + tWR */
215 u8 twr2rd = tcwl + 2 + twtr; /* WL + BL / 2 + tWTR */
216 u8 trd2wr = tcl + 2 + 1 - tcwl; /* RL + BL / 2 + 2 - WL */
218 /* set mode register */
219 writel(0x1c70, &mctl_ctl->mr[0]); /* CL=11, WR=12 */
220 writel(0x40, &mctl_ctl->mr[1]);
221 writel(0x18, &mctl_ctl->mr[2]); /* CWL=8 */
222 writel(0x0, &mctl_ctl->mr[3]);
224 /* set DRAM timing */
225 writel(DRAMTMG0_TWTP(twtp) | DRAMTMG0_TFAW(tfaw) |
226 DRAMTMG0_TRAS_MAX(trasmax) | DRAMTMG0_TRAS(tras),
227 &mctl_ctl->dramtmg[0]);
228 writel(DRAMTMG1_TXP(txp) | DRAMTMG1_TRTP(trtp) | DRAMTMG1_TRC(trc),
229 &mctl_ctl->dramtmg[1]);
230 writel(DRAMTMG2_TCWL(tcwl) | DRAMTMG2_TCL(tcl) |
231 DRAMTMG2_TRD2WR(trd2wr) | DRAMTMG2_TWR2RD(twr2rd),
232 &mctl_ctl->dramtmg[2]);
233 writel(DRAMTMG3_TMRW(tmrw) | DRAMTMG3_TMRD(tmrd) | DRAMTMG3_TMOD(tmod),
234 &mctl_ctl->dramtmg[3]);
235 writel(DRAMTMG4_TRCD(trcd) | DRAMTMG4_TCCD(tccd) | DRAMTMG4_TRRD(trrd) |
236 DRAMTMG4_TRP(trp), &mctl_ctl->dramtmg[4]);
237 writel(DRAMTMG5_TCKSRX(tcksrx) | DRAMTMG5_TCKSRE(tcksre) |
238 DRAMTMG5_TCKESR(tckesr) | DRAMTMG5_TCKE(tcke),
239 &mctl_ctl->dramtmg[5]);
241 /* set two rank timing */
242 clrsetbits_le32(&mctl_ctl->dramtmg[8], (0xff << 8) | (0xff << 0),
243 (0x66 << 8) | (0x10 << 0));
245 /* set PHY interface timing, write latency and read latency configure */
246 writel((0x2 << 24) | (t_rdata_en << 16) | (0x1 << 8) |
247 (wr_latency << 0), &mctl_ctl->pitmg[0]);
249 /* set PHY timing, PTR0-2 use default */
250 writel(PTR3_TDINIT0(tdinit0) | PTR3_TDINIT1(tdinit1), &mctl_ctl->ptr[3]);
251 writel(PTR4_TDINIT2(tdinit2) | PTR4_TDINIT3(tdinit3), &mctl_ctl->ptr[4]);
253 /* set refresh timing */
254 writel(RFSHTMG_TREFI(trefi) | RFSHTMG_TRFC(trfc), &mctl_ctl->rfshtmg);
257 static void mctl_zq_calibration(struct dram_para *para)
259 struct sunxi_mctl_ctl_reg * const mctl_ctl =
260 (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
262 if ((readl(SUNXI_SRAMC_BASE + 0x24) & 0xff) == 0 &&
263 (readl(SUNXI_SRAMC_BASE + 0xf0) & 0x1) == 0) {
266 clrsetbits_le32(&mctl_ctl->zqcr, 0xffff,
267 CONFIG_DRAM_ZQ & 0xffff);
269 writel(PIR_CLRSR, &mctl_ctl->pir);
270 mctl_phy_init(PIR_ZCAL);
272 reg_val = readl(&mctl_ctl->zqdr[0]);
273 reg_val &= (0x1f << 16) | (0x1f << 0);
274 reg_val |= reg_val << 8;
275 writel(reg_val, &mctl_ctl->zqdr[0]);
277 reg_val = readl(&mctl_ctl->zqdr[1]);
278 reg_val &= (0x1f << 16) | (0x1f << 0);
279 reg_val |= reg_val << 8;
280 writel(reg_val, &mctl_ctl->zqdr[1]);
281 writel(reg_val, &mctl_ctl->zqdr[2]);
287 writel(0x0a0a0a0a, &mctl_ctl->zqdr[2]);
289 for (i = 0; i < 6; i++) {
290 u8 zq = (CONFIG_DRAM_ZQ >> (i * 4)) & 0xf;
292 writel((zq << 20) | (zq << 16) | (zq << 12) |
293 (zq << 8) | (zq << 4) | (zq << 0),
296 writel(PIR_CLRSR, &mctl_ctl->pir);
297 mctl_phy_init(PIR_ZCAL);
299 zq_val[i] = readl(&mctl_ctl->zqdr[0]) & 0xff;
300 writel(REPEAT_BYTE(zq_val[i]), &mctl_ctl->zqdr[2]);
302 writel(PIR_CLRSR, &mctl_ctl->pir);
303 mctl_phy_init(PIR_ZCAL);
305 val = readl(&mctl_ctl->zqdr[0]) >> 24;
306 zq_val[i] |= bin_to_mgray(mgray_to_bin(val) - 1) << 8;
309 writel((zq_val[1] << 16) | zq_val[0], &mctl_ctl->zqdr[0]);
310 writel((zq_val[3] << 16) | zq_val[2], &mctl_ctl->zqdr[1]);
311 writel((zq_val[5] << 16) | zq_val[4], &mctl_ctl->zqdr[2]);
315 static void mctl_set_cr(struct dram_para *para)
317 struct sunxi_mctl_com_reg * const mctl_com =
318 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
320 writel(MCTL_CR_BL8 | MCTL_CR_2T | MCTL_CR_DDR3 | MCTL_CR_INTERLEAVED |
321 MCTL_CR_EIGHT_BANKS | MCTL_CR_BUS_WIDTH(para->bus_width) |
322 (para->dual_rank ? MCTL_CR_DUAL_RANK : MCTL_CR_SINGLE_RANK) |
323 MCTL_CR_PAGE_SIZE(para->page_size) |
324 MCTL_CR_ROW_BITS(para->row_bits), &mctl_com->cr);
327 static void mctl_sys_init(struct dram_para *para)
329 struct sunxi_ccm_reg * const ccm =
330 (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
331 struct sunxi_mctl_ctl_reg * const mctl_ctl =
332 (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
334 clrbits_le32(&ccm->mbus0_clk_cfg, MBUS_CLK_GATE);
335 clrbits_le32(&ccm->mbus_reset, CCM_MBUS_RESET_RESET);
336 clrbits_le32(&ccm->ahb_gate0, 1 << AHB_GATE_OFFSET_MCTL);
337 clrbits_le32(&ccm->ahb_reset0_cfg, 1 << AHB_RESET_OFFSET_MCTL);
338 clrbits_le32(&ccm->pll5_cfg, CCM_PLL5_CTRL_EN);
341 clrbits_le32(&ccm->dram_clk_cfg, CCM_DRAMCLK_CFG_RST);
344 clock_set_pll5(CONFIG_DRAM_CLK * 2 * 1000000, false);
345 clrsetbits_le32(&ccm->dram_clk_cfg,
346 CCM_DRAMCLK_CFG_DIV_MASK | CCM_DRAMCLK_CFG_SRC_MASK,
347 CCM_DRAMCLK_CFG_DIV(1) | CCM_DRAMCLK_CFG_SRC_PLL5 |
348 CCM_DRAMCLK_CFG_UPD);
349 mctl_await_completion(&ccm->dram_clk_cfg, CCM_DRAMCLK_CFG_UPD, 0);
351 setbits_le32(&ccm->ahb_reset0_cfg, 1 << AHB_RESET_OFFSET_MCTL);
352 setbits_le32(&ccm->ahb_gate0, 1 << AHB_GATE_OFFSET_MCTL);
353 setbits_le32(&ccm->mbus_reset, CCM_MBUS_RESET_RESET);
354 setbits_le32(&ccm->mbus0_clk_cfg, MBUS_CLK_GATE);
356 setbits_le32(&ccm->dram_clk_cfg, CCM_DRAMCLK_CFG_RST);
359 writel(0xc00e, &mctl_ctl->clken);
363 static int mctl_channel_init(struct dram_para *para)
365 struct sunxi_mctl_com_reg * const mctl_com =
366 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
367 struct sunxi_mctl_ctl_reg * const mctl_ctl =
368 (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
373 mctl_set_timing_params(para);
374 mctl_set_master_priority();
376 /* setting VTC, default disable all VT */
377 clrbits_le32(&mctl_ctl->pgcr[0], (1 << 30) | 0x3f);
378 clrsetbits_le32(&mctl_ctl->pgcr[1], 1 << 24, 1 << 26);
380 /* increase DFI_PHY_UPD clock */
381 writel(PROTECT_MAGIC, &mctl_com->protect);
383 clrsetbits_le32(&mctl_ctl->upd2, 0xfff << 16, 0x50 << 16);
384 writel(0x0, &mctl_com->protect);
388 for (i = 0; i < 4; i++)
389 clrsetbits_le32(&mctl_ctl->dx[i].gcr, (0x3 << 4) |
390 (0x1 << 1) | (0x3 << 2) | (0x3 << 12) |
392 IS_ENABLED(CONFIG_DRAM_ODT_EN) ? 0x0 : 0x2);
394 /* AC PDR should always ON */
395 setbits_le32(&mctl_ctl->aciocr, 0x1 << 1);
397 /* set DQS auto gating PD mode */
398 setbits_le32(&mctl_ctl->pgcr[2], 0x3 << 6);
400 /* dx ddr_clk & hdr_clk dynamic mode */
401 clrbits_le32(&mctl_ctl->pgcr[0], (0x3 << 14) | (0x3 << 12));
403 /* dphy & aphy phase select 270 degree */
404 clrsetbits_le32(&mctl_ctl->pgcr[2], (0x3 << 10) | (0x3 << 8),
405 (0x1 << 10) | (0x2 << 8));
408 if (para->bus_width != 32) {
409 writel(0x0, &mctl_ctl->dx[2].gcr);
410 writel(0x0, &mctl_ctl->dx[3].gcr);
413 /* data training configuration */
414 clrsetbits_le32(&mctl_ctl->dtcr, 0xf << 24,
415 (para->dual_rank ? 0x3 : 0x1) << 24);
417 mctl_set_bit_delays(para);
420 mctl_zq_calibration(para);
422 mctl_phy_init(PIR_PLLINIT | PIR_DCAL | PIR_PHYRST | PIR_DRAMRST |
423 PIR_DRAMINIT | PIR_QSGATE);
425 /* detect ranks and bus width */
426 if (readl(&mctl_ctl->pgsr[0]) & (0xfe << 20)) {
428 if (((readl(&mctl_ctl->dx[0].gsr[0]) >> 24) & 0x2) ||
429 ((readl(&mctl_ctl->dx[1].gsr[0]) >> 24) & 0x2)) {
430 clrsetbits_le32(&mctl_ctl->dtcr, 0xf << 24, 0x1 << 24);
434 /* only half DQ width */
435 if (((readl(&mctl_ctl->dx[2].gsr[0]) >> 24) & 0x1) ||
436 ((readl(&mctl_ctl->dx[3].gsr[0]) >> 24) & 0x1)) {
437 writel(0x0, &mctl_ctl->dx[2].gcr);
438 writel(0x0, &mctl_ctl->dx[3].gcr);
439 para->bus_width = 16;
446 mctl_phy_init(PIR_QSGATE);
447 if (readl(&mctl_ctl->pgsr[0]) & (0xfe << 20))
451 /* check the dramc status */
452 mctl_await_completion(&mctl_ctl->statr, 0x1, 0x1);
454 /* liuke added for refresh debug */
455 setbits_le32(&mctl_ctl->rfshctl0, 0x1 << 31);
457 clrbits_le32(&mctl_ctl->rfshctl0, 0x1 << 31);
460 /* set PGCR3, CKE polarity */
461 writel(0x00aa0060, &mctl_ctl->pgcr[3]);
463 /* power down zq calibration module for power save */
464 setbits_le32(&mctl_ctl->zqcr, ZQCR_PWRDOWN);
466 /* enable master access */
467 writel(0xffffffff, &mctl_com->maer);
472 static void mctl_auto_detect_dram_size(struct dram_para *para)
474 /* detect row address bits */
475 para->page_size = 512;
479 for (para->row_bits = 11; para->row_bits < 16; para->row_bits++)
480 if (mctl_mem_matches((1 << (para->row_bits + 3)) * para->page_size))
483 /* detect page size */
484 para->page_size = 8192;
487 for (para->page_size = 512; para->page_size < 8192; para->page_size *= 2)
488 if (mctl_mem_matches(para->page_size))
493 * The actual values used here are taken from Allwinner provided boot0
494 * binaries, though they are probably board specific, so would likely benefit
 * from individual tuning for each board. Apparently a lot of boards copy from
496 * some Allwinner reference design, so we go with those generic values for now
497 * in the hope that they are reasonable for most (all?) boards.
499 #define SUN8I_H3_DX_READ_DELAYS \
500 {{ 18, 18, 18, 18, 18, 18, 18, 18, 18, 0, 0 }, \
501 { 14, 14, 14, 14, 14, 14, 14, 14, 14, 0, 0 }, \
502 { 18, 18, 18, 18, 18, 18, 18, 18, 18, 0, 0 }, \
503 { 14, 14, 14, 14, 14, 14, 14, 14, 14, 0, 0 }}
504 #define SUN8I_H3_DX_WRITE_DELAYS \
505 {{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10 }, \
506 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10 }, \
507 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10 }, \
508 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6 }}
509 #define SUN8I_H3_AC_DELAYS \
510 { 0, 0, 0, 0, 0, 0, 0, 0, \
511 0, 0, 0, 0, 0, 0, 0, 0, \
512 0, 0, 0, 0, 0, 0, 0, 0, \
513 0, 0, 0, 0, 0, 0, 0 }
515 unsigned long sunxi_dram_init(void)
517 struct sunxi_mctl_com_reg * const mctl_com =
518 (struct sunxi_mctl_com_reg *)SUNXI_DRAM_COM_BASE;
519 struct sunxi_mctl_ctl_reg * const mctl_ctl =
520 (struct sunxi_mctl_ctl_reg *)SUNXI_DRAM_CTL0_BASE;
522 struct dram_para para = {
527 .dx_read_delays = SUN8I_H3_DX_READ_DELAYS,
528 .dx_write_delays = SUN8I_H3_DX_WRITE_DELAYS,
529 .ac_delays = SUN8I_H3_AC_DELAYS,
532 mctl_sys_init(¶);
533 if (mctl_channel_init(¶))
537 writel(0x00000303, &mctl_ctl->odtmap);
539 writel(0x00000201, &mctl_ctl->odtmap);
543 writel(0x0c000400, &mctl_ctl->odtcfg);
545 /* clear credit value */
546 setbits_le32(&mctl_com->cccr, 1 << 31);
549 mctl_auto_detect_dram_size(¶);
552 return (1 << (para.row_bits + 3)) * para.page_size *
553 (para.dual_rank ? 2 : 1);