X-Git-Url: https://git.sur5r.net/?a=blobdiff_plain;f=cpu%2Fmpc83xx%2Fspd_sdram.c;h=0d93f2e1ea4ea25a455356e725bd985208cef696;hb=701f649aa304e547639fb8bbfdfec0753e9b3b9a;hp=153848d343db088f42b255a636d8a29d4d412494;hpb=f6eda7f80ccc13d658020268c507d7173cf2e8aa;p=u-boot

diff --git a/cpu/mpc83xx/spd_sdram.c b/cpu/mpc83xx/spd_sdram.c
index 153848d343..0d93f2e1ea 100644
--- a/cpu/mpc83xx/spd_sdram.c
+++ b/cpu/mpc83xx/spd_sdram.c
@@ -4,7 +4,7 @@
  * (C) Copyright 2006
  * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
  *
- * Copyright 2004 Freescale Semiconductor.
+ * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
  * (C) Copyright 2003 Motorola Inc.
  * Xianghua Xiao (X.Xiao@motorola.com)
  *
@@ -25,15 +25,6 @@
  * along with this program; if not, write to the Free Software
  * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
  * MA 02111-1307 USA
- *
- * Change log:
- *
- * 20050101: Eran Liberty (liberty@freescale.com)
- *           Initial file creating (porting from 85XX & 8260)
- * 20060601: Dave Liu (daveliu@freescale.com)
- *           DDR ECC support
- *           unify variable names for 83xx
- *           code cleanup
  */
 
 #include
@@ -45,6 +36,8 @@
 
 #ifdef CONFIG_SPD_EEPROM
 
+DECLARE_GLOBAL_DATA_PTR;
+
 #if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRC)
 extern void dma_init(void);
 extern uint dma_check(void);
@@ -58,19 +51,16 @@ extern int dma_xfer(void *dest, uint count, void *src);
 /*
  * Convert picoseconds into clock cycles (rounding up if needed).
  */
-extern ulong get_ddr_clk(ulong dummy);
-
 int picos_to_clk(int picos)
 {
 	unsigned int ddr_bus_clk;
 	int clks;
 
-	ddr_bus_clk = get_ddr_clk(0) >> 1;
+	ddr_bus_clk = gd->ddr_clk >> 1;
 	clks = picos / ((1000000000 / ddr_bus_clk) * 1000);
-	if (picos % ((1000000000 / ddr_bus_clk) * 1000) !=0) {
+	if (picos % ((1000000000 / ddr_bus_clk) * 1000) != 0)
 		clks++;
-	}
 
 	return clks;
 }
@@ -112,11 +102,10 @@ static void spd_debug(spd_eeprom_t *spd)
 
 long int spd_sdram()
 {
-	volatile immap_t *immap = (immap_t *)CFG_IMMRBAR;
+	volatile immap_t *immap = (immap_t *)CFG_IMMR;
 	volatile ddr83xx_t *ddr = &immap->ddr;
 	volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
 	spd_eeprom_t spd;
-	unsigned tmp;
 	unsigned int memsize;
 	unsigned int law_size;
 	unsigned char caslat, caslat_ctrl;
@@ -128,8 +117,7 @@ long int spd_sdram()
 	unsigned sdram_cfg;
 	unsigned int ddrc_ecc_enable;
 
-
-	/* Read SPD parameters with I2C */
+	/* Read SPD parameters with I2C */
 	CFG_READ_SPD(SPD_EEPROM_ADDRESS, 0, 1, (uchar *) & spd, sizeof (spd));
 #ifdef SPD_DEBUG
 	spd_debug(&spd);
@@ -159,7 +147,27 @@ long int spd_sdram()
 			spd.ncol_addr);
 		return 0;
 	}
-	/* Setup DDR chip select register */
+	/* Setup DDR chip select register */
+#ifdef CFG_83XX_DDR_USES_CS0
+	ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
+	ddr->cs_config[0] = ( 1 << 31
+			    | (spd.nrow_addr - 12) << 8
+			    | (spd.ncol_addr - 8) );
+	debug("\n");
+	debug("cs0_bnds = 0x%08x\n",ddr->csbnds[0].csbnds);
+	debug("cs0_config = 0x%08x\n",ddr->cs_config[0]);
+
+	if (spd.nrows == 2) {
+		ddr->csbnds[1].csbnds = ( (banksize(spd.row_dens) >> 8)
+					| ((banksize(spd.row_dens) >> 23) - 1) );
+		ddr->cs_config[1] = ( 1<<31
+				    | (spd.nrow_addr-12) << 8
+				    | (spd.ncol_addr-8) );
+		debug("cs1_bnds = 0x%08x\n",ddr->csbnds[1].csbnds);
+		debug("cs1_config = 0x%08x\n",ddr->cs_config[1]);
+	}
+
+#else
 	ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
 	ddr->cs_config[2] = ( 1 << 31
 			    | (spd.nrow_addr - 12) << 8
 			    | (spd.ncol_addr - 8) );
@@ -177,6 +185,7 @@ long int spd_sdram()
 		debug("cs3_bnds = 0x%08x\n",ddr->csbnds[3].csbnds);
 		debug("cs3_config = 0x%08x\n",ddr->cs_config[3]);
 	}
+#endif
 
 	if (spd.mem_type != 0x07) {
 		puts("No DDR module found!\n");
@@ -209,18 +218,19 @@ long int spd_sdram()
 	 * CAS Lat DDR I   Ctrl
 	 * Clocks   SPD Bit   Value
 	 * -------+--------+---------
-	 *  1.0       0       001
-	 *  1.5       1       010
-	 *  2.0       2       011
-	 *  2.5       3       100
-	 *  3.0       4       101
-	 *  3.5       5       110
-	 *  4.0       6       111
+	 *  1.0       0       001
+	 *  1.5       1       010
+	 *  2.0       2       011
+	 *  2.5       3       100
+	 *  3.0       4       101
+	 *  3.5       5       110
+	 *  4.0       6       111
 	 */
 
 	caslat = __ilog2(spd.cas_lat);
-	if (caslat > 4 ) {
-		printf("DDR: Invalid SPD CAS Latency, caslat=%02X\n", caslat);
+	if (caslat > 6 ) {
+		printf("DDR: Invalid SPD CAS Latency, caslat=%02X\n",
+			spd.cas_lat);
 		return 0;
 	}
 	max_bus_clk = 1000 *10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
@@ -229,59 +239,108 @@ long int spd_sdram()
 
 	debug("DDR:Module maximum data rate is: %dMhz\n", max_data_rate);
 
-	ddrc_clk = get_ddr_clk(0) / 1000000;
+	ddrc_clk = gd->ddr_clk / 1000000;
 
 	if (max_data_rate >= 390) { /* it is DDR 400 */
-		printf("DDR: platform not support DDR 400\n");
-		return 0;
+		if (ddrc_clk <= 410 && ddrc_clk > 350) {
+			/* DDR controller clk at 350~410 */
+			effective_data_rate = 400; /* 5ns */
+			caslat = caslat;
+		} else if (ddrc_clk <= 350 && ddrc_clk > 280) {
+			/* DDR controller clk at 280~350 */
+			effective_data_rate = 333; /* 6ns */
+			if (spd.clk_cycle2 == 0x60)
+				caslat = caslat - 1;
+			else
+				caslat = caslat;
+		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
+			/* DDR controller clk at 230~280 */
+			effective_data_rate = 266; /* 7.5ns */
+			if (spd.clk_cycle3 == 0x75)
+				caslat = caslat - 2;
+			else if (spd.clk_cycle2 == 0x60)
+				caslat = caslat - 1;
+			else
+				caslat = caslat;
+		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
+			/* DDR controller clk at 90~230 */
+			effective_data_rate = 200; /* 10ns */
+			if (spd.clk_cycle3 == 0x75)
+				caslat = caslat - 2;
+			else if (spd.clk_cycle2 == 0x60)
+				caslat = caslat - 1;
+			else
+				caslat = caslat;
+		}
 	} else if (max_data_rate >= 323) { /* it is DDR 333 */
 		if (ddrc_clk <= 350 && ddrc_clk > 280) {
-			/* DDRC clk at 280~350 */
+			/* DDR controller clk at 280~350 */
 			effective_data_rate = 333; /* 6ns */
 			caslat = caslat;
 		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
-			/* DDRC clk at 230~280 */
-			if (spd.clk_cycle2 == 0x75) {
-				effective_data_rate = 266; /* 7.5ns */
+			/* DDR controller clk at 230~280 */
+			effective_data_rate = 266; /* 7.5ns */
+			if (spd.clk_cycle2 == 0x75)
 				caslat = caslat - 1;
-			}
+			else
+				caslat = caslat;
 		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
-			/* DDRC clk at 90~230 */
-			if (spd.clk_cycle3 == 0xa0) {
-				effective_data_rate = 200; /* 10ns */
+			/* DDR controller clk at 90~230 */
+			effective_data_rate = 200; /* 10ns */
+			if (spd.clk_cycle3 == 0xa0)
 				caslat = caslat - 2;
-			}
+			else if (spd.clk_cycle2 == 0x75)
+				caslat = caslat - 1;
+			else
+				caslat = caslat;
 		}
 	} else if (max_data_rate >= 256) { /* it is DDR 266 */
 		if (ddrc_clk <= 350 && ddrc_clk > 280) {
-			/* DDRC clk at 280~350 */
+			/* DDR controller clk at 280~350 */
 			printf("DDR: DDR controller freq is more than "
 				"max data rate of the module\n");
 			return 0;
 		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
-			/* DDRC clk at 230~280 */
+			/* DDR controller clk at 230~280 */
 			effective_data_rate = 266; /* 7.5ns */
 			caslat = caslat;
 		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
-			/* DDRC clk at 90~230 */
-			if (spd.clk_cycle2 == 0xa0) {
-				effective_data_rate = 200; /* 10ns */
+			/* DDR controller clk at 90~230 */
+			effective_data_rate = 200; /* 10ns */
+			if (spd.clk_cycle2 == 0xa0)
 				caslat = caslat - 1;
-			}
 		}
 	} else if (max_data_rate >= 190) { /* it is DDR 200 */
 		if (ddrc_clk <= 350 && ddrc_clk > 230) {
-			/* DDRC clk at 230~350 */
+			/* DDR controller clk at 230~350 */
 			printf("DDR: DDR controller freq is more than "
 				"max data rate of the module\n");
 			return 0;
 		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
-			/* DDRC clk at 90~230 */
+			/* DDR controller clk at 90~230 */
 			effective_data_rate = 200; /* 10ns */
 			caslat = caslat;
 		}
 	}
+	debug("DDR:Effective data rate is: %dMhz\n", effective_data_rate);
+	debug("DDR:The MSB 1 of CAS Latency is: %d\n", caslat);
+
+	/*
+	 * Errata DDR6 work around: input enable 2 cycles earlier.
+	 * including MPC834x Rev1.0/1.1 and MPC8360 Rev1.1/1.2.
+	 */
+	if (caslat == 2)
+		ddr->debug_reg = 0x201c0000; /* CL=2 */
+	else if (caslat == 3)
+		ddr->debug_reg = 0x202c0000; /* CL=2.5 */
+	else if (caslat == 4)
+		ddr->debug_reg = 0x202c0000; /* CL=3.0 */
+
+	__asm__ __volatile__ ("sync");
+
+	debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
+
 	/*
 	 * note: caslat must also be programmed into ddr->sdram_mode
 	 * register.
@@ -308,87 +367,82 @@ long int spd_sdram()
 	ddr->sdram_cfg = 0x42000000;
 
 	/* Check DIMM data bus width */
-	if (spd.dataw_lsb == 0x20)
-	{
+	if (spd.dataw_lsb == 0x20) {
 		burstlen = 0x03; /* 32 bit data bus, burst len is 8 */
 		printf("\n DDR DIMM: data bus width is 32 bit");
-	}
-	else
-	{
+	} else {
 		burstlen = 0x02; /* Others act as 64 bit bus, burst len is 4 */
 		printf("\n DDR DIMM: data bus width is 64 bit");
 	}
 
 	/* Is this an ECC DDR chip? */
-	if (spd.config == 0x02) {
+	if (spd.config == 0x02)
 		printf(" with ECC\n");
-	}
 	else
 		printf(" without ECC\n");
 
 	/* Burst length is always 4 for 64 bit data bus, 8 for 32 bit data bus,
 	   Burst type is sequential
 	 */
-	switch(caslat) {
-	case 1:
-		ddr->sdram_mode = 0x50 | burstlen; /* CL=1.5 */
-		break;
-	case 2:
-		ddr->sdram_mode = 0x20 | burstlen; /* CL=2.0 */
-		break;
-	case 3:
-		ddr->sdram_mode = 0x60 | burstlen; /* CL=2.5 */
-		break;
-	case 4:
-		ddr->sdram_mode = 0x30 | burstlen; /* CL=3.0 */
-		break;
-	default:
-		printf("DDR:only CAS Latency 1.5, 2.0, 2.5, 3.0 "
-			"is supported.\n");
-		return 0;
+	switch (caslat) {
+	case 1:
+		ddr->sdram_mode = 0x50 | burstlen; /* CL=1.5 */
+		break;
+	case 2:
+		ddr->sdram_mode = 0x20 | burstlen; /* CL=2.0 */
+		break;
+	case 3:
+		ddr->sdram_mode = 0x60 | burstlen; /* CL=2.5 */
+		break;
+	case 4:
+		ddr->sdram_mode = 0x30 | burstlen; /* CL=3.0 */
+		break;
+	default:
+		printf("DDR:only CL 1.5, 2.0, 2.5, 3.0 is supported\n");
+		return 0;
 	}
 	debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);
 
-	switch(spd.refresh) {
-	case 0x00:
-	case 0x80:
-		tmp = picos_to_clk(15625000);
-		break;
-	case 0x01:
-	case 0x81:
-		tmp = picos_to_clk(3900000);
-		break;
-	case 0x02:
-	case 0x82:
-		tmp = picos_to_clk(7800000);
-		break;
-	case 0x03:
-	case 0x83:
-		tmp = picos_to_clk(31300000);
-		break;
-	case 0x04:
-	case 0x84:
-		tmp = picos_to_clk(62500000);
-		break;
-	case 0x05:
-	case 0x85:
-		tmp = picos_to_clk(125000000);
-		break;
-	default:
-		tmp = 0x512;
-		break;
+	switch (spd.refresh) {
+	case 0x00:
+	case 0x80:
+		refresh_clk = picos_to_clk(15625000);
+		break;
+	case 0x01:
+	case 0x81:
+		refresh_clk = picos_to_clk(3900000);
+		break;
+	case 0x02:
+	case 0x82:
+		refresh_clk = picos_to_clk(7800000);
+		break;
+	case 0x03:
+	case 0x83:
+		refresh_clk = picos_to_clk(31300000);
+		break;
+	case 0x04:
+	case 0x84:
+		refresh_clk = picos_to_clk(62500000);
+		break;
+	case 0x05:
+	case 0x85:
+		refresh_clk = picos_to_clk(125000000);
+		break;
+	default:
+		refresh_clk = 0x512;
+		break;
 	}
 
 	/*
 	 * Set BSTOPRE to 0x100 for page mode
 	 * If auto-charge is used, set BSTOPRE = 0
 	 */
-	ddr->sdram_interval = ((tmp & 0x3fff) << 16) | 0x100;
+	ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
 	debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);
 
 	/* SS_EN = 0, source synchronous disable
 	 * CLK_ADJST = 0, MCK/MCK# is launched aligned with addr/cmd
-	 */
+	 */
 	ddr->sdram_clk_cntl = 0x00000000;
 	debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);
 
@@ -397,8 +451,8 @@ long int spd_sdram()
 	udelay(600);
 
 	/*
-	 * Figure out the settings for the sdram_cfg register. Build up
-	 * the entire register in 'tmp' before writing since the write into
+	 * Figure out the settings for the sdram_cfg register. Build up
+	 * the value in 'sdram_cfg' before writing since the write into
 	 * the register will actually enable the memory controller, and all
 	 * settings must be done before enabling.
 	 *
@@ -411,14 +465,13 @@ long int spd_sdram()
 	sdram_cfg = 0xC2000000;
 
 	/* sdram_cfg[3] = RD_EN - registered DIMM enable */
-	if (spd.mod_attr & 0x02) {
+	if (spd.mod_attr & 0x02)
 		sdram_cfg |= 0x10000000;
-	}
 
 	/* The DIMM is 32bit width */
-	if (spd.dataw_lsb == 0x20) {
+	if (spd.dataw_lsb == 0x20)
 		sdram_cfg |= 0x000C0000;
-	}
+
 	ddrc_ecc_enable = 0;
 
 #if defined(CONFIG_DDR_ECC)
@@ -431,13 +484,13 @@ long int spd_sdram()
 		/* set single bit error threshold to maximum value,
 		 * reset counter to zero */
 		ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
-			(0 << ECC_ERROR_MAN_SBEC_SHIFT);
+			(0 << ECC_ERROR_MAN_SBEC_SHIFT);
 	}
 	debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
 	debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
 #endif
-	printf(" DDRC ECC mode: %s", ddrc_ecc_enable ? "ON":"OFF");
+	printf(" DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON":"OFF");
 
 #if defined(CONFIG_DDR_2T_TIMING)
 	/*
@@ -455,7 +508,6 @@ long int spd_sdram()
 	}
 
 #endif /* CONFIG_SPD_EEPROM */
-
 #if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRC)
 /*
  * Use timebase counter, get_timer() is not availabe
@@ -492,74 +544,48 @@ static __inline__ unsigned long get_tbms (void)
 /* #define CONFIG_DDR_ECC_INIT_VIA_DMA */
 void ddr_enable_ecc(unsigned int dram_size)
 {
-	uint *p;
-	volatile immap_t *immap = (immap_t *)CFG_IMMRBAR;
+	volatile immap_t *immap = (immap_t *)CFG_IMMR;
 	volatile ddr83xx_t *ddr= &immap->ddr;
 	unsigned long t_start, t_end;
+	register u64 *p;
+	register uint size;
+	unsigned int pattern[2];
 
 #if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
 	uint i;
 #endif
-
-	debug("Initialize a Cachline in DRAM\n");
 	icache_enable();
-
-#if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
-	/* Initialise DMA for direct Transfers */
-	dma_init();
-#endif
-
 	t_start = get_tbms();
+	pattern[0] = 0xdeadbeef;
+	pattern[1] = 0xdeadbeef;
 
 #if !defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
-	debug("DDR init: Cache flush method\n");
-	for (p = 0; p < (uint *)(dram_size); p++) {
-		if (((unsigned int)p & 0x1f) == 0) {
-			ppcDcbz((unsigned long) p);
-		}
-
-		/* write pattern to cache and flush */
-		*p = (unsigned int)0xdeadbeef;
-
-		if (((unsigned int)p & 0x1c) == 0x1c) {
-			ppcDcbf((unsigned long) p);
-		}
+	debug("ddr init: CPU FP write method\n");
+	size = dram_size;
+	for (p = 0; p < (u64*)(size); p++) {
+		ppcDWstore((u32*)p, pattern);
 	}
+	__asm__ __volatile__ ("sync");
#else
-	printf("DDR init: DMA method\n");
-	for (p = 0; p < (uint *)(8 * 1024); p++) {
-		/* zero one data cache line */
-		if (((unsigned int)p & 0x1f) == 0) {
-			ppcDcbz((unsigned long)p);
-		}
-
-		/* write pattern to it and flush */
-		*p = (unsigned int)0xdeadbeef;
-
-		if (((unsigned int)p & 0x1c) == 0x1c) {
-			ppcDcbf((unsigned long)p);
-		}
+	debug("ddr init: DMA method\n");
+	size = 0x2000;
+	for (p = 0; p < (u64*)(size); p++) {
+		ppcDWstore((u32*)p, pattern);
 	}
+	__asm__ __volatile__ ("sync");
 
-	/* 8K */
-	dma_xfer((uint *)0x2000, 0x2000, (uint *)0);
-	/* 16K */
-	dma_xfer((uint *)0x4000, 0x4000, (uint *)0);
-	/* 32K */
-	dma_xfer((uint *)0x8000, 0x8000, (uint *)0);
-	/* 64K */
-	dma_xfer((uint *)0x10000, 0x10000, (uint *)0);
-	/* 128k */
-	dma_xfer((uint *)0x20000, 0x20000, (uint *)0);
-	/* 256k */
-	dma_xfer((uint *)0x40000, 0x40000, (uint *)0);
-	/* 512k */
-	dma_xfer((uint *)0x80000, 0x80000, (uint *)0);
-	/* 1M */
-	dma_xfer((uint *)0x100000, 0x100000, (uint *)0);
-	/* 2M */
-	dma_xfer((uint *)0x200000, 0x200000, (uint *)0);
-	/* 4M */
-	dma_xfer((uint *)0x400000, 0x400000, (uint *)0);
+	/* Initialise DMA for direct transfer */
+	dma_init();
+	/* Start DMA to transfer */
+	dma_xfer((uint *)0x2000, 0x2000, (uint *)0);	/* 8K */
+	dma_xfer((uint *)0x4000, 0x4000, (uint *)0);	/* 16K */
+	dma_xfer((uint *)0x8000, 0x8000, (uint *)0);	/* 32K */
+	dma_xfer((uint *)0x10000, 0x10000, (uint *)0);	/* 64K */
+	dma_xfer((uint *)0x20000, 0x20000, (uint *)0);	/* 128K */
+	dma_xfer((uint *)0x40000, 0x40000, (uint *)0);	/* 256K */
+	dma_xfer((uint *)0x80000, 0x80000, (uint *)0);	/* 512K */
+	dma_xfer((uint *)0x100000, 0x100000, (uint *)0);	/* 1M */
+	dma_xfer((uint *)0x200000, 0x200000, (uint *)0);	/* 2M */
+	dma_xfer((uint *)0x400000, 0x400000, (uint *)0);	/* 4M */
 
 	for (i = 1; i < dram_size / 0x800000; i++) {
 		dma_xfer((uint *)(0x800000*i), 0x800000, (uint *)0);