1 // SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
15 #if defined(MV88F78X60)
16 #include "ddr3_axp_config.h"
17 #elif defined(MV88F67XX)
18 #include "ddr3_a370_config.h"
21 #if defined(MV88F672X)
22 #include "ddr3_a375_config.h"
27 /* DIMM SPD offsets */
28 #define SPD_DEV_TYPE_BYTE 2
30 #define SPD_MODULE_TYPE_BYTE 3
31 #define SPD_MODULE_MASK 0xf
32 #define SPD_MODULE_TYPE_RDIMM 1
33 #define SPD_MODULE_TYPE_UDIMM 2
35 #define SPD_DEV_DENSITY_BYTE 4
36 #define SPD_DEV_DENSITY_MASK 0xf
38 #define SPD_ROW_NUM_BYTE 5
39 #define SPD_ROW_NUM_MIN 12
40 #define SPD_ROW_NUM_OFF 3
41 #define SPD_ROW_NUM_MASK (7 << SPD_ROW_NUM_OFF)
43 #define SPD_COL_NUM_BYTE 5
44 #define SPD_COL_NUM_MIN 9
45 #define SPD_COL_NUM_OFF 0
46 #define SPD_COL_NUM_MASK (7 << SPD_COL_NUM_OFF)
48 #define SPD_MODULE_ORG_BYTE 7
49 #define SPD_MODULE_SDRAM_DEV_WIDTH_OFF 0
50 #define SPD_MODULE_SDRAM_DEV_WIDTH_MASK (7 << SPD_MODULE_SDRAM_DEV_WIDTH_OFF)
51 #define SPD_MODULE_BANK_NUM_MIN 1
52 #define SPD_MODULE_BANK_NUM_OFF 3
53 #define SPD_MODULE_BANK_NUM_MASK (7 << SPD_MODULE_BANK_NUM_OFF)
55 #define SPD_BUS_WIDTH_BYTE 8
56 #define SPD_BUS_WIDTH_OFF 0
57 #define SPD_BUS_WIDTH_MASK (7 << SPD_BUS_WIDTH_OFF)
58 #define SPD_BUS_ECC_OFF 3
59 #define SPD_BUS_ECC_MASK (3 << SPD_BUS_ECC_OFF)
61 #define SPD_MTB_DIVIDEND_BYTE 10
62 #define SPD_MTB_DIVISOR_BYTE 11
63 #define SPD_TCK_BYTE 12
64 #define SPD_SUP_CAS_LAT_LSB_BYTE 14
65 #define SPD_SUP_CAS_LAT_MSB_BYTE 15
66 #define SPD_TAA_BYTE 16
67 #define SPD_TWR_BYTE 17
68 #define SPD_TRCD_BYTE 18
69 #define SPD_TRRD_BYTE 19
70 #define SPD_TRP_BYTE 20
72 #define SPD_TRAS_MSB_BYTE 21
73 #define SPD_TRAS_MSB_MASK 0xf
75 #define SPD_TRC_MSB_BYTE 21
76 #define SPD_TRC_MSB_MASK 0xf0
78 #define SPD_TRAS_LSB_BYTE 22
79 #define SPD_TRC_LSB_BYTE 23
80 #define SPD_TRFC_LSB_BYTE 24
81 #define SPD_TRFC_MSB_BYTE 25
82 #define SPD_TWTR_BYTE 26
83 #define SPD_TRTP_BYTE 27
85 #define SPD_TFAW_MSB_BYTE 28
86 #define SPD_TFAW_MSB_MASK 0xf
88 #define SPD_TFAW_LSB_BYTE 29
89 #define SPD_OPT_FEATURES_BYTE 30
90 #define SPD_THERMAL_REFRESH_OPT_BYTE 31
92 #define SPD_ADDR_MAP_BYTE 63
93 #define SPD_ADDR_MAP_MIRROR_OFFS 0
95 #define SPD_RDIMM_RC_BYTE 69
96 #define SPD_RDIMM_RC_NIBBLE_MASK 0xF
97 #define SPD_RDIMM_RC_NUM 16
99 /* Dimm Memory Type values */
100 #define SPD_MEM_TYPE_SDRAM 0x4
101 #define SPD_MEM_TYPE_DDR1 0x7
102 #define SPD_MEM_TYPE_DDR2 0x8
103 #define SPD_MEM_TYPE_DDR3 0xB
105 #define DIMM_MODULE_MANU_OFFS 64
106 #define DIMM_MODULE_MANU_SIZE 8
107 #define DIMM_MODULE_VEN_OFFS 73
108 #define DIMM_MODULE_VEN_SIZE 25
109 #define DIMM_MODULE_ID_OFFS 99
110 #define DIMM_MODULE_ID_SIZE 18
112 /* enumeration for voltage levels. */
/* enumeration for SDRAM CAS Latencies. */
123 enum dimm_sdram_cas {
134 /* enumeration for memory types */
/* DIMM information structure */
typedef struct dimm_info {
	/* DIMM dimensions */
	u32 num_of_module_ranks;	/* ranks (chip selects) on the module */
	u32 num_of_banks_on_each_device;
	u32 err_check_type;		/* ECC , PARITY.. */
	u32 type_info;			/* DDR2 only */

	/* DIMM timing parameters */
	/* (the min_* delay fields below are held in picoseconds) */
	u32 supported_cas_latencies;	/* bitmap, bit0 = CL4 (see CAS table) */
	u32 refresh_interval;
	u32 min_row_precharge_time;
	u32 min_row_active_to_row_active;
	u32 min_ras_to_cas_delay;
	u32 min_write_recovery_time;	/* DDR3/2 only */
	u32 min_write_to_read_cmd_delay;	/* DDR3/2 only */
	u32 min_read_to_prech_cmd_delay;	/* DDR3/2 only */
	u32 min_active_to_precharge;
	u32 min_refresh_recovery;	/* DDR3/2 only */
	u32 min_cas_lat_time;
	u32 min_four_active_win_delay;
	u8 dimm_rc[SPD_RDIMM_RC_NUM];	/* RDIMM register control words */
182 static int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info,
184 static u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val);
185 static u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val);
186 static int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width);
187 static u32 ddr3_div(u32 val, u32 divider, u32 sub);
189 extern u8 spd_data[SPD_SIZE];
190 extern u32 odt_config[ODT_OPT];
191 extern u16 odt_static[ODT_OPT][MAX_CS];
192 extern u16 odt_dynamic[ODT_OPT][MAX_CS];
194 #if !(defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710))
/*
 * Name:     ddr3_get_dimm_num - Find number of dimms and their addresses
 * Args:     dimm_addr - array of dimm addresses
 */
/*
 * ddr3_get_dimm_num - scan the I2C bus for DDR3 SPD EEPROMs.
 *
 * @dimm_addr:	output array filled with the address of each DDR3 DIMM found
 *
 * Probes candidate SPD addresses from MAX_DIMM_ADDR down towards
 * MIN_DIMM_ADDR and records every device whose SPD byte 2 reports DDR3.
 * Returns the number of DIMMs detected (dimm_num).
 */
static u32 ddr3_get_dimm_num(u32 *dimm_addr)
	/* Read the dimm eeprom */
	for (dimm_cur_addr = MAX_DIMM_ADDR; dimm_cur_addr > MIN_DIMM_ADDR;
		data[SPD_DEV_TYPE_BYTE] = 0;

		/* Far-End DIMM must be connected */
		/* The first DIMM found must sit at/above FAR_END_DIMM_ADDR */
		if ((dimm_num == 0) && (dimm_cur_addr < FAR_END_DIMM_ADDR))

		/* Read the first SPD bytes; byte 2 holds the memory type */
		ret = i2c_read(dimm_cur_addr, 0, 1, (uchar *)data, 3);
		if (data[SPD_DEV_TYPE_BYTE] == SPD_MEM_TYPE_DDR3) {
			dimm_addr[dimm_num] = dimm_cur_addr;
/*
 * Name:     ddr3_spd_init - Get the SPD parameters.
 * Desc:     Read the DIMM SPD parameters into given struct parameter.
 * Args:     dimm_addr - DIMM SPD (EEPROM) address on the I2C bus.
 *           info - DIMM information structure.
 * Returns:  MV_OK if function could read DIMM parameters, 0 otherwise.
 */
/*
 * ddr3_spd_init - parse a DDR3 SPD image into an MV_DIMM_INFO structure.
 *
 * @info:	output DIMM information structure (sizes and timings)
 * @dimm_addr:	SPD EEPROM I2C address; 0 means spd_data[] is already loaded
 * @dimm_width:	board DRAM bus width, used in the rank-capacity formula
 *
 * All timing fields are computed in picoseconds from the SPD medium time
 * base (MTB). Returns MV_OK on success or an MV_DDR3_TRAINING_ERR_* code
 * on TWSI/type failure.
 */
int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width)
	__maybe_unused u32 rc;
	__maybe_unused u8 vendor_high, vendor_low;

	if (dimm_addr != 0) {
		memset(spd_data, 0, SPD_SIZE * sizeof(u8));

		/* Read the whole SPD image in a single I2C transfer */
		ret = i2c_read(dimm_addr, 0, 1, (uchar *)spd_data, SPD_SIZE);
			return MV_DDR3_TRAINING_ERR_TWSI_FAIL;

	/* Only DDR3 modules are accepted (SPD byte 2 == 0xB) */
	if (spd_data[SPD_DEV_TYPE_BYTE] != SPD_MEM_TYPE_DDR3)
		return MV_DDR3_TRAINING_ERR_TWSI_BAD_TYPE;

	/* Error Check Type */
	/* No byte for error check in DDR3 SPD, use DDR2 convention */
	info->err_check_type = 0;
	/* SPD byte 8 bits [4:3] (bus width extension) != 0 -> ECC module */
	if ((spd_data[SPD_BUS_WIDTH_BYTE] & 0x18) >> 3)
		info->err_check_type = 1;

	DEBUG_INIT_FULL_C("DRAM err_check_type ", info->err_check_type, 1);

	/* Module type (SPD byte 3): RDIMM / UDIMM / other */
	switch (spd_data[SPD_MODULE_TYPE_BYTE]) {
		info->type_info = SPD_MODULE_TYPE_RDIMM;
		info->type_info = SPD_MODULE_TYPE_UDIMM;
	case 11:		/* LRDIMM current not supported */
		info->type_info = (spd_data[SPD_MODULE_TYPE_BYTE]);

	/* Size Calculations: */

	/* Number Of Row Addresses - 12/13/14/15/16 */
	info->num_of_row_addr =
		(spd_data[SPD_ROW_NUM_BYTE] & SPD_ROW_NUM_MASK) >>
	info->num_of_row_addr += SPD_ROW_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM num_of_row_addr ", info->num_of_row_addr, 2);

	/* Number Of Column Addresses - 9/10/11/12 */
	info->num_of_col_addr =
		(spd_data[SPD_COL_NUM_BYTE] & SPD_COL_NUM_MASK) >>
	info->num_of_col_addr += SPD_COL_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM num_of_col_addr ", info->num_of_col_addr, 1);

	/* Number Of Ranks = number of CS on Dimm - 1/2/3/4 Ranks */
	info->num_of_module_ranks =
		(spd_data[SPD_MODULE_ORG_BYTE] & SPD_MODULE_BANK_NUM_MASK) >>
		SPD_MODULE_BANK_NUM_OFF;
	info->num_of_module_ranks += SPD_MODULE_BANK_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM numOfModuleBanks ", info->num_of_module_ranks,

	/* Data Width - 8/16/32/64 bits */
		1 << (3 + (spd_data[SPD_BUS_WIDTH_BYTE] & SPD_BUS_WIDTH_MASK));
	DEBUG_INIT_FULL_C("DRAM data_width ", info->data_width, 1);

	/* Number Of Banks On Each Device - 8/16/32/64 banks */
	info->num_of_banks_on_each_device =
		1 << (3 + ((spd_data[SPD_DEV_DENSITY_BYTE] >> 4) & 0x7));
	DEBUG_INIT_FULL_C("DRAM num_of_banks_on_each_device ",
			  info->num_of_banks_on_each_device, 1);

	/* Total SDRAM capacity - 256Mb/512Mb/1Gb/2Gb/4Gb/8Gb/16Gb - MegaBits */
	info->sdram_capacity =
		spd_data[SPD_DEV_DENSITY_BYTE] & SPD_DEV_DENSITY_MASK;

	/* Sdram Width - 4/8/16/32 bits */
	info->sdram_width = 1 << (2 + (spd_data[SPD_MODULE_ORG_BYTE] &
				       SPD_MODULE_SDRAM_DEV_WIDTH_MASK));
	DEBUG_INIT_FULL_C("DRAM sdram_width ", info->sdram_width, 1);

	/* CS (Rank) Capacity - MB */
	/*
	 * DDR3 device uiDensity val are: (device capacity/8) *
	 * (Module_width/Device_width)
	 */
	/* Jedec SPD DDR3 - page 7, Save spd_data in Mb - 2048=2GB */
	if (dimm_width == 32) {
		info->rank_capacity =
			((1 << info->sdram_capacity) * 256 *
			 (info->data_width / info->sdram_width)) << 16;
		/* CS size = CS size / 2 */
		info->rank_capacity =
			((1 << info->sdram_capacity) * 256 *
			 (info->data_width / info->sdram_width) * 0x2) << 16;
		/* 0x2 => 0x100000-1Mbit / 8-bit->byte / 0x10000 */
	DEBUG_INIT_FULL_C("DRAM rank_capacity[31] ", info->rank_capacity, 1);

	/* Number of devices including Error correction */
	info->num_of_devices =
		((info->data_width / info->sdram_width) *
		 info->num_of_module_ranks) + info->err_check_type;
	DEBUG_INIT_FULL_C("DRAM num_of_devices ", info->num_of_devices, 1);

	/* Address Mapping from Edge connector to DRAM - mirroring option */
	info->addr_mirroring =
		spd_data[SPD_ADDR_MAP_BYTE] & (1 << SPD_ADDR_MAP_MIRROR_OFFS);

	/* Timings - All in ps */

	/* Medium time base in ps: MTB dividend / MTB divisor (SPD 10/11) */
	time_base = (1000 * spd_data[SPD_MTB_DIVIDEND_BYTE]) /
		    spd_data[SPD_MTB_DIVISOR_BYTE];

	/* Minimum Cycle Time At Max CasLatancy */
	info->min_cycle_time = spd_data[SPD_TCK_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM tCKmin ", info->min_cycle_time, 1);

	/* Refresh Interval */
	/* No byte for refresh interval in DDR3 SPD, use DDR2 convention */
	/*
	 * JEDEC param are 0 <= Tcase <= 85: 7.8uSec, 85 <= Tcase
	 */
	info->refresh_interval = 7800000;	/* Set to 7.8uSec */
	DEBUG_INIT_FULL_C("DRAM refresh_interval ", info->refresh_interval, 1);

	/* Supported Cas Latencies - DDR 3: */
	/*
	 * bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 *
	 *******-******-******-******-******-******-******-*******-*******
	 CAS = 11 | 10 | 9 | 8 | 7 | 6 | 5 | 4 *
	 *********************************************************-*******
	 *******-******-******-******-******-******-******-*******-*******
	 * bit15 |bit14 |bit13 |bit12 |bit11 |bit10 | bit9 | bit8 *
	 *******-******-******-******-******-******-******-*******-*******
	 CAS = TBD | 18 | 17 | 16 | 15 | 14 | 13 | 12 *
	 */
	/* DDR3 include 2 byte of CAS support */
	info->supported_cas_latencies =
		(spd_data[SPD_SUP_CAS_LAT_MSB_BYTE] << 8) |
		spd_data[SPD_SUP_CAS_LAT_LSB_BYTE];
	DEBUG_INIT_FULL_C("DRAM supported_cas_latencies ",
			  info->supported_cas_latencies, 1);

	/* Minimum Cycle Time At Max CasLatancy */
	info->min_cas_lat_time = (spd_data[SPD_TAA_BYTE] * time_base);
	/*
	 * This field divided by the cycleTime will give us the CAS latency
	 */

	/*
	 * For DDR3 and DDR2 includes Write Recovery Time field.
	 */
	info->min_write_recovery_time = spd_data[SPD_TWR_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_write_recovery_time ",
			  info->min_write_recovery_time, 1);

	/* Minimum Ras to Cas Delay */
	info->min_ras_to_cas_delay = spd_data[SPD_TRCD_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_ras_to_cas_delay ",
			  info->min_ras_to_cas_delay, 1);

	/* Minimum Row Active to Row Active Time */
	info->min_row_active_to_row_active =
		spd_data[SPD_TRRD_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_row_active_to_row_active ",
			  info->min_row_active_to_row_active, 1);

	/* Minimum Row Precharge Delay Time */
	info->min_row_precharge_time = spd_data[SPD_TRP_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_row_precharge_time ",
			  info->min_row_precharge_time, 1);

	/* Minimum Active to Precharge Delay Time - tRAS ps */
	info->min_active_to_precharge =
		(spd_data[SPD_TRAS_MSB_BYTE] & SPD_TRAS_MSB_MASK) << 8;
	info->min_active_to_precharge |= spd_data[SPD_TRAS_LSB_BYTE];
	info->min_active_to_precharge *= time_base;
	DEBUG_INIT_FULL_C("DRAM min_active_to_precharge ",
			  info->min_active_to_precharge, 1);

	/* Minimum Refresh Recovery Delay Time - tRFC ps */
	info->min_refresh_recovery = spd_data[SPD_TRFC_MSB_BYTE] << 8;
	info->min_refresh_recovery |= spd_data[SPD_TRFC_LSB_BYTE];
	info->min_refresh_recovery *= time_base;
	DEBUG_INIT_FULL_C("DRAM min_refresh_recovery ",
			  info->min_refresh_recovery, 1);

	/*
	 * For DDR3 and DDR2 includes Internal Write To Read Command Delay
	 */
	info->min_write_to_read_cmd_delay = spd_data[SPD_TWTR_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_write_to_read_cmd_delay ",
			  info->min_write_to_read_cmd_delay, 1);

	/*
	 * For DDR3 and DDR2 includes Internal Read To Precharge Command Delay
	 */
	info->min_read_to_prech_cmd_delay = spd_data[SPD_TRTP_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_read_to_prech_cmd_delay ",
			  info->min_read_to_prech_cmd_delay, 1);

	/*
	 * For DDR3 includes Minimum Activate to Activate/Refresh Command
	 */
	tmp = ((spd_data[SPD_TFAW_MSB_BYTE] & SPD_TFAW_MSB_MASK) << 8) |
	      spd_data[SPD_TFAW_LSB_BYTE];
	info->min_four_active_win_delay = tmp * time_base;
	DEBUG_INIT_FULL_C("DRAM min_four_active_win_delay ",
			  info->min_four_active_win_delay, 1);

#if defined(MV88F78X60) || defined(MV88F672X)
	/* Registered DIMM support */
	if (info->type_info == SPD_MODULE_TYPE_RDIMM) {
		/* Unpack RC control words - two 4-bit words per SPD byte */
		for (rc = 2; rc < 6; rc += 2) {
			tmp = spd_data[SPD_RDIMM_RC_BYTE + rc / 2];
				spd_data[SPD_RDIMM_RC_BYTE + rc / 2] &
				SPD_RDIMM_RC_NIBBLE_MASK;
			info->dimm_rc[rc + 1] =
				(spd_data[SPD_RDIMM_RC_BYTE + rc / 2] >> 4) &
				SPD_RDIMM_RC_NIBBLE_MASK;

		/* Register-vendor ID from SPD bytes 65 (MSB) / 66 (LSB) */
		vendor_low = spd_data[66];
		vendor_high = spd_data[65];
		info->vendor = (vendor_high << 8) + vendor_low;
		DEBUG_INIT_C("DDR3 Training Sequence - Registered DIMM vendor ID 0x",

		/* Static defaults for the remaining RDIMM control words */
		info->dimm_rc[0] = RDIMM_RC0;
		info->dimm_rc[1] = RDIMM_RC1;
		info->dimm_rc[2] = RDIMM_RC2;
		info->dimm_rc[8] = RDIMM_RC8;
		info->dimm_rc[9] = RDIMM_RC9;
		info->dimm_rc[10] = RDIMM_RC10;
		info->dimm_rc[11] = RDIMM_RC11;
/*
 * Name:     ddr3_spd_sum_init - Get the SPD parameters.
 * Desc:     Merge the DIMM SPD parameters into the given summary structure.
 * Args:     dimm - DIMM number.
 *           info - DIMM information structure.
 * Returns:  MV_OK if function could read DIMM parameters, 0 otherwise.
 */
/*
 * ddr3_spd_sum_init - merge one DIMM's parameters into the summary info.
 *
 * @info:	parameters of the DIMM just parsed
 * @sum_info:	running worst-case summary over all DIMMs
 * @dimm:	index of the DIMM being merged (index 0 seeds the summary)
 *
 * Incompatible module type or bus width fails the merge; each timing field
 * keeps the most restrictive (largest) value and the supported-CAS bitmap
 * is intersected. Returns MV_OK or an MV_DDR3_TRAINING_ERR_* code.
 */
int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info, u32 dimm)
		/* First DIMM seeds the summary verbatim */
		memcpy(sum_info, info, sizeof(MV_DIMM_INFO));

	if (sum_info->type_info != info->type_info) {
		DEBUG_INIT_S("DDR3 Dimm Compare - DIMM type does not match - FAIL\n");
		return MV_DDR3_TRAINING_ERR_DIMM_TYPE_NO_MATCH;
	/* ECC is only usable when every DIMM provides it */
	if (sum_info->err_check_type > info->err_check_type) {
		sum_info->err_check_type = info->err_check_type;
		DEBUG_INIT_S("DDR3 Dimm Compare - ECC does not match. ECC is disabled\n");
	if (sum_info->data_width != info->data_width) {
		DEBUG_INIT_S("DDR3 Dimm Compare - DRAM bus width does not match - FAIL\n");
		return MV_DDR3_TRAINING_ERR_BUS_WIDTH_NOT_MATCH;
	/* Keep the slowest (most restrictive) timing across all DIMMs */
	if (sum_info->min_cycle_time < info->min_cycle_time)
		sum_info->min_cycle_time = info->min_cycle_time;
	if (sum_info->refresh_interval < info->refresh_interval)
		sum_info->refresh_interval = info->refresh_interval;
	/* Only CAS latencies common to every DIMM remain set */
	sum_info->supported_cas_latencies &= info->supported_cas_latencies;
	if (sum_info->min_cas_lat_time < info->min_cas_lat_time)
		sum_info->min_cas_lat_time = info->min_cas_lat_time;
	if (sum_info->min_write_recovery_time < info->min_write_recovery_time)
		sum_info->min_write_recovery_time =
		    info->min_write_recovery_time;
	if (sum_info->min_ras_to_cas_delay < info->min_ras_to_cas_delay)
		sum_info->min_ras_to_cas_delay = info->min_ras_to_cas_delay;
	if (sum_info->min_row_active_to_row_active <
	    info->min_row_active_to_row_active)
		sum_info->min_row_active_to_row_active =
		    info->min_row_active_to_row_active;
	if (sum_info->min_row_precharge_time < info->min_row_precharge_time)
		sum_info->min_row_precharge_time = info->min_row_precharge_time;
	if (sum_info->min_active_to_precharge < info->min_active_to_precharge)
		sum_info->min_active_to_precharge =
		    info->min_active_to_precharge;
	if (sum_info->min_refresh_recovery < info->min_refresh_recovery)
		sum_info->min_refresh_recovery = info->min_refresh_recovery;
	if (sum_info->min_write_to_read_cmd_delay <
	    info->min_write_to_read_cmd_delay)
		sum_info->min_write_to_read_cmd_delay =
		    info->min_write_to_read_cmd_delay;
	if (sum_info->min_read_to_prech_cmd_delay <
	    info->min_read_to_prech_cmd_delay)
		sum_info->min_read_to_prech_cmd_delay =
		    info->min_read_to_prech_cmd_delay;
	if (sum_info->min_four_active_win_delay <
	    info->min_four_active_win_delay)
		sum_info->min_four_active_win_delay =
		    info->min_four_active_win_delay;
	/*
	 * NOTE(review): duplicate of the min_write_to_read_cmd_delay
	 * comparison above - harmless, but could be removed.
	 */
	if (sum_info->min_write_to_read_cmd_delay <
	    info->min_write_to_read_cmd_delay)
		sum_info->min_write_to_read_cmd_delay =
		    info->min_write_to_read_cmd_delay;
/*
 * Name:     ddr3_dunit_setup
 * Desc:     Set the controller with the timing values.
 * Args:     ecc_ena - User ECC setup
 */
/*
 * ddr3_dunit_setup - program the DDR3 memory controller (D-Unit) registers.
 *
 * @ecc_ena:	user request to enable ECC (honoured only if every DIMM
 *		provides ECC)
 * @hclk_time:	HCLK period in ps
 * @ddr_width:	board DRAM data-bus width
 *
 * Derives worst-case parameters from the DIMM SPDs (or static values on
 * boards without SPD), then writes the timing, address-control, ODT and
 * mode registers. Returns MV_OK or an MV_DDR3_TRAINING_ERR_* code.
 */
int ddr3_dunit_setup(u32 ecc_ena, u32 hclk_time, u32 *ddr_width)
	MV_DIMM_INFO dimm_info[2];
	MV_DIMM_INFO sum_info;
	u32 stat_val, spd_val;
	u32 cs, cl, cs_num, cs_ena;
	__maybe_unused u32 dimm_cnt, cs_count, dimm;
	__maybe_unused u32 dimm_addr[2] = { 0, 0 };

#if defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710)
	/* Armada 370 - SPD is not available on DIMM */
	/*
	 * Set MC registers according to Static SPD values Values -
	 * must be set manually
	 */
	/*
	 * We only have one optional DIMM for the DB and we already got the
	 * SPD matching values
	 */
	status = ddr3_spd_init(&dimm_info[0], 0, *ddr_width);

	/* Use JP8 to enable multiCS support for Armada 370 DB */
	if (!ddr3_check_config(EEPROM_MODULE_ADDR, CONFIG_MULTI_CS))
		dimm_info[0].num_of_module_ranks = 1;
	status = ddr3_spd_sum_init(&dimm_info[0], &sum_info, 0);
	/* Dynamic D-Unit Setup - Read SPD values */
	dimm_num = ddr3_get_dimm_num(dimm_addr);
#ifdef MIXED_DIMM_STATIC
	DEBUG_INIT_S("DDR3 Training Sequence - No DIMMs detected\n");
	DEBUG_INIT_S("DDR3 Training Sequence - FAILED (Wrong DIMMs Setup)\n");
	return MV_DDR3_TRAINING_ERR_BAD_DIMM_SETUP;
	DEBUG_INIT_C("DDR3 Training Sequence - Number of DIMMs detected: ",

	/* Parse each detected DIMM and fold it into the summary */
	for (dimm = 0; dimm < dimm_num; dimm++) {
		status = ddr3_spd_init(&dimm_info[dimm], dimm_addr[dimm],
		status = ddr3_spd_sum_init(&dimm_info[dimm], &sum_info, dimm);

	/* Set number of enabled CS */
	cs_num = ddr3_get_cs_num_from_reg();
	for (dimm = 0; dimm < dimm_num; dimm++)
		cs_num += dimm_info[dimm].num_of_module_ranks;

	if (cs_num > MAX_CS) {
		DEBUG_INIT_C("DDR3 Training Sequence - Number of CS exceed limit - ",
		return MV_DDR3_TRAINING_ERR_MAX_CS_LIMIT;

	/* Set bitmap of enabled CS */
	cs_ena = ddr3_get_cs_ena_from_reg();
	/* Each DIMM occupies an even CS pair; mark its ranks enabled */
	for (cs = 0; cs < MAX_CS; cs += 2) {
		if (((1 << cs) & DIMM_CS_BITMAP) &&
		    !(cs_ena & (1 << cs))) {
			if (dimm_info[dimm].num_of_module_ranks == 1)
				cs_ena |= (0x1 << cs);
			else if (dimm_info[dimm].num_of_module_ranks == 2)
				cs_ena |= (0x3 << cs);
			else if (dimm_info[dimm].num_of_module_ranks == 3)
				cs_ena |= (0x7 << cs);
			else if (dimm_info[dimm].num_of_module_ranks == 4)
				cs_ena |= (0xF << cs);
			if (dimm == dimm_num)

		DEBUG_INIT_C("DDR3 Training Sequence - Number of enabled CS exceed limit - ",
		return MV_DDR3_TRAINING_ERR_MAX_ENA_CS_LIMIT;

	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Number of CS = ", cs_num, 1);

	/* Check Ratio - '1' - 2:1, '0' - 1:1 */
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		ddr_clk_time = hclk_time / 2;
		ddr_clk_time = hclk_time;

	/* Get target CL value from set register */
	reg = (reg_read(REG_DDR3_MR0_ADDR) >> 2);
	reg = ((((reg >> 1) & 0xE)) | (reg & 0x1)) & 0xF;

	cl = ddr3_get_max_val(ddr3_div(sum_info.min_cas_lat_time,
			      dimm_num, ddr3_valid_cl_to_cl(reg));
	cl = ddr3_div(sum_info.min_cas_lat_time, ddr_clk_time, 0);

	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Cas Latency = ", cl, 1);

	/* {0x00001400} - DDR SDRAM Configuration Register */
	stat_val = ddr3_get_static_mc_value(
		REG_SDRAM_CONFIG_ADDR, REG_SDRAM_CONFIG_ECC_OFFS, 0x1, 0, 0);
	/* ECC goes on only when requested AND present on every DIMM */
	if (ecc_ena && ddr3_get_min_val(sum_info.err_check_type, dimm_num,
		reg |= (1 << REG_SDRAM_CONFIG_ECC_OFFS);
		reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Enabled\n");
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Disabled\n");

	if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
		DEBUG_INIT_S("DDR3 Training Sequence - FAIL - Illegal R-DIMM setup\n");
		return MV_DDR3_TRAINING_ERR_BAD_R_DIMM_SETUP;
		reg |= (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS);
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - R-DIMM\n");
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - U-DIMM\n");

	if (ddr3_get_min_val(sum_info.data_width, dimm_num, BUS_WIDTH) == 64) {
		if (*ddr_width == 64) {
			reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
			DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 64Bits\n");
			DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
#if defined(MV88F672X)
	if (*ddr_width == 32) {
		reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
		DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");

	/* Refresh counter: interval in HCLKs, clamped to the user limit */
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_CONFIG_ADDR, 0,
					    REG_SDRAM_CONFIG_RFRS_MASK, 0, 0);
	tmp = ddr3_get_min_val(sum_info.refresh_interval / hclk_time,
	tmp = min(TREFI_USER / hclk_time, tmp);

	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - RefreshInterval/Hclk = ", tmp, 4);

	reg |= (1 << 16);	/* If 2:1 need to set P2DWr */
#if defined(MV88F672X)
	reg |= (1 << 27);	/* PhyRfRST = Disable */
	reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	/*{0x00001404} - DDR SDRAM Configuration Register */
	reg |= (DRAM_2T << REG_DUNIT_CTRL_LOW_2T_OFFS);
	reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);

	/* {0x00001408} - DDR SDRAM Timing (Low) Register */

	/* tRAS - (0:3,20) */
	spd_val = ddr3_div(sum_info.min_active_to_precharge,
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRAS-1 = ", tmp, 1);
	reg |= ((tmp & 0x10) << 16);	/* to bit 20 */

	/* tRCD */
	spd_val = ddr3_div(sum_info.min_ras_to_cas_delay, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRCD-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 4);

	/* tRP */
	spd_val = ddr3_div(sum_info.min_row_precharge_time, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRP-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 8);

	/* tWR */
	spd_val = ddr3_div(sum_info.min_write_recovery_time, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWR-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 12);

	/* tWTR */
	spd_val = ddr3_div(sum_info.min_write_to_read_cmd_delay, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWTR-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 16);

	/* tRRD */
	spd_val = ddr3_div(sum_info.min_row_active_to_row_active, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRRD-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 24);

	/* tRTP */
	spd_val = ddr3_div(sum_info.min_read_to_prech_cmd_delay, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRTP-1 = ", tmp, 1);
	reg |= ((tmp & 0xF) << 28);

	reg_write(REG_SDRAM_TIMING_LOW_ADDR, reg);

	/*{0x0000140C} - DDR SDRAM Timing (High) Register */
	/* Add cycles to R2R W2W */

	/* tRFC - (0:6,16:18) */
	spd_val = ddr3_div(sum_info.min_refresh_recovery, ddr_clk_time, 1);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_HIGH_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRFC-1 = ", tmp, 1);
	reg |= ((tmp & 0x380) << 9);	/* to bit 16 */
	reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);

	/*{0x00001410} - DDR SDRAM Address Control Register */

#if (defined(MV88F78X60) || defined(MV88F672X))
	/* tFAW - four-activate window, converted to DDR cycles */
	tmp = sum_info.min_four_active_win_delay;
	spd_val = ddr3_div(tmp, ddr_clk_time, 0);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW = ", tmp, 1);
	reg |= ((tmp & 0x3F) << 24);
	/* Other variants store tFAW minus the four tRRD slots */
	tmp = sum_info.min_four_active_win_delay -
		4 * (sum_info.min_row_active_to_row_active);
	spd_val = ddr3_div(tmp, ddr_clk_time, 0);
	stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
	tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
	DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW-4*tRRD = ", tmp, 1);
	reg |= ((tmp & 0x1F) << 24);

	/* SDRAM device capacity */
	reg |= (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) & 0xF0FFFF);

	/* Per-CS device-density field (3-bit encoding, high bit separate) */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
			if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
			if (dimm_info[dimm_cnt].sdram_capacity < 0x3) {
				reg |= ((dimm_info[dimm_cnt].sdram_capacity + 1) <<
					(REG_SDRAM_ADDRESS_SIZE_OFFS +
					 (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
			} else if (dimm_info[dimm_cnt].sdram_capacity > 0x3) {
				reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x3) <<
					(REG_SDRAM_ADDRESS_SIZE_OFFS +
					 (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
				reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x4) <<
					(REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs));

	/* SDRAM device structure */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
			if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
			if (dimm_info[dimm_cnt].sdram_width == 16)
				reg |= (1 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs));

	reg_write(REG_SDRAM_ADDRESS_CTRL_ADDR, reg);

	/*{0x00001418} - DDR SDRAM Operation Register */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs))
			reg &= ~(1 << (cs + REG_SDRAM_OPERATION_CS_OFFS));
	reg_write(REG_SDRAM_OPERATION_ADDR, reg);

	/*{0x00001420} - DDR SDRAM Extended Mode Register */
	reg_write(REG_SDRAM_EXT_MODE_ADDR, reg);

	/*{0x00001424} - DDR Controller Control (High) Register */
#if (defined(MV88F78X60) || defined(MV88F672X))
	reg_write(REG_DDR_CONT_HIGH_ADDR, reg);

	/*{0x0000142C} - DDR3 Timing Register */
#if defined(MV88F78X60) || defined(MV88F672X)
	reg_write(0x142C, reg);

	/*{0x00001484} - MBus CPU Block Register */
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		reg_write(REG_MBUS_CPU_BLOCK_ADDR, 0x0000E907);

	/*
	 * In case of mixed dimm and on-board devices setup parameters will
	 * be taken statically
	 */
	/*{0x00001494} - DDR SDRAM ODT Control (Low) Register */
	reg = odt_config[cs_ena];
	reg_write(REG_SDRAM_ODT_CTRL_LOW_ADDR, reg);

	/*{0x00001498} - DDR SDRAM ODT Control (High) Register */
	reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, reg);

	/*{0x0000149C} - DDR Dunit ODT Control Register */
	reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);

	/*{0x000014A0} - DDR Dunit ODT Control Register */
#if defined(MV88F672X)
	reg_write(REG_DRAM_FIFO_CTRL_ADDR, reg);

	/*{0x000014C0} - DRAM address and Control Driving Strength */
	reg_write(REG_DRAM_ADDR_CTRL_DRIVE_STRENGTH_ADDR, 0x192435e9);

	/*{0x000014C4} - DRAM Data and DQS Driving Strength */
	reg_write(REG_DRAM_DATA_DQS_DRIVE_STRENGTH_ADDR, 0xB2C35E9);

#if (defined(MV88F78X60) || defined(MV88F672X))
	/*{0x000014CC} - DRAM Main Pads Calibration Machine Control Register */
	reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
	/* Bit 0 starts the pads calibration machine */
	reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg | (1 << 0));

#if defined(MV88F672X)
	/* DRAM Main Pads Calibration Machine Control Register */
	/* 0x14CC[4:3] - CalUpdateControl = IntOnly */
	reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
	reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg);

	/* Record each enabled rank's size in the CS scratch registers */
	for (cs = 0; cs < MAX_CS; cs++) {
		if ((1 << cs) & DIMM_CS_BITMAP) {
			if ((1 << cs) & cs_ena) {
				if (dimm_info[dimm_cnt].num_of_module_ranks ==
				reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8),
					  dimm_info[dimm_cnt].rank_capacity - 1);
				reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8), 0);

	/*{0x00020184} - Close FastPath - 2G */
	reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, 0);

	/*{0x00001538} - Read Data Sample Delays Register */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs))
			reg |= (cl << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));

	reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
	DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Sample Delays = ", reg,

	/*{0x0000153C} - Read Data Ready Delay Register */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
				(REG_READ_DATA_READY_DELAYS_OFFS * cs));
	reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
	DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Ready Delays = ", reg, 1);

	/* Set MR registers */
	/* MR0: CL is split across bit 2 and bits [6:4] */
	tmp = ddr3_cl_to_valid_cl(cl);
	reg |= ((tmp & 0x1) << 2);
	reg |= ((tmp & 0xE) << 3);	/* to bit 4 */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg_write(REG_DDR3_MR0_CS_ADDR +
				  (cs << MR_CS_ADDR_OFFS), reg);

	/* MR1: base value (variant chosen by build config), per-CS static
	 * ODT bits merged in below */
	reg = 0x00000044 & REG_DDR3_MR1_ODT_MASK;
	reg = 0x00000046 & REG_DDR3_MR1_ODT_MASK;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg |= odt_static[cs_ena][cs];
			reg_write(REG_DDR3_MR1_CS_ADDR +
				  (cs << MR_CS_ADDR_OFFS), reg);

	/* MR2: derive CAS write latency (CWL) from the DDR clock period */
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		tmp = hclk_time / 2;

		cwl = 5;	/* CWL = 5 */
	else if (tmp >= 1875 && tmp < 2500)
		cwl = 6;	/* CWL = 6 */
	else if (tmp >= 1500 && tmp < 1875)
		cwl = 7;	/* CWL = 7 */
	else if (tmp >= 1250 && tmp < 1500)
		cwl = 8;	/* CWL = 8 */
	else if (tmp >= 1070 && tmp < 1250)
		cwl = 9;	/* CWL = 9 */
	else if (tmp >= 935 && tmp < 1070)
		cwl = 10;	/* CWL = 10 */
	else if (tmp >= 833 && tmp < 935)
		cwl = 11;	/* CWL = 11 */
	else if (tmp >= 750 && tmp < 833)
		cwl = 12;	/* CWL = 12 */
		cwl = 12;	/* CWL = 12 */
	printf("Unsupported hclk %d MHz\n", tmp);

	reg = ((cwl - 5) << REG_DDR3_MR2_CWL_OFFS);
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg &= REG_DDR3_MR2_ODT_MASK;
			reg |= odt_dynamic[cs_ena][cs];
			reg_write(REG_DDR3_MR2_CS_ADDR +
				  (cs << MR_CS_ADDR_OFFS), reg);

	/* MR3 */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg_write(REG_DDR3_MR3_CS_ADDR +
				  (cs << MR_CS_ADDR_OFFS), reg);

	/* {0x00001428} - DDR ODT Timing (Low) Register */
	reg |= (((cl - cwl + 1) & 0xF) << 4);
	reg |= (((cl - cwl + 6) & 0xF) << 8);
	reg |= ((((cl - cwl + 6) >> 4) & 0x1) << 21);
	reg |= (((cl - 1) & 0xF) << 12);
	reg |= (((cl + 6) & 0x1F) << 16);
	reg_write(REG_ODT_TIME_LOW_ADDR, reg);

	/* {0x0000147C} - DDR ODT Timing (High) Register */
	reg |= ((cwl - 1) << 8);
	reg |= ((cwl + 5) << 12);
	reg_write(REG_ODT_TIME_HIGH_ADDR, reg);

	/*{0x000015E0} - DDR3 Rank Control Register */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
			if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
			/* Address mirroring: odd ranks of non-RDIMMs only */
			if (dimm_info[dimm_cnt].addr_mirroring &&
			    (cs == 1 || cs == 3) &&
			    (sum_info.type_info != SPD_MODULE_TYPE_RDIMM)) {
				reg |= (1 << (REG_DDR3_RANK_CTRL_MIRROR_OFFS + cs));
				DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Setting Address Mirroring for CS = ",

	reg_write(REG_DDR3_RANK_CTRL_ADDR, reg);

	/*{0xD00015E4} - ZQDS Configuration Register */
	reg_write(REG_ZQC_CONF_ADDR, reg);

	/* {0x00015EC} - DDR PHY */
#if defined(MV88F78X60)
	/* PHY configuration differs per chip revision */
	if (mv_ctrl_rev_get() == MV_78XX0_B0_REV)
#if defined(MV88F672X)
	reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

#if (defined(MV88F78X60) || defined(MV88F672X))
	/* Registered DIMM support - supported only in AXP A0 devices */
	/* Currently supported for SPD detection only */
	/*
	 * Flow is according to the Registered DIMM chapter in the
	 */
	if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
		DEBUG_INIT_S("DDR3 Training Sequence - Registered DIMM detected\n");

		/* Set commands parity completion */
		reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
		reg &= ~REG_REGISTERED_DRAM_CTRL_PARITY_MASK;
		reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);

		/* De-assert M_RESETn and assert M_CKE */
		reg_write(REG_SDRAM_INIT_CTRL_ADDR,
			  1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
			reg = (reg_read(REG_SDRAM_INIT_CTRL_ADDR)) &
			      (1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);

		/* Issue a CWA command per RDIMM control word (RC6/7 skipped) */
		for (rc = 0; rc < SPD_RDIMM_RC_NUM; rc++) {
			if (rc != 6 && rc != 7) {
				/* Set CWA Command */
				reg = (REG_SDRAM_OPERATION_CMD_CWA &
				       ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
				reg |= ((dimm_info[0].dimm_rc[rc] &
					 REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
					REG_SDRAM_OPERATION_CWA_DATA_OFFS);
				reg |= rc << REG_SDRAM_OPERATION_CWA_RC_OFFS;
				/* Configure - Set Delay - tSTAB/tMRD */
				if (rc == 2 || rc == 10)
					reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
				/* 0x1418 - SDRAM Operation Register */
				reg_write(REG_SDRAM_OPERATION_ADDR, reg);

				/*
				 * Poll the "cmd" field in the SDRAM OP
				 */
					reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
					      (REG_SDRAM_OPERATION_CMD_MASK);
/*
 * Name:     ddr3_div - this function divides integers
 * Args:     val - the value
 *           divider - the divider
 *           sub - subtraction value
 * Returns:  required value
 */
1250 u32 ddr3_div(u32 val, u32 divider, u32 sub)
1252 return val / divider + (val % divider > 0 ? 1 : 0) - sub;
/*
 * Name:     ddr3_get_max_val
 */
/*
 * ddr3_get_max_val - choose between an SPD-derived value and a static
 * (compile-time) value for a timing parameter.
 * NOTE(review): the visible logic prefers spd_val when
 * spd_val >= static_val; how dimm_num affects the selection is not
 * visible here - confirm against the full implementation.
 */
u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val)
	if (spd_val >= static_val)
/*
 * Name:     ddr3_get_min_val
 */
1285 u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val)
1289 if (spd_val <= static_val)