1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) Marvell International Ltd. and its affiliates
4  */
5
6 #include <common.h>
7 #include <i2c.h>
8 #include <spl.h>
9 #include <asm/io.h>
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
12
13 #include "ddr3_init.h"
14
15 #if defined(MV88F78X60)
16 #include "ddr3_axp_config.h"
17 #elif defined(MV88F67XX)
18 #include "ddr3_a370_config.h"
19 #endif
20
21 #if defined(MV88F672X)
22 #include "ddr3_a375_config.h"
23 #endif
24
25 #ifdef DUNIT_SPD
26
27 /* DIMM SPD offsets */
28 #define SPD_DEV_TYPE_BYTE               2
29
30 #define SPD_MODULE_TYPE_BYTE            3
31 #define SPD_MODULE_MASK                 0xf
32 #define SPD_MODULE_TYPE_RDIMM           1
33 #define SPD_MODULE_TYPE_UDIMM           2
34
35 #define SPD_DEV_DENSITY_BYTE            4
36 #define SPD_DEV_DENSITY_MASK            0xf
37
38 #define SPD_ROW_NUM_BYTE                5
39 #define SPD_ROW_NUM_MIN                 12
40 #define SPD_ROW_NUM_OFF                 3
41 #define SPD_ROW_NUM_MASK                (7 << SPD_ROW_NUM_OFF)
42
43 #define SPD_COL_NUM_BYTE                5
44 #define SPD_COL_NUM_MIN                 9
45 #define SPD_COL_NUM_OFF                 0
46 #define SPD_COL_NUM_MASK                (7 << SPD_COL_NUM_OFF)
47
48 #define SPD_MODULE_ORG_BYTE             7
49 #define SPD_MODULE_SDRAM_DEV_WIDTH_OFF  0
50 #define SPD_MODULE_SDRAM_DEV_WIDTH_MASK (7 << SPD_MODULE_SDRAM_DEV_WIDTH_OFF)
51 #define SPD_MODULE_BANK_NUM_MIN         1
52 #define SPD_MODULE_BANK_NUM_OFF         3
53 #define SPD_MODULE_BANK_NUM_MASK        (7 << SPD_MODULE_BANK_NUM_OFF)
54
55 #define SPD_BUS_WIDTH_BYTE              8
56 #define SPD_BUS_WIDTH_OFF               0
57 #define SPD_BUS_WIDTH_MASK              (7 << SPD_BUS_WIDTH_OFF)
58 #define SPD_BUS_ECC_OFF                 3
59 #define SPD_BUS_ECC_MASK                (3 << SPD_BUS_ECC_OFF)
60
61 #define SPD_MTB_DIVIDEND_BYTE           10
62 #define SPD_MTB_DIVISOR_BYTE            11
63 #define SPD_TCK_BYTE                    12
64 #define SPD_SUP_CAS_LAT_LSB_BYTE        14
65 #define SPD_SUP_CAS_LAT_MSB_BYTE        15
66 #define SPD_TAA_BYTE                    16
67 #define SPD_TWR_BYTE                    17
68 #define SPD_TRCD_BYTE                   18
69 #define SPD_TRRD_BYTE                   19
70 #define SPD_TRP_BYTE                    20
71
72 #define SPD_TRAS_MSB_BYTE               21
73 #define SPD_TRAS_MSB_MASK               0xf
74
75 #define SPD_TRC_MSB_BYTE                21
76 #define SPD_TRC_MSB_MASK                0xf0
77
78 #define SPD_TRAS_LSB_BYTE               22
79 #define SPD_TRC_LSB_BYTE                23
80 #define SPD_TRFC_LSB_BYTE               24
81 #define SPD_TRFC_MSB_BYTE               25
82 #define SPD_TWTR_BYTE                   26
83 #define SPD_TRTP_BYTE                   27
84
85 #define SPD_TFAW_MSB_BYTE               28
86 #define SPD_TFAW_MSB_MASK               0xf
87
88 #define SPD_TFAW_LSB_BYTE               29
89 #define SPD_OPT_FEATURES_BYTE           30
90 #define SPD_THERMAL_REFRESH_OPT_BYTE    31
91
92 #define SPD_ADDR_MAP_BYTE               63
93 #define SPD_ADDR_MAP_MIRROR_OFFS        0
94
95 #define SPD_RDIMM_RC_BYTE               69
96 #define SPD_RDIMM_RC_NIBBLE_MASK        0xF
97 #define SPD_RDIMM_RC_NUM                16
98
99 /* Dimm Memory Type values */
100 #define SPD_MEM_TYPE_SDRAM              0x4
101 #define SPD_MEM_TYPE_DDR1               0x7
102 #define SPD_MEM_TYPE_DDR2               0x8
103 #define SPD_MEM_TYPE_DDR3               0xB
104
105 #define DIMM_MODULE_MANU_OFFS           64
106 #define DIMM_MODULE_MANU_SIZE           8
107 #define DIMM_MODULE_VEN_OFFS            73
108 #define DIMM_MODULE_VEN_SIZE            25
109 #define DIMM_MODULE_ID_OFFS             99
110 #define DIMM_MODULE_ID_SIZE             18
111
112 /* enumeration for voltage levels. */
113 enum dimm_volt_if {
114         TTL_5V_TOLERANT,
115         LVTTL,
116         HSTL_1_5V,
117         SSTL_3_3V,
118         SSTL_2_5V,
119         VOLTAGE_UNKNOWN,
120 };
121
122 /* enumeration for SDRAM CAS latencies. */
123 enum dimm_sdram_cas {
124         SD_CL_1 = 1,
125         SD_CL_2,
126         SD_CL_3,
127         SD_CL_4,
128         SD_CL_5,
129         SD_CL_6,
130         SD_CL_7,
131         SD_FAULT
132 };
133
134 /* enumeration for memory types */
135 enum memory_type {
136         MEM_TYPE_SDRAM,
137         MEM_TYPE_DDR1,
138         MEM_TYPE_DDR2,
139         MEM_TYPE_DDR3
140 };
141
142 /* DIMM information structure */
143 typedef struct dimm_info {
144         /* DIMM dimensions */
145         u32 num_of_module_ranks;
146         u32 data_width;
147         u32 rank_capacity;
148         u32 num_of_devices;
149
150         u32 sdram_width;
151         u32 num_of_banks_on_each_device;
152         u32 sdram_capacity;
153
154         u32 num_of_row_addr;
155         u32 num_of_col_addr;
156
157         u32 addr_mirroring;
158
159         u32 err_check_type;                     /* ECC , PARITY.. */
160         u32 type_info;                          /* DIMM module type (UDIMM/RDIMM) */
161
162         /* DIMM timing parameters */
163         u32 supported_cas_latencies;
164         u32 refresh_interval;
165         u32 min_cycle_time;
166         u32 min_row_precharge_time;
167         u32 min_row_active_to_row_active;
168         u32 min_ras_to_cas_delay;
169         u32 min_write_recovery_time;            /* DDR3/2 only */
170         u32 min_write_to_read_cmd_delay;        /* DDR3/2 only */
171         u32 min_read_to_prech_cmd_delay;        /* DDR3/2 only */
172         u32 min_active_to_precharge;
173         u32 min_refresh_recovery;               /* DDR3/2 only */
174         u32 min_cas_lat_time;
175         u32 min_four_active_win_delay;
176         u8 dimm_rc[SPD_RDIMM_RC_NUM];
177
178         /* DIMM vendor ID */
179         u32 vendor;
180 } MV_DIMM_INFO;
181
182 static int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info,
183                              u32 dimm);
184 static u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val);
185 static u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val);
186 static int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width);
187 static u32 ddr3_div(u32 val, u32 divider, u32 sub);
188
189 extern u8 spd_data[SPD_SIZE];
190 extern u32 odt_config[ODT_OPT];
191 extern u16 odt_static[ODT_OPT][MAX_CS];
192 extern u16 odt_dynamic[ODT_OPT][MAX_CS];
193
194 #if !(defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710))
195 /*
196  * Name:     ddr3_get_dimm_num - Find the number of DIMMs and their addresses
197  * Desc:     Scan the SPD EEPROM address range for DDR3 DIMMs.
198  * Args:     dimm_addr - array filled with the detected DIMM addresses
199  * Notes:
200  * Returns:  Number of DIMMs detected.
201  */
202 static u32 ddr3_get_dimm_num(u32 *dimm_addr)
203 {
204         u32 dimm_cur_addr;
205         u8 data[3];
206         u32 dimm_num = 0;
207         int ret;
208
209         /* Read the dimm eeprom */
210         for (dimm_cur_addr = MAX_DIMM_ADDR; dimm_cur_addr > MIN_DIMM_ADDR;
211              dimm_cur_addr--) {
212                 data[SPD_DEV_TYPE_BYTE] = 0;
213
214                 /* Far-End DIMM must be connected */
215                 if ((dimm_num == 0) && (dimm_cur_addr < FAR_END_DIMM_ADDR))
216                         return 0;
217
218                 ret = i2c_read(dimm_cur_addr, 0, 1, (uchar *)data, 3);
219                 if (!ret) {
220                         if (data[SPD_DEV_TYPE_BYTE] == SPD_MEM_TYPE_DDR3) {
221                                 dimm_addr[dimm_num] = dimm_cur_addr;
222                                 dimm_num++;
223                         }
224                 }
225         }
226
227         return dimm_num;
228 }
229 #endif
230
231 /*
232  * Name:     ddr3_spd_init - Get the SPD parameters.
233  * Desc:     Read the DIMM SPD parameters into the given info structure.
234  * Args:     info - DIMM information structure to fill.
235  *           dimm_addr - SPD EEPROM address (0 = use pre-loaded spd_data)
236  *           dimm_width - DRAM data bus width in bits
237  * Returns:  MV_OK if the DIMM parameters could be read, error code otherwise.
238  */
239 int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width)
240 {
241         u32 tmp;
242         u32 time_base;
243         int ret;
244         __maybe_unused u32 rc;
245         __maybe_unused u8 vendor_high, vendor_low;
246
247         if (dimm_addr != 0) {
248                 memset(spd_data, 0, SPD_SIZE * sizeof(u8));
249
250                 ret = i2c_read(dimm_addr, 0, 1, (uchar *)spd_data, SPD_SIZE);
251                 if (ret)
252                         return MV_DDR3_TRAINING_ERR_TWSI_FAIL;
253         }
254
255         /* Check if DDR3 */
256         if (spd_data[SPD_DEV_TYPE_BYTE] != SPD_MEM_TYPE_DDR3)
257                 return MV_DDR3_TRAINING_ERR_TWSI_BAD_TYPE;
258
259         /* Error Check Type */
260         /* No byte for error check in DDR3 SPD, use DDR2 convention */
261         info->err_check_type = 0;
262
263         /* Check if ECC */
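        /*
         * Note: per the JEDEC DDR3 SPD layout, bits [4:3] of byte 8 hold the
         * bus width extension; a non-zero value means 8 extra ECC bits.
         */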
264         if ((spd_data[SPD_BUS_WIDTH_BYTE] & SPD_BUS_ECC_MASK) >> SPD_BUS_ECC_OFF)
265                 info->err_check_type = 1;
266
267         DEBUG_INIT_FULL_C("DRAM err_check_type ", info->err_check_type, 1);
268         switch (spd_data[SPD_MODULE_TYPE_BYTE]) {
269         case 1:
270                 /* support RDIMM */
271                 info->type_info = SPD_MODULE_TYPE_RDIMM;
272                 break;
273         case 2:
274                 /* support UDIMM */
275                 info->type_info = SPD_MODULE_TYPE_UDIMM;
276                 break;
277         case 11:                /* LRDIMM currently not supported */
278         default:
279                 info->type_info = (spd_data[SPD_MODULE_TYPE_BYTE]);
280                 break;
281         }
282
283         /* Size Calculations: */
284
285         /* Number Of Row Addresses - 12/13/14/15/16 */
286         info->num_of_row_addr =
287                 (spd_data[SPD_ROW_NUM_BYTE] & SPD_ROW_NUM_MASK) >>
288                 SPD_ROW_NUM_OFF;
289         info->num_of_row_addr += SPD_ROW_NUM_MIN;
290         DEBUG_INIT_FULL_C("DRAM num_of_row_addr ", info->num_of_row_addr, 2);
291
292         /* Number Of Column Addresses - 9/10/11/12 */
293         info->num_of_col_addr =
294                 (spd_data[SPD_COL_NUM_BYTE] & SPD_COL_NUM_MASK) >>
295                 SPD_COL_NUM_OFF;
296         info->num_of_col_addr += SPD_COL_NUM_MIN;
297         DEBUG_INIT_FULL_C("DRAM num_of_col_addr ", info->num_of_col_addr, 1);
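        /*
         * Example decode: a 2 Gb x8 DDR3 device typically reports row code 3
         * (12 + 3 = 15 row bits) and column code 1 (9 + 1 = 10 column bits).
         */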
298
299         /* Number Of Ranks = number of CS on Dimm - 1/2/3/4 Ranks */
300         info->num_of_module_ranks =
301                 (spd_data[SPD_MODULE_ORG_BYTE] & SPD_MODULE_BANK_NUM_MASK) >>
302                 SPD_MODULE_BANK_NUM_OFF;
303         info->num_of_module_ranks += SPD_MODULE_BANK_NUM_MIN;
304         DEBUG_INIT_FULL_C("DRAM numOfModuleBanks ", info->num_of_module_ranks,
305                           1);
306
307         /* Data Width - 8/16/32/64 bits */
308         info->data_width =
309                 1 << (3 + (spd_data[SPD_BUS_WIDTH_BYTE] & SPD_BUS_WIDTH_MASK));
310         DEBUG_INIT_FULL_C("DRAM data_width ", info->data_width, 1);
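        /* e.g. primary bus width code 3 in SPD byte 8 -> 1 << (3 + 3) = 64 bits */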
311
312         /* Number Of Banks On Each Device - 8/16/32/64 banks */
313         info->num_of_banks_on_each_device =
314                 1 << (3 + ((spd_data[SPD_DEV_DENSITY_BYTE] >> 4) & 0x7));
315         DEBUG_INIT_FULL_C("DRAM num_of_banks_on_each_device ",
316                           info->num_of_banks_on_each_device, 1);
317
318         /* Total SDRAM capacity - 256Mb/512Mb/1Gb/2Gb/4Gb/8Gb/16Gb - MegaBits */
319         info->sdram_capacity =
320                 spd_data[SPD_DEV_DENSITY_BYTE] & SPD_DEV_DENSITY_MASK;
321
322         /* Sdram Width - 4/8/16/32 bits */
323         info->sdram_width = 1 << (2 + (spd_data[SPD_MODULE_ORG_BYTE] &
324                                        SPD_MODULE_SDRAM_DEV_WIDTH_MASK));
325         DEBUG_INIT_FULL_C("DRAM sdram_width ", info->sdram_width, 1);
326
327         /* CS (Rank) Capacity - MB */
328         /*
329          * DDR3 device density values give: (device capacity / 8) *
330          * (module_width / device_width)
331          */
332         /* JEDEC SPD DDR3 - page 7, save spd_data in Mb - 2048 = 2GB */
333         if (dimm_width == 32) {
334                 info->rank_capacity =
335                         ((1 << info->sdram_capacity) * 256 *
336                          (info->data_width / info->sdram_width)) << 16;
337                 /* CS size = CS size / 2  */
338         } else {
339                 info->rank_capacity =
340                         ((1 << info->sdram_capacity) * 256 *
341                          (info->data_width / info->sdram_width) * 0x2) << 16;
342                 /* 0x2 =>  0x100000-1Mbit / 8-bit->byte / 0x10000  */
343         }
344         DEBUG_INIT_FULL_C("DRAM rank_capacity[31] ", info->rank_capacity, 1);
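        /*
         * Example (64-bit module): density code 3 (2 Gb) with x8 devices gives
         * (1 << 3) * 256 * (64 / 8) * 2 = 32768, and 32768 << 16 = 0x80000000,
         * i.e. 2 GiB per rank.
         */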
345
346         /* Number of devices including error correction */
347         info->num_of_devices =
348                 ((info->data_width / info->sdram_width) *
349                  info->num_of_module_ranks) + info->err_check_type;
350         DEBUG_INIT_FULL_C("DRAM num_of_devices  ", info->num_of_devices, 1);
351
352         /* Address Mapping from Edge connector to DRAM - mirroring option */
353         info->addr_mirroring =
354                 spd_data[SPD_ADDR_MAP_BYTE] & (1 << SPD_ADDR_MAP_MIRROR_OFFS);
355
356         /* Timings - All in ps */
357
358         time_base = (1000 * spd_data[SPD_MTB_DIVIDEND_BYTE]) /
359                 spd_data[SPD_MTB_DIVISOR_BYTE];
360
361         /* Minimum Cycle Time at Max CAS Latency (tCKmin) */
362         info->min_cycle_time = spd_data[SPD_TCK_BYTE] * time_base;
363         DEBUG_INIT_FULL_C("DRAM tCKmin ", info->min_cycle_time, 1);
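        /*
         * Example: DDR3 SPDs commonly use a 1/8 ns medium time base
         * (dividend 1, divisor 8), so time_base = 1000 / 8 = 125 ps; a
         * DDR3-1600 part encodes tCKmin as 10 MTB -> 10 * 125 = 1250 ps.
         */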
364
365         /* Refresh Interval */
366         /* No byte for refresh interval in DDR3 SPD, use DDR2 convention */
367         /*
368          * JEDEC parameters: 0 <= Tcase <= 85C: 7.8 us; 85C < Tcase
369          * <= 95C: 3.9 us
370          */
371         info->refresh_interval = 7800000;       /* Set to 7.8uSec */
372         DEBUG_INIT_FULL_C("DRAM refresh_interval ", info->refresh_interval, 1);
373
374         /* Supported CAS Latencies - DDR3: */
375
376         /*
377          * Byte 14 (LSB):
378          *        bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0
379          * CAS =   11  |  10  |  9   |  8   |  7   |  6   |  5   |  4
380          *
381          * Byte 15 (MSB):
382          *        bit15 |bit14 |bit13 |bit12 |bit11 |bit10 | bit9 | bit8
383          * CAS =   TBD  |  18  |  17  |  16  |  15  |  14  |  13  |  12
384          *
385          */
386
387         /* The DDR3 SPD includes 2 bytes of CAS support */
388         info->supported_cas_latencies =
389                 (spd_data[SPD_SUP_CAS_LAT_MSB_BYTE] << 8) |
390                 spd_data[SPD_SUP_CAS_LAT_LSB_BYTE];
391         DEBUG_INIT_FULL_C("DRAM supported_cas_latencies ",
392                           info->supported_cas_latencies, 1);
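        /*
         * Example: byte 14 = 0xFE, byte 15 = 0x00 gives a bitmap of 0x00FE,
         * i.e. CL 5 through CL 11 are supported.
         */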
393
394         /* Minimum CAS Latency Time (tAAmin) */
395         info->min_cas_lat_time = (spd_data[SPD_TAA_BYTE] * time_base);
396         /*
397          * This field divided by the cycle time gives the CAS latency
398          * to configure
399          */
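        /* e.g. tAAmin = 110 MTB = 13750 ps; at a 1250 ps DDR clock -> CL 11 */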
400
401         /*
402          * For DDR3 and DDR2, the SPD includes a Write Recovery Time field;
403          * other SDRAM types ignore it.
404          */
405         info->min_write_recovery_time = spd_data[SPD_TWR_BYTE] * time_base;
406         DEBUG_INIT_FULL_C("DRAM min_write_recovery_time ",
407                           info->min_write_recovery_time, 1);
408
409         /* Minimum RAS to CAS Delay (tRCD) */
410         info->min_ras_to_cas_delay = spd_data[SPD_TRCD_BYTE] * time_base;
411         DEBUG_INIT_FULL_C("DRAM min_ras_to_cas_delay ",
412                           info->min_ras_to_cas_delay, 1);
413
414         /* Minimum Row Active to Row Active Time */
415         info->min_row_active_to_row_active =
416             spd_data[SPD_TRRD_BYTE] * time_base;
417         DEBUG_INIT_FULL_C("DRAM min_row_active_to_row_active ",
418                           info->min_row_active_to_row_active, 1);
419
420         /* Minimum Row Precharge Delay Time */
421         info->min_row_precharge_time = spd_data[SPD_TRP_BYTE] * time_base;
422         DEBUG_INIT_FULL_C("DRAM min_row_precharge_time ",
423                           info->min_row_precharge_time, 1);
424
425         /* Minimum Active to Precharge Delay Time - tRAS   ps */
426         info->min_active_to_precharge =
427                 (spd_data[SPD_TRAS_MSB_BYTE] & SPD_TRAS_MSB_MASK) << 8;
428         info->min_active_to_precharge |= spd_data[SPD_TRAS_LSB_BYTE];
429         info->min_active_to_precharge *= time_base;
430         DEBUG_INIT_FULL_C("DRAM min_active_to_precharge ",
431                           info->min_active_to_precharge, 1);
432
433         /* Minimum Refresh Recovery Delay Time - tRFC  ps */
434         info->min_refresh_recovery = spd_data[SPD_TRFC_MSB_BYTE] << 8;
435         info->min_refresh_recovery |= spd_data[SPD_TRFC_LSB_BYTE];
436         info->min_refresh_recovery *= time_base;
437         DEBUG_INIT_FULL_C("DRAM min_refresh_recovery ",
438                           info->min_refresh_recovery, 1);
439
440         /*
441          * For DDR3 and DDR2, the SPD includes an Internal Write To Read
442          * Command Delay field.
443          */
444         info->min_write_to_read_cmd_delay = spd_data[SPD_TWTR_BYTE] * time_base;
445         DEBUG_INIT_FULL_C("DRAM min_write_to_read_cmd_delay ",
446                           info->min_write_to_read_cmd_delay, 1);
447
448         /*
449          * For DDR3 and DDR2, the SPD includes an Internal Read To Precharge
450          * Command Delay field.
451          */
452         info->min_read_to_prech_cmd_delay = spd_data[SPD_TRTP_BYTE] * time_base;
453         DEBUG_INIT_FULL_C("DRAM min_read_to_prech_cmd_delay ",
454                           info->min_read_to_prech_cmd_delay, 1);
455
456         /*
457          * For DDR3, includes the Minimum Four Activate Window Delay (tFAW)
458          * field
459          */
460         tmp = ((spd_data[SPD_TFAW_MSB_BYTE] & SPD_TFAW_MSB_MASK) << 8) |
461                 spd_data[SPD_TFAW_LSB_BYTE];
462         info->min_four_active_win_delay = tmp * time_base;
463         DEBUG_INIT_FULL_C("DRAM min_four_active_win_delay ",
464                           info->min_four_active_win_delay, 1);
465
466 #if defined(MV88F78X60) || defined(MV88F672X)
467         /* Registered DIMM support */
468         if (info->type_info == SPD_MODULE_TYPE_RDIMM) {
469                 for (rc = 2; rc < 6; rc += 2) {
470                         tmp = spd_data[SPD_RDIMM_RC_BYTE + rc / 2];
471                         info->dimm_rc[rc] =
472                                 spd_data[SPD_RDIMM_RC_BYTE + rc / 2] &
473                                 SPD_RDIMM_RC_NIBBLE_MASK;
474                         info->dimm_rc[rc + 1] =
475                                 (spd_data[SPD_RDIMM_RC_BYTE + rc / 2] >> 4) &
476                                 SPD_RDIMM_RC_NIBBLE_MASK;
477                 }
478
479                 vendor_low = spd_data[66];
480                 vendor_high = spd_data[65];
481                 info->vendor = (vendor_high << 8) + vendor_low;
482                 DEBUG_INIT_C("DDR3 Training Sequence - Registered DIMM vendor ID 0x",
483                              info->vendor, 4);
484
485                 info->dimm_rc[0] = RDIMM_RC0;
486                 info->dimm_rc[1] = RDIMM_RC1;
487                 info->dimm_rc[2] = RDIMM_RC2;
488                 info->dimm_rc[8] = RDIMM_RC8;
489                 info->dimm_rc[9] = RDIMM_RC9;
490                 info->dimm_rc[10] = RDIMM_RC10;
491                 info->dimm_rc[11] = RDIMM_RC11;
492         }
493 #endif
494
495         return MV_OK;
496 }
497
498 /*
499  * Name:     ddr3_spd_sum_init - Merge DIMM SPD parameters.
500  * Desc:     Fold a DIMM's parameters into the summary structure, keeping
501  *           the worst-case timing values across all DIMMs.
502  * Args:     info - current DIMM information structure.
503  *           sum_info - summary structure. dimm - DIMM index (0 = init).
504  * Returns:  MV_OK if the DIMMs are compatible, error code otherwise.
505  */
506 int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info, u32 dimm)
507 {
508         if (dimm == 0) {
509                 memcpy(sum_info, info, sizeof(MV_DIMM_INFO));
510                 return MV_OK;
511         }
512         if (sum_info->type_info != info->type_info) {
513                 DEBUG_INIT_S("DDR3 Dimm Compare - DIMM type does not match - FAIL\n");
514                 return MV_DDR3_TRAINING_ERR_DIMM_TYPE_NO_MATCH;
515         }
516         if (sum_info->err_check_type > info->err_check_type) {
517                 sum_info->err_check_type = info->err_check_type;
518                 DEBUG_INIT_S("DDR3 Dimm Compare - ECC does not match. ECC is disabled\n");
519         }
520         if (sum_info->data_width != info->data_width) {
521                 DEBUG_INIT_S("DDR3 Dimm Compare - DRAM bus width does not match - FAIL\n");
522                 return MV_DDR3_TRAINING_ERR_BUS_WIDTH_NOT_MATCH;
523         }
524         if (sum_info->min_cycle_time < info->min_cycle_time)
525                 sum_info->min_cycle_time = info->min_cycle_time;
526         if (sum_info->refresh_interval < info->refresh_interval)
527                 sum_info->refresh_interval = info->refresh_interval;
528         sum_info->supported_cas_latencies &= info->supported_cas_latencies;
529         if (sum_info->min_cas_lat_time < info->min_cas_lat_time)
530                 sum_info->min_cas_lat_time = info->min_cas_lat_time;
531         if (sum_info->min_write_recovery_time < info->min_write_recovery_time)
532                 sum_info->min_write_recovery_time =
533                     info->min_write_recovery_time;
534         if (sum_info->min_ras_to_cas_delay < info->min_ras_to_cas_delay)
535                 sum_info->min_ras_to_cas_delay = info->min_ras_to_cas_delay;
536         if (sum_info->min_row_active_to_row_active <
537             info->min_row_active_to_row_active)
538                 sum_info->min_row_active_to_row_active =
539                     info->min_row_active_to_row_active;
540         if (sum_info->min_row_precharge_time < info->min_row_precharge_time)
541                 sum_info->min_row_precharge_time = info->min_row_precharge_time;
542         if (sum_info->min_active_to_precharge < info->min_active_to_precharge)
543                 sum_info->min_active_to_precharge =
544                     info->min_active_to_precharge;
545         if (sum_info->min_refresh_recovery < info->min_refresh_recovery)
546                 sum_info->min_refresh_recovery = info->min_refresh_recovery;
547         if (sum_info->min_write_to_read_cmd_delay <
548             info->min_write_to_read_cmd_delay)
549                 sum_info->min_write_to_read_cmd_delay =
550                     info->min_write_to_read_cmd_delay;
551         if (sum_info->min_read_to_prech_cmd_delay <
552             info->min_read_to_prech_cmd_delay)
553                 sum_info->min_read_to_prech_cmd_delay =
554                     info->min_read_to_prech_cmd_delay;
555         if (sum_info->min_four_active_win_delay <
556             info->min_four_active_win_delay)
557                 sum_info->min_four_active_win_delay =
558                     info->min_four_active_win_delay;
563
564         return MV_OK;
565 }
566
567 /*
568  * Name:     ddr3_dunit_setup
569  * Desc:     Set the controller with the timing values.
570  * Args:     ecc_ena - user ECC enable; hclk_time - hclk period in ps;
571  *           ddr_width - pointer to the DDR data bus width
572  * Returns:  MV_OK on success, error code otherwise.
573  */
574 int ddr3_dunit_setup(u32 ecc_ena, u32 hclk_time, u32 *ddr_width)
575 {
576         u32 reg, tmp, cwl;
577         u32 ddr_clk_time;
578         MV_DIMM_INFO dimm_info[2];
579         MV_DIMM_INFO sum_info;
580         u32 stat_val, spd_val;
581         u32 cs, cl, cs_num, cs_ena;
582         u32 dimm_num = 0;
583         int status;
584         u32 rc;
585         __maybe_unused u32 dimm_cnt, cs_count, dimm;
586         __maybe_unused u32 dimm_addr[2] = { 0, 0 };
587
588 #if defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710)
589         /* Armada 370 - SPD is not available on DIMM */
590         /*
591          * Set MC registers according to static SPD values -
592          * must be set manually
593          */
594         /*
595          * We only have one optional DIMM for the DB and we already got the
596          * SPD matching values
597          */
598         status = ddr3_spd_init(&dimm_info[0], 0, *ddr_width);
599         if (MV_OK != status)
600                 return status;
601
602         dimm_num = 1;
603         /* Use JP8 to enable multiCS support for Armada 370 DB */
604         if (!ddr3_check_config(EEPROM_MODULE_ADDR, CONFIG_MULTI_CS))
605                 dimm_info[0].num_of_module_ranks = 1;
606         status = ddr3_spd_sum_init(&dimm_info[0], &sum_info, 0);
607         if (MV_OK != status)
608                 return status;
609 #else
610         /* Dynamic D-Unit Setup - Read SPD values */
611 #ifdef DUNIT_SPD
612         dimm_num = ddr3_get_dimm_num(dimm_addr);
613         if (dimm_num == 0) {
614 #ifdef MIXED_DIMM_STATIC
615                 DEBUG_INIT_S("DDR3 Training Sequence - No DIMMs detected\n");
616 #else
617                 DEBUG_INIT_S("DDR3 Training Sequence - FAILED (Wrong DIMMs Setup)\n");
618                 return MV_DDR3_TRAINING_ERR_BAD_DIMM_SETUP;
619 #endif
620         } else {
621                 DEBUG_INIT_C("DDR3 Training Sequence - Number of DIMMs detected: ",
622                              dimm_num, 1);
623         }
624
625         for (dimm = 0; dimm < dimm_num; dimm++) {
626                 status = ddr3_spd_init(&dimm_info[dimm], dimm_addr[dimm],
627                                        *ddr_width);
628                 if (MV_OK != status)
629                         return status;
630                 status = ddr3_spd_sum_init(&dimm_info[dimm], &sum_info, dimm);
631                 if (MV_OK != status)
632                         return status;
633         }
634 #endif
635 #endif
636
637         /* Set number of enabled CS */
638         cs_num = 0;
639 #ifdef DUNIT_STATIC
640         cs_num = ddr3_get_cs_num_from_reg();
641 #endif
642 #ifdef DUNIT_SPD
643         for (dimm = 0; dimm < dimm_num; dimm++)
644                 cs_num += dimm_info[dimm].num_of_module_ranks;
645 #endif
646         if (cs_num > MAX_CS) {
647                 DEBUG_INIT_C("DDR3 Training Sequence - Number of CS exceed limit -  ",
648                              MAX_CS, 1);
649                 return MV_DDR3_TRAINING_ERR_MAX_CS_LIMIT;
650         }
651
652         /* Set bitmap of enabled CS */
653         cs_ena = 0;
654 #ifdef DUNIT_STATIC
655         cs_ena = ddr3_get_cs_ena_from_reg();
656 #endif
657 #ifdef DUNIT_SPD
658         dimm = 0;
659
660         if (dimm_num) {
661                 for (cs = 0; cs < MAX_CS; cs += 2) {
662                         if (((1 << cs) & DIMM_CS_BITMAP) &&
663                             !(cs_ena & (1 << cs))) {
664                                 if (dimm_info[dimm].num_of_module_ranks == 1)
665                                         cs_ena |= (0x1 << cs);
666                                 else if (dimm_info[dimm].num_of_module_ranks == 2)
667                                         cs_ena |= (0x3 << cs);
668                                 else if (dimm_info[dimm].num_of_module_ranks == 3)
669                                         cs_ena |= (0x7 << cs);
670                                 else if (dimm_info[dimm].num_of_module_ranks == 4)
671                                         cs_ena |= (0xF << cs);
672
673                                 dimm++;
674                                 if (dimm == dimm_num)
675                                         break;
676                         }
677                 }
678         }
679 #endif
680
681         if (cs_ena > 0xF) {
682                 DEBUG_INIT_C("DDR3 Training Sequence - Number of enabled CS exceed limit -  ",
683                              MAX_CS, 1);
684                 return MV_DDR3_TRAINING_ERR_MAX_ENA_CS_LIMIT;
685         }
686
687         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Number of CS = ", cs_num, 1);
688
689         /* Check Ratio - '1' - 2:1, '0' - 1:1 */
690         if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
691                 ddr_clk_time = hclk_time / 2;
692         else
693                 ddr_clk_time = hclk_time;
694
695 #ifdef DUNIT_STATIC
696         /* Get target CL value from set register */
697         reg = (reg_read(REG_DDR3_MR0_ADDR) >> 2);
698         reg = ((((reg >> 1) & 0xE)) | (reg & 0x1)) & 0xF;
699
700         cl = ddr3_get_max_val(ddr3_div(sum_info.min_cas_lat_time,
701                                        ddr_clk_time, 0),
702                               dimm_num, ddr3_valid_cl_to_cl(reg));
703 #else
704         cl = ddr3_div(sum_info.min_cas_lat_time, ddr_clk_time, 0);
705 #endif
706         if (cl < 5)
707                 cl = 5;
708
709         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Cas Latency = ", cl, 1);
710
711         /* {0x00001400} -   DDR SDRAM Configuration Register */
712         reg = 0x73004000;
713         stat_val = ddr3_get_static_mc_value(
714                 REG_SDRAM_CONFIG_ADDR, REG_SDRAM_CONFIG_ECC_OFFS, 0x1, 0, 0);
715         if (ecc_ena && ddr3_get_min_val(sum_info.err_check_type, dimm_num,
716                                         stat_val)) {
717                 reg |= (1 << REG_SDRAM_CONFIG_ECC_OFFS);
718                 reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
719                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Enabled\n");
720         } else {
721                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Disabled\n");
722         }
723
724         if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
725 #ifdef DUNIT_STATIC
726                 DEBUG_INIT_S("DDR3 Training Sequence - FAIL - Illegal R-DIMM setup\n");
727                 return MV_DDR3_TRAINING_ERR_BAD_R_DIMM_SETUP;
728 #endif
729                 reg |= (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS);
730                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - R-DIMM\n");
731         } else {
732                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - U-DIMM\n");
733         }
734
735 #ifndef MV88F67XX
736 #ifdef DUNIT_STATIC
737         if (ddr3_get_min_val(sum_info.data_width, dimm_num, BUS_WIDTH) == 64) {
738 #else
739         if (*ddr_width == 64) {
740 #endif
741                 reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
742                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 64Bits\n");
743         } else {
744                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
745         }
746 #else
747         DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
748 #endif
749
750 #if defined(MV88F672X)
751         if (*ddr_width == 32) {
752                 reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
753                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
754         } else {
755                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
756         }
757 #endif
758         stat_val = ddr3_get_static_mc_value(REG_SDRAM_CONFIG_ADDR, 0,
759                                                REG_SDRAM_CONFIG_RFRS_MASK, 0, 0);
760         tmp = ddr3_get_min_val(sum_info.refresh_interval / hclk_time,
761                                dimm_num, stat_val);
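        /*
         * e.g. with a 5000 ps hclk, 7800000 / 5000 = 1560 hclk cycles
         * between refreshes.
         */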
762
763 #ifdef TREFI_USER_EN
764         tmp = min(TREFI_USER / hclk_time, tmp);
765 #endif
766
767         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - RefreshInterval/Hclk = ", tmp, 4);
768         reg |= tmp;
769
770         if (cl != 3)
771                 reg |= (1 << 16);       /*  If 2:1 need to set P2DWr */
772
773 #if defined(MV88F672X)
774         reg |= (1 << 27);       /* PhyRfRST = Disable */
775 #endif
776         reg_write(REG_SDRAM_CONFIG_ADDR, reg);
777
778         /*{0x00001404}  -   DDR Controller Control (Low) Register */
779         reg = 0x3630B800;
780 #ifdef DUNIT_SPD
781         reg |= (DRAM_2T << REG_DUNIT_CTRL_LOW_2T_OFFS);
782 #endif
783         reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
784
785         /* {0x00001408}  -   DDR SDRAM Timing (Low) Register */
786         reg = 0x0;
787
788         /* tRAS - (0:3,20) */
789         spd_val = ddr3_div(sum_info.min_active_to_precharge,
790                             ddr_clk_time, 1);
791         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
792                                             0, 0xF, 16, 0x10);
793         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
794         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRAS-1 = ", tmp, 1);
795         reg |= (tmp & 0xF);
796         reg |= ((tmp & 0x10) << 16);    /* to bit 20 */
797
798         /* tRCD - (4:7) */
799         spd_val = ddr3_div(sum_info.min_ras_to_cas_delay, ddr_clk_time, 1);
800         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
801                                             4, 0xF, 0, 0);
802         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
803         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRCD-1 = ", tmp, 1);
804         reg |= ((tmp & 0xF) << 4);
805
806         /* tRP - (8:11) */
807         spd_val = ddr3_div(sum_info.min_row_precharge_time, ddr_clk_time, 1);
808         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
809                                             8, 0xF, 0, 0);
810         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
811         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRP-1 = ", tmp, 1);
812         reg |= ((tmp & 0xF) << 8);
813
814         /* tWR - (12:15) */
815         spd_val = ddr3_div(sum_info.min_write_recovery_time, ddr_clk_time, 1);
816         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
817                                             12, 0xF, 0, 0);
818         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
819         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWR-1 = ", tmp, 1);
820         reg |= ((tmp & 0xF) << 12);
821
822         /* tWTR - (16:19) */
823         spd_val = ddr3_div(sum_info.min_write_to_read_cmd_delay, ddr_clk_time, 1);
824         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
825                                             16, 0xF, 0, 0);
826         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
827         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWTR-1 = ", tmp, 1);
828         reg |= ((tmp & 0xF) << 16);
829
830         /* tRRD - (24:27) */
831         spd_val = ddr3_div(sum_info.min_row_active_to_row_active, ddr_clk_time, 1);
832         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
833                                             24, 0xF, 0, 0);
834         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
835         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRRD-1 = ", tmp, 1);
836         reg |= ((tmp & 0xF) << 24);
837
838         /* tRTP - (28:31) */
839         spd_val = ddr3_div(sum_info.min_read_to_prech_cmd_delay, ddr_clk_time, 1);
840         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
841                                             28, 0xF, 0, 0);
842         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
843         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRTP-1 = ", tmp, 1);
844         reg |= ((tmp & 0xF) << 28);
845
846         if (cl < 7)
847                 reg = 0x33137663;
848
849         reg_write(REG_SDRAM_TIMING_LOW_ADDR, reg);
850
851         /*{0x0000140C}  -   DDR SDRAM Timing (High) Register */
852         /* Add cycles to R2R W2W */
853         reg = 0x39F8FF80;
854
855         /* tRFC - (0:6,16:18) */
856         spd_val = ddr3_div(sum_info.min_refresh_recovery, ddr_clk_time, 1);
857         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_HIGH_ADDR,
858                                             0, 0x7F, 9, 0x380);
859         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
860         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRFC-1 = ", tmp, 1);
861         reg |= (tmp & 0x7F);
862         reg |= ((tmp & 0x380) << 9);    /* to bit 16 */
863         reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);
864
865         /*{0x00001410}  -   DDR SDRAM Address Control Register */
866         reg = 0x000F0000;
867
868         /* tFAW - (24:28)  */
869 #if (defined(MV88F78X60) || defined(MV88F672X))
870         tmp = sum_info.min_four_active_win_delay;
871         spd_val = ddr3_div(tmp, ddr_clk_time, 0);
872         stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
873                                             24, 0x3F, 0, 0);
874         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
875         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW = ", tmp, 1);
876         reg |= ((tmp & 0x3F) << 24);
877 #else
878         tmp = sum_info.min_four_active_win_delay -
879                 4 * (sum_info.min_row_active_to_row_active);
880         spd_val = ddr3_div(tmp, ddr_clk_time, 0);
881         stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
882                                             24, 0x1F, 0, 0);
883         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
884         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW-4*tRRD = ", tmp, 1);
885         reg |= ((tmp & 0x1F) << 24);
886 #endif
887
888         /* SDRAM device capacity */
889 #ifdef DUNIT_STATIC
890         reg |= (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) & 0xF0FFFF);
891 #endif
892
893 #ifdef DUNIT_SPD
894         cs_count = 0;
895         dimm_cnt = 0;
896         for (cs = 0; cs < MAX_CS; cs++) {
897                 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
898                         if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
899                                 dimm_cnt++;
900                                 cs_count = 0;
901                         }
902                         cs_count++;
903                         if (dimm_info[dimm_cnt].sdram_capacity < 0x3) {
904                                 reg |= ((dimm_info[dimm_cnt].sdram_capacity + 1) <<
905                                         (REG_SDRAM_ADDRESS_SIZE_OFFS +
906                                          (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
907                         } else if (dimm_info[dimm_cnt].sdram_capacity > 0x3) {
908                                 reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x3) <<
909                                         (REG_SDRAM_ADDRESS_SIZE_OFFS +
910                                          (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
911                                 reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x4) <<
912                                         (REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs));
913                         }
914                 }
915         }
916
917         /* SDRAM device structure */
918         cs_count = 0;
919         dimm_cnt = 0;
920         for (cs = 0; cs < MAX_CS; cs++) {
921                 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
922                         if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
923                                 dimm_cnt++;
924                                 cs_count = 0;
925                         }
926                         cs_count++;
927                         if (dimm_info[dimm_cnt].sdram_width == 16)
928                                 reg |= (1 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs));
929                 }
930         }
931 #endif
932         reg_write(REG_SDRAM_ADDRESS_CTRL_ADDR, reg);
933
934         /*{0x00001418}  -   DDR SDRAM Operation Register */
935         reg = 0xF00;
936         for (cs = 0; cs < MAX_CS; cs++) {
937                 if (cs_ena & (1 << cs))
938                         reg &= ~(1 << (cs + REG_SDRAM_OPERATION_CS_OFFS));
939         }
940         reg_write(REG_SDRAM_OPERATION_ADDR, reg);
941
942         /*{0x00001420}  -   DDR SDRAM Extended Mode Register */
943         reg = 0x00000004;
944         reg_write(REG_SDRAM_EXT_MODE_ADDR, reg);
945
946         /*{0x00001424}  -   DDR Controller Control (High) Register */
947 #if (defined(MV88F78X60) || defined(MV88F672X))
948         reg = 0x0000D3FF;
949 #else
950         reg = 0x0100D1FF;
951 #endif
952         reg_write(REG_DDR_CONT_HIGH_ADDR, reg);
953
954         /*{0x0000142C}  -   DDR3 Timing Register */
955         reg = 0x014C2F38;
956 #if defined(MV88F78X60) || defined(MV88F672X)
957         reg = 0x1FEC2F38;
958 #endif
959         reg_write(0x142C, reg);
960
961         /*{0x00001484}  - MBus CPU Block Register */
962 #ifdef MV88F67XX
963         if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
964                 reg_write(REG_MBUS_CPU_BLOCK_ADDR, 0x0000E907);
965 #endif
966
967         /*
968          * In case of mixed DIMM and on-board devices, setup parameters will
969          * be taken statically
970          */
971         /*{0x00001494}  -   DDR SDRAM ODT Control (Low) Register */
972         reg = odt_config[cs_ena];
973         reg_write(REG_SDRAM_ODT_CTRL_LOW_ADDR, reg);
974
975         /*{0x00001498}  -   DDR SDRAM ODT Control (High) Register */
976         reg = 0x00000000;
977         reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, reg);
978
979         /*{0x0000149C}  -   DDR Dunit ODT Control Register */
980         reg = cs_ena;
981         reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);
982
983         /*{0x000014A0}  -   DRAM FIFO Control Register */
984 #if defined(MV88F672X)
985         reg = 0x000006A9;
986         reg_write(REG_DRAM_FIFO_CTRL_ADDR, reg);
987 #endif
988
989         /*{0x000014C0}  -   DRAM Address and Control Driving Strength */
990         reg_write(REG_DRAM_ADDR_CTRL_DRIVE_STRENGTH_ADDR, 0x192435e9);
991
992         /*{0x000014C4}  -   DRAM Data and DQS Driving Strength */
993         reg_write(REG_DRAM_DATA_DQS_DRIVE_STRENGTH_ADDR, 0xB2C35E9);
994
995 #if (defined(MV88F78X60) || defined(MV88F672X))
996         /*{0x000014CC}  -   DRAM Main Pads Calibration Machine Control Register */
997         reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
998         reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg | (1 << 0));
999 #endif
1000
1001 #if defined(MV88F672X)
1002         /* DRAM Main Pads Calibration Machine Control Register */
1003         /* 0x14CC[4:3] - CalUpdateControl = IntOnly */
1004         reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
1005         reg &= 0xFFFFFFE7;
1006         reg |= (1 << 3);
1007         reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg);
1008 #endif
1009
1010 #ifdef DUNIT_SPD
1011         cs_count = 0;
1012         dimm_cnt = 0;
1013         for (cs = 0; cs < MAX_CS; cs++) {
1014                 if ((1 << cs) & DIMM_CS_BITMAP) {
1015                         if ((1 << cs) & cs_ena) {
1016                                 if (dimm_info[dimm_cnt].num_of_module_ranks ==
1017                                     cs_count) {
1018                                         dimm_cnt++;
1019                                         cs_count = 0;
1020                                 }
1021                                 cs_count++;
1022                                 reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8),
1023                                           dimm_info[dimm_cnt].rank_capacity - 1);
1024                         } else {
1025                                 reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8), 0);
1026                         }
1027                 }
1028         }
1029 #endif
1030
1031         /*{0x00020184}  -   Close FastPath - 2G */
1032         reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, 0);
1033
1034         /*{0x00001538}  -    Read Data Sample Delays Register */
1035         reg = 0;
1036         for (cs = 0; cs < MAX_CS; cs++) {
1037                 if (cs_ena & (1 << cs))
1038                         reg |= (cl << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1039         }
1040
1041         reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
1042         DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Sample Delays = ", reg,
1043                           1);
1044
1045         /*{0x0000153C}  -   Read Data Ready Delay Register */
1046         reg = 0;
1047         for (cs = 0; cs < MAX_CS; cs++) {
1048                 if (cs_ena & (1 << cs)) {
1049                         reg |= ((cl + 2) <<
1050                                 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
1051                 }
1052         }
1053         reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
1054         DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Ready Delays = ", reg, 1);
1055
1056         /* Set MR registers */
1057         /* MR0 */
1058         reg = 0x00000600;
1059         tmp = ddr3_cl_to_valid_cl(cl);
1060         reg |= ((tmp & 0x1) << 2);
1061         reg |= ((tmp & 0xE) << 3);      /* to bit 4 */
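        /*
         * DDR3 MR0 places the CAS-latency code in bits A6:A4 plus A2; the
         * two shifts above move the low bit of the code to A2 and the upper
         * bits to A6:A4.
         */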
1062         for (cs = 0; cs < MAX_CS; cs++) {
1063                 if (cs_ena & (1 << cs)) {
1064                         reg_write(REG_DDR3_MR0_CS_ADDR +
1065                                   (cs << MR_CS_ADDR_OFFS), reg);
1066                 }
1067         }
1068
1069         /* MR1 */
1070         reg = 0x00000044 & REG_DDR3_MR1_ODT_MASK;
1071         if (cs_num > 1)
1072                 reg = 0x00000046 & REG_DDR3_MR1_ODT_MASK;
1073
1074         for (cs = 0; cs < MAX_CS; cs++) {
1075                 if (cs_ena & (1 << cs)) {
1076                         reg |= odt_static[cs_ena][cs];
1077                         reg_write(REG_DDR3_MR1_CS_ADDR +
1078                                   (cs << MR_CS_ADDR_OFFS), reg);
1079                 }
1080         }
1081
1082         /* MR2 */
1083         if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
1084                 tmp = hclk_time / 2;
1085         else
1086                 tmp = hclk_time;
1087
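        /*
         * Pick the CAS write latency from the DDR clock period, following the
         * JEDEC DDR3 speed-bin CWL values, e.g. 2500 ps (DDR3-800) -> CWL 5,
         * 1250 ps (DDR3-1600) -> CWL 8.
         */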
1088         if (tmp >= 2500)
1089                 cwl = 5;        /* CWL = 5 */
1090         else if (tmp >= 1875 && tmp < 2500)
1091                 cwl = 6;        /* CWL = 6 */
1092         else if (tmp >= 1500 && tmp < 1875)
1093                 cwl = 7;        /* CWL = 7 */
1094         else if (tmp >= 1250 && tmp < 1500)
1095                 cwl = 8;        /* CWL = 8 */
1096         else if (tmp >= 1070 && tmp < 1250)
1097                 cwl = 9;        /* CWL = 9 */
1098         else if (tmp >= 935 && tmp < 1070)
1099                 cwl = 10;       /* CWL = 10 */
1100         else if (tmp >= 833 && tmp < 935)
1101                 cwl = 11;       /* CWL = 11 */
1102         else if (tmp >= 750 && tmp < 833)
1103                 cwl = 12;       /* CWL = 12 */
1104         else {
1105                 cwl = 12;       /* CWL = 12 */
1106                 printf("Unsupported DDR clock period %d ps\n", tmp);
1107         }
1108
1109         reg = ((cwl - 5) << REG_DDR3_MR2_CWL_OFFS);
1110
1111         for (cs = 0; cs < MAX_CS; cs++) {
1112                 if (cs_ena & (1 << cs)) {
1113                         reg &= REG_DDR3_MR2_ODT_MASK;
1114                         reg |= odt_dynamic[cs_ena][cs];
1115                         reg_write(REG_DDR3_MR2_CS_ADDR +
1116                                   (cs << MR_CS_ADDR_OFFS), reg);
1117                 }
1118         }
1119
1120         /* MR3 */
1121         reg = 0x00000000;
1122         for (cs = 0; cs < MAX_CS; cs++) {
1123                 if (cs_ena & (1 << cs)) {
1124                         reg_write(REG_DDR3_MR3_CS_ADDR +
1125                                   (cs << MR_CS_ADDR_OFFS), reg);
1126                 }
1127         }
1128
1129         /* {0x00001428}  -   DDR ODT Timing (Low) Register */
1130         reg = 0;
1131         reg |= (((cl - cwl + 1) & 0xF) << 4);
1132         reg |= (((cl - cwl + 6) & 0xF) << 8);
1133         reg |= ((((cl - cwl + 6) >> 4) & 0x1) << 21);
1134         reg |= (((cl - 1) & 0xF) << 12);
1135         reg |= (((cl + 6) & 0x1F) << 16);
1136         reg_write(REG_ODT_TIME_LOW_ADDR, reg);
1137
1138         /* {0x0000147C}  -   DDR ODT Timing (High) Register */
1139         reg = 0x00000071;
1140         reg |= ((cwl - 1) << 8);
1141         reg |= ((cwl + 5) << 12);
1142         reg_write(REG_ODT_TIME_HIGH_ADDR, reg);
1143
1144 #ifdef DUNIT_SPD
1145         /*{0x000015E0} - DDR3 Rank Control Register */
1146         reg = cs_ena;
1147         cs_count = 0;
1148         dimm_cnt = 0;
1149         for (cs = 0; cs < MAX_CS; cs++) {
1150                 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
1151                         if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
1152                                 dimm_cnt++;
1153                                 cs_count = 0;
1154                         }
1155                         cs_count++;
1156
1157                         if (dimm_info[dimm_cnt].addr_mirroring &&
1158                             (cs == 1 || cs == 3) &&
1159                             (sum_info.type_info != SPD_MODULE_TYPE_RDIMM)) {
1160                                 reg |= (1 << (REG_DDR3_RANK_CTRL_MIRROR_OFFS + cs));
1161                                 DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Setting Address Mirroring for CS = ",
1162                                                   cs, 1);
1163                         }
1164                 }
1165         }
1166         reg_write(REG_DDR3_RANK_CTRL_ADDR, reg);
1167 #endif
1168
1169         /*{0xD00015E4}  -   ZQDS Configuration Register */
1170         reg = 0x00203c18;
1171         reg_write(REG_ZQC_CONF_ADDR, reg);
1172
1173         /* {0x00015EC}  -   DDR PHY */
1174 #if defined(MV88F78X60)
1175         reg = 0xF800AAA5;
1176         if (mv_ctrl_rev_get() == MV_78XX0_B0_REV)
1177                 reg = 0xF800A225;
1178 #else
1179         reg = 0xDE000025;
1180 #if defined(MV88F672X)
1181         reg = 0xF800A225;
1182 #endif
1183 #endif
1184         reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1185
1186 #if (defined(MV88F78X60) || defined(MV88F672X))
1187         /* Registered DIMM support - supported only in AXP A0 devices */
1188         /* Currently supported for SPD detection only */
1189         /*
1190          * Flow is according to the Registered DIMM chapter in the
1191          * Functional Spec
1192          */
1193         if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
1194                 DEBUG_INIT_S("DDR3 Training Sequence - Registered DIMM detected\n");
1195
1196                 /* Set commands parity completion */
1197                 reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
1198                 reg &= ~REG_REGISTERED_DRAM_CTRL_PARITY_MASK;
1199                 reg |= 0x8;
1200                 reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
1201
1202                 /* De-assert M_RESETn and assert M_CKE */
1203                 reg_write(REG_SDRAM_INIT_CTRL_ADDR,
1204                           1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
1205                 do {
1206                         reg = (reg_read(REG_SDRAM_INIT_CTRL_ADDR)) &
1207                                 (1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
1208                 } while (reg);
1209
1210                 for (rc = 0; rc < SPD_RDIMM_RC_NUM; rc++) {
1211                         if (rc != 6 && rc != 7) {
1212                                 /* Set CWA Command */
1213                                 reg = (REG_SDRAM_OPERATION_CMD_CWA &
1214                                        ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
1215                                 reg |= ((dimm_info[0].dimm_rc[rc] &
1216                                          REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1217                                         REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1218                                 reg |= rc << REG_SDRAM_OPERATION_CWA_RC_OFFS;
1219                                 /* Configure - Set Delay - tSTAB/tMRD */
1220                                 if (rc == 2 || rc == 10)
1221                                         reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
1222                                 /* 0x1418 - SDRAM Operation Register */
1223                                 reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1224
1225                                 /*
1226                                  * Poll the "cmd" field in the SDRAM OP
1227                                  * register for 0x0
1228                                  */
1229                                 do {
1230                                         reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
1231                                                 (REG_SDRAM_OPERATION_CMD_MASK);
1232                                 } while (reg);
1233                         }
1234                 }
1235         }
1236 #endif
1237
1238         return MV_OK;
1239 }
1240
1241 /*
1242  * Name:     ddr3_div - integer division, rounded up
1243  * Desc:
1244  * Args:     val - the value
1245  *           divider - the divider
1246  *           sub - subtraction value applied to the result
1247  * Notes:
1248  * Returns:  ceil(val / divider) - sub
1249  */
1250 u32 ddr3_div(u32 val, u32 divider, u32 sub)
1251 {
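        /*
         * Round-up division minus an offset, e.g. ddr3_div(13125, 1250, 0)
         * = 11 and ddr3_div(35000, 1250, 1) = 28 - 1 = 27.
         */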
1252         return val / divider + (val % divider > 0 ? 1 : 0) - sub;
1253 }
1254
1255 /*
1256  * Name:     ddr3_get_max_val
1257  * Desc:     With a static D-Unit setup and DIMMs present, return the
1258  *           larger of the SPD-derived and static values; otherwise
1259  *           return the SPD value (or the static value if no DIMM).
1260  * Returns:  Selected value.
1261  */
1262 u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val)
1263 {
1264 #ifdef DUNIT_STATIC
1265         if (dimm_num > 0) {
1266                 if (spd_val >= static_val)
1267                         return spd_val;
1268                 else
1269                         return static_val;
1270         } else {
1271                 return static_val;
1272         }
1273 #else
1274         return spd_val;
1275 #endif
1276 }
1277
1278 /*
1279  * Name:     ddr3_get_min_val
1280  * Desc:     With a static D-Unit setup and DIMMs present, return the
1281  *           smaller of the SPD-derived and static values; otherwise
1282  *           return the SPD value (or the static value if no DIMM).
1283  * Returns:  Selected value.
1284  */
1285 u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val)
1286 {
1287 #ifdef DUNIT_STATIC
1288         if (dimm_num > 0) {
1289                 if (spd_val <= static_val)
1290                         return spd_val;
1291                 else
1292                         return static_val;
1293         } else
1294                 return static_val;
1295 #else
1296         return spd_val;
1297 #endif
1298 }
1299 #endif