4 * Copyright (C) 2009-2010 Texas Instruments, Inc.
6 * Aneesh V <aneesh@ti.com>
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License version 2 as
10 * published by the Free Software Foundation.
15 #include <asm/types.h>
/* Base addresses of the two EMIF module instances */
#define EMIF1_BASE 0x4c000000
#define EMIF2_BASE 0x4d000000

/* Registers shifts, masks and values */

/* Module ID / revision register fields */
#define EMIF_REG_SCHEME_SHIFT 30
#define EMIF_REG_SCHEME_MASK (0x3U << 30) /* unsigned: mask reaches bit 31 (signed shift would be UB) */
#define EMIF_REG_MODULE_ID_SHIFT 16
#define EMIF_REG_MODULE_ID_MASK (0xfff << 16)
#define EMIF_REG_RTL_VERSION_SHIFT 11
#define EMIF_REG_RTL_VERSION_MASK (0x1f << 11)
#define EMIF_REG_MAJOR_REVISION_SHIFT 8
#define EMIF_REG_MAJOR_REVISION_MASK (0x7 << 8)
#define EMIF_REG_MINOR_REVISION_SHIFT 0
#define EMIF_REG_MINOR_REVISION_MASK (0x3f << 0)
/* Status register fields */
#define EMIF_REG_BE_SHIFT 31
#define EMIF_REG_BE_MASK (1U << 31) /* 1U: left-shifting signed 1 into bit 31 is UB */
#define EMIF_REG_DUAL_CLK_MODE_SHIFT 30
#define EMIF_REG_DUAL_CLK_MODE_MASK (1 << 30)
#define EMIF_REG_FAST_INIT_SHIFT 29
#define EMIF_REG_FAST_INIT_MASK (1 << 29)
/* NOTE(review): "LEVLING" spelling kept as-is for compatibility with existing users */
#define EMIF_REG_LEVLING_TO_SHIFT 4
#define EMIF_REG_LEVELING_TO_MASK (7 << 4)
#define EMIF_REG_PHY_DLL_READY_SHIFT 2
#define EMIF_REG_PHY_DLL_READY_MASK (1 << 2)
/* SDRAM_CONFIG register fields */
#define EMIF_REG_SDRAM_TYPE_SHIFT 29
#define EMIF_REG_SDRAM_TYPE_MASK (0x7U << 29) /* unsigned: mask includes bit 31 */
/* Values for the SDRAM_TYPE field */
#define EMIF_REG_SDRAM_TYPE_DDR1 0
#define EMIF_REG_SDRAM_TYPE_LPDDR1 1
#define EMIF_REG_SDRAM_TYPE_DDR2 2
#define EMIF_REG_SDRAM_TYPE_DDR3 3
#define EMIF_REG_SDRAM_TYPE_LPDDR2_S4 4
#define EMIF_REG_SDRAM_TYPE_LPDDR2_S2 5
#define EMIF_REG_IBANK_POS_SHIFT 27
#define EMIF_REG_IBANK_POS_MASK (0x3 << 27)
#define EMIF_REG_DDR_TERM_SHIFT 24
#define EMIF_REG_DDR_TERM_MASK (0x7 << 24)
#define EMIF_REG_DDR2_DDQS_SHIFT 23
#define EMIF_REG_DDR2_DDQS_MASK (1 << 23)
#define EMIF_REG_DYN_ODT_SHIFT 21
#define EMIF_REG_DYN_ODT_MASK (0x3 << 21)
#define EMIF_REG_DDR_DISABLE_DLL_SHIFT 20
#define EMIF_REG_DDR_DISABLE_DLL_MASK (1 << 20)
#define EMIF_REG_SDRAM_DRIVE_SHIFT 18
#define EMIF_REG_SDRAM_DRIVE_MASK (0x3 << 18)
#define EMIF_REG_CWL_SHIFT 16
#define EMIF_REG_CWL_MASK (0x3 << 16)
#define EMIF_REG_NARROW_MODE_SHIFT 14
#define EMIF_REG_NARROW_MODE_MASK (0x3 << 14)
#define EMIF_REG_CL_SHIFT 10
#define EMIF_REG_CL_MASK (0xf << 10)
#define EMIF_REG_ROWSIZE_SHIFT 7
#define EMIF_REG_ROWSIZE_MASK (0x7 << 7)
#define EMIF_REG_IBANK_SHIFT 4
#define EMIF_REG_IBANK_MASK (0x7 << 4)
#define EMIF_REG_EBANK_SHIFT 3
#define EMIF_REG_EBANK_MASK (1 << 3)
#define EMIF_REG_PAGESIZE_SHIFT 0
#define EMIF_REG_PAGESIZE_MASK (0x7 << 0)

/* SDRAM_CONFIG_2 register fields */
#define EMIF_REG_CS1NVMEN_SHIFT 30
#define EMIF_REG_CS1NVMEN_MASK (1 << 30)
#define EMIF_REG_EBANK_POS_SHIFT 27
#define EMIF_REG_EBANK_POS_MASK (1 << 27)
#define EMIF_REG_RDBNUM_SHIFT 4
#define EMIF_REG_RDBNUM_MASK (0x3 << 4)
#define EMIF_REG_RDBSIZE_SHIFT 0
#define EMIF_REG_RDBSIZE_MASK (0x7 << 0)
/* SDRAM_REF_CTRL register fields */
#define EMIF_REG_INITREF_DIS_SHIFT 31
#define EMIF_REG_INITREF_DIS_MASK (1U << 31) /* 1U: left-shifting signed 1 into bit 31 is UB */
#define EMIF_REG_SRT_SHIFT 29
#define EMIF_REG_SRT_MASK (1 << 29)
#define EMIF_REG_ASR_SHIFT 28
#define EMIF_REG_ASR_MASK (1 << 28)
#define EMIF_REG_PASR_SHIFT 24
#define EMIF_REG_PASR_MASK (0x7 << 24)
#define EMIF_REG_REFRESH_RATE_SHIFT 0
#define EMIF_REG_REFRESH_RATE_MASK (0xffff << 0)

/* SDRAM_REF_CTRL_SHDW */
#define EMIF_REG_REFRESH_RATE_SHDW_SHIFT 0
#define EMIF_REG_REFRESH_RATE_SHDW_MASK (0xffff << 0)
/* SDRAM_TIM_1 */
#define EMIF_REG_T_RP_SHIFT 25
#define EMIF_REG_T_RP_MASK (0xf << 25)
#define EMIF_REG_T_RCD_SHIFT 21
#define EMIF_REG_T_RCD_MASK (0xf << 21)
#define EMIF_REG_T_WR_SHIFT 17
#define EMIF_REG_T_WR_MASK (0xf << 17)
#define EMIF_REG_T_RAS_SHIFT 12
#define EMIF_REG_T_RAS_MASK (0x1f << 12)
#define EMIF_REG_T_RC_SHIFT 6
#define EMIF_REG_T_RC_MASK (0x3f << 6)
#define EMIF_REG_T_RRD_SHIFT 3
#define EMIF_REG_T_RRD_MASK (0x7 << 3)
#define EMIF_REG_T_WTR_SHIFT 0
#define EMIF_REG_T_WTR_MASK (0x7 << 0)

/* SDRAM_TIM_1_SHDW */
#define EMIF_REG_T_RP_SHDW_SHIFT 25
#define EMIF_REG_T_RP_SHDW_MASK (0xf << 25)
#define EMIF_REG_T_RCD_SHDW_SHIFT 21
#define EMIF_REG_T_RCD_SHDW_MASK (0xf << 21)
#define EMIF_REG_T_WR_SHDW_SHIFT 17
#define EMIF_REG_T_WR_SHDW_MASK (0xf << 17)
#define EMIF_REG_T_RAS_SHDW_SHIFT 12
#define EMIF_REG_T_RAS_SHDW_MASK (0x1f << 12)
#define EMIF_REG_T_RC_SHDW_SHIFT 6
#define EMIF_REG_T_RC_SHDW_MASK (0x3f << 6)
#define EMIF_REG_T_RRD_SHDW_SHIFT 3
#define EMIF_REG_T_RRD_SHDW_MASK (0x7 << 3)
#define EMIF_REG_T_WTR_SHDW_SHIFT 0
#define EMIF_REG_T_WTR_SHDW_MASK (0x7 << 0)

/* SDRAM_TIM_2 */
#define EMIF_REG_T_XP_SHIFT 28
#define EMIF_REG_T_XP_MASK (0x7 << 28)
#define EMIF_REG_T_ODT_SHIFT 25
#define EMIF_REG_T_ODT_MASK (0x7 << 25)
#define EMIF_REG_T_XSNR_SHIFT 16
#define EMIF_REG_T_XSNR_MASK (0x1ff << 16)
#define EMIF_REG_T_XSRD_SHIFT 6
#define EMIF_REG_T_XSRD_MASK (0x3ff << 6)
#define EMIF_REG_T_RTP_SHIFT 3
#define EMIF_REG_T_RTP_MASK (0x7 << 3)
#define EMIF_REG_T_CKE_SHIFT 0
#define EMIF_REG_T_CKE_MASK (0x7 << 0)

/* SDRAM_TIM_2_SHDW */
#define EMIF_REG_T_XP_SHDW_SHIFT 28
#define EMIF_REG_T_XP_SHDW_MASK (0x7 << 28)
#define EMIF_REG_T_ODT_SHDW_SHIFT 25
#define EMIF_REG_T_ODT_SHDW_MASK (0x7 << 25)
#define EMIF_REG_T_XSNR_SHDW_SHIFT 16
#define EMIF_REG_T_XSNR_SHDW_MASK (0x1ff << 16)
#define EMIF_REG_T_XSRD_SHDW_SHIFT 6
#define EMIF_REG_T_XSRD_SHDW_MASK (0x3ff << 6)
#define EMIF_REG_T_RTP_SHDW_SHIFT 3
#define EMIF_REG_T_RTP_SHDW_MASK (0x7 << 3)
#define EMIF_REG_T_CKE_SHDW_SHIFT 0
#define EMIF_REG_T_CKE_SHDW_MASK (0x7 << 0)

/* SDRAM_TIM_3 */
#define EMIF_REG_T_CKESR_SHIFT 21
#define EMIF_REG_T_CKESR_MASK (0x7 << 21)
#define EMIF_REG_ZQ_ZQCS_SHIFT 15
#define EMIF_REG_ZQ_ZQCS_MASK (0x3f << 15)
#define EMIF_REG_T_TDQSCKMAX_SHIFT 13
#define EMIF_REG_T_TDQSCKMAX_MASK (0x3 << 13)
#define EMIF_REG_T_RFC_SHIFT 4
#define EMIF_REG_T_RFC_MASK (0x1ff << 4)
#define EMIF_REG_T_RAS_MAX_SHIFT 0
#define EMIF_REG_T_RAS_MAX_MASK (0xf << 0)

/* SDRAM_TIM_3_SHDW */
#define EMIF_REG_T_CKESR_SHDW_SHIFT 21
#define EMIF_REG_T_CKESR_SHDW_MASK (0x7 << 21)
#define EMIF_REG_ZQ_ZQCS_SHDW_SHIFT 15
#define EMIF_REG_ZQ_ZQCS_SHDW_MASK (0x3f << 15)
#define EMIF_REG_T_TDQSCKMAX_SHDW_SHIFT 13
#define EMIF_REG_T_TDQSCKMAX_SHDW_MASK (0x3 << 13)
#define EMIF_REG_T_RFC_SHDW_SHIFT 4
#define EMIF_REG_T_RFC_SHDW_MASK (0x1ff << 4)
#define EMIF_REG_T_RAS_MAX_SHDW_SHIFT 0
#define EMIF_REG_T_RAS_MAX_SHDW_MASK (0xf << 0)

/* LPDDR2_NVM_TIM */
#define EMIF_REG_NVM_T_XP_SHIFT 28
#define EMIF_REG_NVM_T_XP_MASK (0x7 << 28)
#define EMIF_REG_NVM_T_WTR_SHIFT 24
#define EMIF_REG_NVM_T_WTR_MASK (0x7 << 24)
#define EMIF_REG_NVM_T_RP_SHIFT 20
#define EMIF_REG_NVM_T_RP_MASK (0xf << 20)
#define EMIF_REG_NVM_T_WRA_SHIFT 16
#define EMIF_REG_NVM_T_WRA_MASK (0xf << 16)
#define EMIF_REG_NVM_T_RRD_SHIFT 8
#define EMIF_REG_NVM_T_RRD_MASK (0xff << 8)
#define EMIF_REG_NVM_T_RCDMIN_SHIFT 0
#define EMIF_REG_NVM_T_RCDMIN_MASK (0xff << 0)

/* LPDDR2_NVM_TIM_SHDW */
#define EMIF_REG_NVM_T_XP_SHDW_SHIFT 28
#define EMIF_REG_NVM_T_XP_SHDW_MASK (0x7 << 28)
#define EMIF_REG_NVM_T_WTR_SHDW_SHIFT 24
#define EMIF_REG_NVM_T_WTR_SHDW_MASK (0x7 << 24)
#define EMIF_REG_NVM_T_RP_SHDW_SHIFT 20
#define EMIF_REG_NVM_T_RP_SHDW_MASK (0xf << 20)
#define EMIF_REG_NVM_T_WRA_SHDW_SHIFT 16
#define EMIF_REG_NVM_T_WRA_SHDW_MASK (0xf << 16)
#define EMIF_REG_NVM_T_RRD_SHDW_SHIFT 8
#define EMIF_REG_NVM_T_RRD_SHDW_MASK (0xff << 8)
#define EMIF_REG_NVM_T_RCDMIN_SHDW_SHIFT 0
#define EMIF_REG_NVM_T_RCDMIN_SHDW_MASK (0xff << 0)
/* PWR_MGMT_CTRL register fields */
#define EMIF_REG_IDLEMODE_SHIFT 30
#define EMIF_REG_IDLEMODE_MASK (0x3U << 30) /* unsigned: mask includes bit 31 */
#define EMIF_REG_PD_TIM_SHIFT 12
#define EMIF_REG_PD_TIM_MASK (0xf << 12)
#define EMIF_REG_DPD_EN_SHIFT 11
#define EMIF_REG_DPD_EN_MASK (1 << 11)
#define EMIF_REG_LP_MODE_SHIFT 8
#define EMIF_REG_LP_MODE_MASK (0x7 << 8)
#define EMIF_REG_SR_TIM_SHIFT 4
#define EMIF_REG_SR_TIM_MASK (0xf << 4)
#define EMIF_REG_CS_TIM_SHIFT 0
#define EMIF_REG_CS_TIM_MASK (0xf << 0)

/* PWR_MGMT_CTRL_SHDW */
#define EMIF_REG_PD_TIM_SHDW_SHIFT 12
#define EMIF_REG_PD_TIM_SHDW_MASK (0xf << 12)
#define EMIF_REG_SR_TIM_SHDW_SHIFT 4
#define EMIF_REG_SR_TIM_SHDW_MASK (0xf << 4)
#define EMIF_REG_CS_TIM_SHDW_SHIFT 0
#define EMIF_REG_CS_TIM_SHDW_MASK (0xf << 0)
/* LPDDR2_MODE_REG_DATA */
#define EMIF_REG_VALUE_0_SHIFT 0
#define EMIF_REG_VALUE_0_MASK (0x7f << 0)

/* LPDDR2_MODE_REG_CFG */
#define EMIF_REG_CS_SHIFT 31
#define EMIF_REG_CS_MASK (1U << 31) /* 1U: left-shifting signed 1 into bit 31 is UB */
#define EMIF_REG_REFRESH_EN_SHIFT 30
#define EMIF_REG_REFRESH_EN_MASK (1 << 30)
#define EMIF_REG_ADDRESS_SHIFT 0
#define EMIF_REG_ADDRESS_MASK (0xff << 0)
/* L3 config threshold fields */
#define EMIF_REG_SYS_THRESH_MAX_SHIFT 24
#define EMIF_REG_SYS_THRESH_MAX_MASK (0xf << 24)
#define EMIF_REG_MPU_THRESH_MAX_SHIFT 20
#define EMIF_REG_MPU_THRESH_MAX_MASK (0xf << 20)
#define EMIF_REG_LL_THRESH_MAX_SHIFT 16
#define EMIF_REG_LL_THRESH_MAX_MASK (0xf << 16)
#define EMIF_REG_PR_OLD_COUNT_SHIFT 0
#define EMIF_REG_PR_OLD_COUNT_MASK (0xff << 0)

/* Bus widths and write/command FIFO depths */
#define EMIF_REG_SYS_BUS_WIDTH_SHIFT 30
#define EMIF_REG_SYS_BUS_WIDTH_MASK (0x3U << 30) /* unsigned: mask includes bit 31 */
#define EMIF_REG_LL_BUS_WIDTH_SHIFT 28
#define EMIF_REG_LL_BUS_WIDTH_MASK (0x3 << 28)
#define EMIF_REG_WR_FIFO_DEPTH_SHIFT 8
#define EMIF_REG_WR_FIFO_DEPTH_MASK (0xff << 8)
#define EMIF_REG_CMD_FIFO_DEPTH_SHIFT 0
#define EMIF_REG_CMD_FIFO_DEPTH_MASK (0xff << 0)

/* Read-side FIFO depths */
#define EMIF_REG_RREG_FIFO_DEPTH_SHIFT 16
#define EMIF_REG_RREG_FIFO_DEPTH_MASK (0xff << 16)
#define EMIF_REG_RSD_FIFO_DEPTH_SHIFT 8
#define EMIF_REG_RSD_FIFO_DEPTH_MASK (0xff << 8)
#define EMIF_REG_RCMD_FIFO_DEPTH_SHIFT 0
#define EMIF_REG_RCMD_FIFO_DEPTH_MASK (0xff << 0)

/* IODFT test-logic global control fields */
#define EMIF_REG_TLEC_SHIFT 16
#define EMIF_REG_TLEC_MASK (0xffffU << 16) /* unsigned: mask includes bit 31 */
#define EMIF_REG_MT_SHIFT 14
#define EMIF_REG_MT_MASK (1 << 14)
#define EMIF_REG_ACT_CAP_EN_SHIFT 13
#define EMIF_REG_ACT_CAP_EN_MASK (1 << 13)
#define EMIF_REG_OPG_LD_SHIFT 12
#define EMIF_REG_OPG_LD_MASK (1 << 12)
#define EMIF_REG_RESET_PHY_SHIFT 10
#define EMIF_REG_RESET_PHY_MASK (1 << 10)
#define EMIF_REG_MMS_SHIFT 8
#define EMIF_REG_MMS_MASK (1 << 8)
#define EMIF_REG_MC_SHIFT 4
#define EMIF_REG_MC_MASK (0x3 << 4)
#define EMIF_REG_PC_SHIFT 1
#define EMIF_REG_PC_MASK (0x7 << 1)
#define EMIF_REG_TM_SHIFT 0
#define EMIF_REG_TM_MASK (1 << 0)

/* IODFT_CTRL_MISR_RSLT */
#define EMIF_REG_DQM_TLMR_SHIFT 16
#define EMIF_REG_DQM_TLMR_MASK (0x3ff << 16)
#define EMIF_REG_CTL_TLMR_SHIFT 0
#define EMIF_REG_CTL_TLMR_MASK (0x7ff << 0)

/* IODFT_ADDR_MISR_RSLT */
#define EMIF_REG_ADDR_TLMR_SHIFT 0
#define EMIF_REG_ADDR_TLMR_MASK (0x1fffff << 0)

/* IODFT_DATA_MISR_RSLT_1 */
#define EMIF_REG_DATA_TLMR_31_0_SHIFT 0
#define EMIF_REG_DATA_TLMR_31_0_MASK (0xffffffff << 0)

/* IODFT_DATA_MISR_RSLT_2 */
#define EMIF_REG_DATA_TLMR_63_32_SHIFT 0
#define EMIF_REG_DATA_TLMR_63_32_MASK (0xffffffff << 0)

/* IODFT_DATA_MISR_RSLT_3 */
#define EMIF_REG_DATA_TLMR_66_64_SHIFT 0
#define EMIF_REG_DATA_TLMR_66_64_MASK (0x7 << 0)
/* Performance counter 1 value */
#define EMIF_REG_COUNTER1_SHIFT 0
#define EMIF_REG_COUNTER1_MASK (0xffffffff << 0)

/* Performance counter 2 value */
#define EMIF_REG_COUNTER2_SHIFT 0
#define EMIF_REG_COUNTER2_MASK (0xffffffff << 0)

/* Performance counter config fields */
#define EMIF_REG_CNTR2_MCONNID_EN_SHIFT 31
#define EMIF_REG_CNTR2_MCONNID_EN_MASK (1U << 31) /* 1U: left-shifting signed 1 into bit 31 is UB */
#define EMIF_REG_CNTR2_REGION_EN_SHIFT 30
#define EMIF_REG_CNTR2_REGION_EN_MASK (1 << 30)
#define EMIF_REG_CNTR2_CFG_SHIFT 16
#define EMIF_REG_CNTR2_CFG_MASK (0xf << 16)
#define EMIF_REG_CNTR1_MCONNID_EN_SHIFT 15
#define EMIF_REG_CNTR1_MCONNID_EN_MASK (1 << 15)
#define EMIF_REG_CNTR1_REGION_EN_SHIFT 14
#define EMIF_REG_CNTR1_REGION_EN_MASK (1 << 14)
#define EMIF_REG_CNTR1_CFG_SHIFT 0
#define EMIF_REG_CNTR1_CFG_MASK (0xf << 0)
/* Per-counter master connection-ID / region select fields */
#define EMIF_REG_MCONNID2_SHIFT 24
#define EMIF_REG_MCONNID2_MASK (0xffU << 24) /* unsigned: mask includes bit 31 */
#define EMIF_REG_REGION_SEL2_SHIFT 16
#define EMIF_REG_REGION_SEL2_MASK (0x3 << 16)
#define EMIF_REG_MCONNID1_SHIFT 8
#define EMIF_REG_MCONNID1_MASK (0xff << 8)
#define EMIF_REG_REGION_SEL1_SHIFT 0
#define EMIF_REG_REGION_SEL1_MASK (0x3 << 0)

/* Performance counter timer */
#define EMIF_REG_TOTAL_TIME_SHIFT 0
#define EMIF_REG_TOTAL_TIME_MASK (0xffffffff << 0)

/* READ_IDLE_CTRL */
#define EMIF_REG_READ_IDLE_LEN_SHIFT 16
#define EMIF_REG_READ_IDLE_LEN_MASK (0xf << 16)
#define EMIF_REG_READ_IDLE_INTERVAL_SHIFT 0
#define EMIF_REG_READ_IDLE_INTERVAL_MASK (0x1ff << 0)

/* READ_IDLE_CTRL_SHDW */
#define EMIF_REG_READ_IDLE_LEN_SHDW_SHIFT 16
#define EMIF_REG_READ_IDLE_LEN_SHDW_MASK (0xf << 16)
#define EMIF_REG_READ_IDLE_INTERVAL_SHDW_SHIFT 0
#define EMIF_REG_READ_IDLE_INTERVAL_SHDW_MASK (0x1ff << 0)

/* IRQ end-of-interrupt */
#define EMIF_REG_EOI_SHIFT 0
#define EMIF_REG_EOI_MASK (1 << 0)

/* IRQSTATUS_RAW_SYS */
#define EMIF_REG_DNV_SYS_SHIFT 2
#define EMIF_REG_DNV_SYS_MASK (1 << 2)
#define EMIF_REG_TA_SYS_SHIFT 1
#define EMIF_REG_TA_SYS_MASK (1 << 1)
#define EMIF_REG_ERR_SYS_SHIFT 0
#define EMIF_REG_ERR_SYS_MASK (1 << 0)

/* IRQSTATUS_RAW_LL */
#define EMIF_REG_DNV_LL_SHIFT 2
#define EMIF_REG_DNV_LL_MASK (1 << 2)
#define EMIF_REG_TA_LL_SHIFT 1
#define EMIF_REG_TA_LL_MASK (1 << 1)
#define EMIF_REG_ERR_LL_SHIFT 0
#define EMIF_REG_ERR_LL_MASK (1 << 0)

/* IRQENABLE_SET_SYS */
#define EMIF_REG_EN_DNV_SYS_SHIFT 2
#define EMIF_REG_EN_DNV_SYS_MASK (1 << 2)
#define EMIF_REG_EN_TA_SYS_SHIFT 1
#define EMIF_REG_EN_TA_SYS_MASK (1 << 1)
#define EMIF_REG_EN_ERR_SYS_SHIFT 0
#define EMIF_REG_EN_ERR_SYS_MASK (1 << 0)

/* IRQENABLE_SET_LL */
#define EMIF_REG_EN_DNV_LL_SHIFT 2
#define EMIF_REG_EN_DNV_LL_MASK (1 << 2)
#define EMIF_REG_EN_TA_LL_SHIFT 1
#define EMIF_REG_EN_TA_LL_MASK (1 << 1)
#define EMIF_REG_EN_ERR_LL_SHIFT 0
#define EMIF_REG_EN_ERR_LL_MASK (1 << 0)

/* IRQENABLE_CLR_SYS */

/* IRQENABLE_CLR_LL */
/* ZQ_CONFIG */
#define EMIF_REG_ZQ_CS1EN_SHIFT 31
#define EMIF_REG_ZQ_CS1EN_MASK (1U << 31) /* 1U: left-shifting signed 1 into bit 31 is UB */
#define EMIF_REG_ZQ_CS0EN_SHIFT 30
#define EMIF_REG_ZQ_CS0EN_MASK (1 << 30)
#define EMIF_REG_ZQ_DUALCALEN_SHIFT 29
#define EMIF_REG_ZQ_DUALCALEN_MASK (1 << 29)
#define EMIF_REG_ZQ_SFEXITEN_SHIFT 28
#define EMIF_REG_ZQ_SFEXITEN_MASK (1 << 28)
#define EMIF_REG_ZQ_ZQINIT_MULT_SHIFT 18
#define EMIF_REG_ZQ_ZQINIT_MULT_MASK (0x3 << 18)
#define EMIF_REG_ZQ_ZQCL_MULT_SHIFT 16
#define EMIF_REG_ZQ_ZQCL_MULT_MASK (0x3 << 16)
#define EMIF_REG_ZQ_REFINTERVAL_SHIFT 0
#define EMIF_REG_ZQ_REFINTERVAL_MASK (0xffff << 0)

/* TEMP_ALERT_CONFIG */
#define EMIF_REG_TA_CS1EN_SHIFT 31
#define EMIF_REG_TA_CS1EN_MASK (1U << 31) /* 1U: bit 31 */
#define EMIF_REG_TA_CS0EN_SHIFT 30
#define EMIF_REG_TA_CS0EN_MASK (1 << 30)
#define EMIF_REG_TA_SFEXITEN_SHIFT 28
#define EMIF_REG_TA_SFEXITEN_MASK (1 << 28)
#define EMIF_REG_TA_DEVWDT_SHIFT 26
#define EMIF_REG_TA_DEVWDT_MASK (0x3 << 26)
#define EMIF_REG_TA_DEVCNT_SHIFT 24
#define EMIF_REG_TA_DEVCNT_MASK (0x3 << 24)
#define EMIF_REG_TA_REFINTERVAL_SHIFT 0
#define EMIF_REG_TA_REFINTERVAL_MASK (0x3fffff << 0)
/*
 * Error-log fields: master address space / burst sequence / command /
 * connection ID (presumably the OCP error log register — verify)
 */
#define EMIF_REG_MADDRSPACE_SHIFT 14
#define EMIF_REG_MADDRSPACE_MASK (0x3 << 14)
#define EMIF_REG_MBURSTSEQ_SHIFT 11
#define EMIF_REG_MBURSTSEQ_MASK (0x7 << 11)
#define EMIF_REG_MCMD_SHIFT 8
#define EMIF_REG_MCMD_MASK (0x7 << 8)
#define EMIF_REG_MCONNID_SHIFT 0
#define EMIF_REG_MCONNID_MASK (0xff << 0)

/* DDR_PHY_CTRL_1 */
#define EMIF_REG_DDR_PHY_CTRL_1_SHIFT 4
#define EMIF_REG_DDR_PHY_CTRL_1_MASK (0xfffffffU << 4) /* unsigned: result sets bit 31 (signed shift would overflow) */
#define EMIF_REG_READ_LATENCY_SHIFT 0
#define EMIF_REG_READ_LATENCY_MASK (0xf << 0)
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHIFT 4
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_MASK (0xFF << 4)
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHIFT 12
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_MASK (0xFFFFFU << 12) /* unsigned: result sets bit 31 */

/* DDR_PHY_CTRL_1_SHDW */
#define EMIF_REG_DDR_PHY_CTRL_1_SHDW_SHIFT 4
#define EMIF_REG_DDR_PHY_CTRL_1_SHDW_MASK (0xfffffffU << 4)
#define EMIF_REG_READ_LATENCY_SHDW_SHIFT 0
#define EMIF_REG_READ_LATENCY_SHDW_MASK (0xf << 0)
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHDW_SHIFT 4
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHDW_MASK (0xFF << 4)
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHDW_SHIFT 12
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHDW_MASK (0xFFFFFU << 12)

/* DDR_PHY_CTRL_2 */
#define EMIF_REG_DDR_PHY_CTRL_2_SHIFT 0
#define EMIF_REG_DDR_PHY_CTRL_2_MASK (0xffffffff << 0)
/*EMIF_READ_WRITE_LEVELING_CONTROL*/
#define EMIF_REG_RDWRLVLFULL_START_SHIFT 31
#define EMIF_REG_RDWRLVLFULL_START_MASK (1U << 31) /* 1U: left-shifting signed 1 into bit 31 is UB */
#define EMIF_REG_RDWRLVLINC_PRE_SHIFT 24
#define EMIF_REG_RDWRLVLINC_PRE_MASK (0x7F << 24)
#define EMIF_REG_RDLVLINC_INT_SHIFT 16
#define EMIF_REG_RDLVLINC_INT_MASK (0xFF << 16)
#define EMIF_REG_RDLVLGATEINC_INT_SHIFT 8
#define EMIF_REG_RDLVLGATEINC_INT_MASK (0xFF << 8)
#define EMIF_REG_WRLVLINC_INT_SHIFT 0
#define EMIF_REG_WRLVLINC_INT_MASK (0xFF << 0)

/*EMIF_READ_WRITE_LEVELING_RAMP_CONTROL*/
#define EMIF_REG_RDWRLVL_EN_SHIFT 31
#define EMIF_REG_RDWRLVL_EN_MASK (1U << 31) /* 1U: bit 31 */
#define EMIF_REG_RDWRLVLINC_RMP_PRE_SHIFT 24
#define EMIF_REG_RDWRLVLINC_RMP_PRE_MASK (0x7F << 24)
#define EMIF_REG_RDLVLINC_RMP_INT_SHIFT 16
#define EMIF_REG_RDLVLINC_RMP_INT_MASK (0xFF << 16)
#define EMIF_REG_RDLVLGATEINC_RMP_INT_SHIFT 8
#define EMIF_REG_RDLVLGATEINC_RMP_INT_MASK (0xFF << 8)
#define EMIF_REG_WRLVLINC_RMP_INT_SHIFT 0
#define EMIF_REG_WRLVLINC_RMP_INT_MASK (0xFF << 0)

/*EMIF_READ_WRITE_LEVELING_RAMP_WINDOW*/
#define EMIF_REG_RDWRLVLINC_RMP_WIN_SHIFT 0
#define EMIF_REG_RDWRLVLINC_RMP_WIN_MASK (0x1FFF << 0)
/* EMIF_PHY_CTRL_36 */
/* NOTE(review): "MISALINED" spelling kept as-is for compatibility with callers */
#define EMIF_REG_PHY_FIFO_WE_IN_MISALINED_CLR (1 << 8)

/* PHY ratio register counts */
#define PHY_RDDQS_RATIO_REGS 5
#define PHY_FIFO_WE_SLAVE_RATIO_REGS 5
#define PHY_REG_WR_DQ_SLAVE_RATIO_REGS 10

/* DDR3 read/write leveling increment values */
#define DDR3_WR_LVL_INT 0x73
#define DDR3_RD_LVL_INT 0x33
#define DDR3_RD_LVL_GATE_INT 0x59
#define RD_RW_LVL_INC_PRE 0x0
#define DDR3_FULL_LVL (1U << EMIF_REG_RDWRLVL_EN_SHIFT) /* 1U: shift reaches bit 31 */

/*
 * NOTE(review): the read-leveling and pre-increment terms below use the
 * _RMP_ shift names; those shifts have the same values as the non-ramp
 * fields, so the composed word is unchanged — confirm intent.
 */
#define DDR3_INC_LVL ((DDR3_WR_LVL_INT << EMIF_REG_WRLVLINC_INT_SHIFT) \
	| (DDR3_RD_LVL_GATE_INT << EMIF_REG_RDLVLGATEINC_INT_SHIFT) \
	| (DDR3_RD_LVL_INT << EMIF_REG_RDLVLINC_RMP_INT_SHIFT) \
	| (RD_RW_LVL_INC_PRE << EMIF_REG_RDWRLVLINC_RMP_PRE_SHIFT))

#define SDRAM_CONFIG_EXT_RD_LVL_11_SAMPLES 0x0000C1A7
#define SDRAM_CONFIG_EXT_RD_LVL_4_SAMPLES 0x000001A7
#define SDRAM_CONFIG_EXT_RD_LVL_11_SAMPLES_ES2 0x0000C1C7
/* DMM base address */
#define DMM_BASE 0x4E000040

/* MA base address */
#define MA_BASE 0x482AF040

/* DMM LISA map section fields */
#define EMIF_SYS_ADDR_SHIFT 24
#define EMIF_SYS_ADDR_MASK (0xffU << 24) /* unsigned: mask includes bit 31 */
#define EMIF_SYS_SIZE_SHIFT 20
#define EMIF_SYS_SIZE_MASK (0x7 << 20)
#define EMIF_SDRC_INTL_SHIFT 18
#define EMIF_SDRC_INTL_MASK (0x3 << 18)
#define EMIF_SDRC_ADDRSPC_SHIFT 16
#define EMIF_SDRC_ADDRSPC_MASK (0x3 << 16)
#define EMIF_SDRC_MAP_SHIFT 8
#define EMIF_SDRC_MAP_MASK (0x3 << 8)
#define EMIF_SDRC_ADDR_SHIFT 0
#define EMIF_SDRC_ADDR_MASK (0xff << 0)

/* DMM_LISA_MAP fields */
#define DMM_SDRC_MAP_UNMAPPED 0
#define DMM_SDRC_MAP_EMIF1_ONLY 1
#define DMM_SDRC_MAP_EMIF2_ONLY 2
#define DMM_SDRC_MAP_EMIF1_AND_EMIF2 3

/* NOTE(review): "512" lacks the B suffix its siblings carry; name kept as-is */
#define DMM_SDRC_INTL_NONE 0
#define DMM_SDRC_INTL_128B 1
#define DMM_SDRC_INTL_256B 2
#define DMM_SDRC_INTL_512 3

#define DMM_SDRC_ADDR_SPC_SDRAM 0
#define DMM_SDRC_ADDR_SPC_NVM 1
#define DMM_SDRC_ADDR_SPC_INVALID 2

/*
 * NOTE(review): CONFIG_SYS_SDRAM_BASE is shifted left by 24 here; this
 * assumes the board config provides a value for which the result fits the
 * SYS_ADDR field — verify against the board headers.
 */
#define DMM_LISA_MAP_INTERLEAVED_BASE_VAL (\
	(DMM_SDRC_MAP_EMIF1_AND_EMIF2 << EMIF_SDRC_MAP_SHIFT) |\
	(DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT) |\
	(DMM_SDRC_INTL_128B << EMIF_SDRC_INTL_SHIFT) |\
	(CONFIG_SYS_SDRAM_BASE << EMIF_SYS_ADDR_SHIFT))

#define DMM_LISA_MAP_EMIF1_ONLY_BASE_VAL (\
	(DMM_SDRC_MAP_EMIF1_ONLY << EMIF_SDRC_MAP_SHIFT)|\
	(DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT)|\
	(DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT))

#define DMM_LISA_MAP_EMIF2_ONLY_BASE_VAL (\
	(DMM_SDRC_MAP_EMIF2_ONLY << EMIF_SDRC_MAP_SHIFT)|\
	(DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT)|\
	(DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT))

/* Trap for invalid TILER PAT entries */
#define DMM_LISA_MAP_0_INVAL_ADDR_TRAP (\
	(0 << EMIF_SDRC_ADDR_SHIFT) |\
	(DMM_SDRC_MAP_EMIF1_ONLY << EMIF_SDRC_MAP_SHIFT)|\
	(DMM_SDRC_ADDR_SPC_INVALID << EMIF_SDRC_ADDRSPC_SHIFT)|\
	(DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT)|\
	(0xFFU << EMIF_SYS_ADDR_SHIFT)) /* 0xFFU: avoids signed overflow at bit 31 */
/* Reg mapping structure */
/*
 * EMIF memory-mapped registers: u32 fields mirror the hardware register
 * layout, so field order matters. NOTE(review): confirm offsets against the
 * SoC TRM before adding, removing or reordering members.
 */
struct emif_reg_struct {
	/* SDRAM / NVM configuration */
	u32 emif_sdram_config;
	u32 emif_lpddr2_nvm_config;
	/* refresh control (+ shadow) */
	u32 emif_sdram_ref_ctrl;
	u32 emif_sdram_ref_ctrl_shdw;
	/* SDRAM / NVM timing (each with its shadow register) */
	u32 emif_sdram_tim_1;
	u32 emif_sdram_tim_1_shdw;
	u32 emif_sdram_tim_2;
	u32 emif_sdram_tim_2_shdw;
	u32 emif_sdram_tim_3;
	u32 emif_sdram_tim_3_shdw;
	u32 emif_lpddr2_nvm_tim;
	u32 emif_lpddr2_nvm_tim_shdw;
	/* power management */
	u32 emif_pwr_mgmt_ctrl;
	u32 emif_pwr_mgmt_ctrl_shdw;
	/* LPDDR2 mode register access */
	u32 emif_lpddr2_mode_reg_data;
	u32 emif_lpddr2_mode_reg_data_es2;
	u32 emif_lpddr2_mode_reg_cfg;
	/* L3 interface configuration */
	u32 emif_l3_cfg_val_1;
	u32 emif_l3_cfg_val_2;
	/* performance counters */
	u32 emif_perf_cnt_cfg;
	u32 emif_perf_cnt_sel;
	u32 emif_perf_cnt_tim;
	/* read idle control (+ shadow) */
	u32 emif_read_idlectrl;
	u32 emif_read_idlectrl_shdw;
	/* interrupt status / enable */
	u32 emif_irqstatus_raw_sys;
	u32 emif_irqstatus_raw_ll;
	u32 emif_irqstatus_sys;
	u32 emif_irqstatus_ll;
	u32 emif_irqenable_set_sys;
	u32 emif_irqenable_set_ll;
	u32 emif_irqenable_clr_sys;
	u32 emif_irqenable_clr_ll;
	/* temperature alert */
	u32 emif_temp_alert_config;
	/* read/write leveling */
	u32 emif_rd_wr_lvl_rmp_win;
	u32 emif_rd_wr_lvl_rmp_ctl;
	u32 emif_rd_wr_lvl_ctl;
	/* DDR PHY control */
	u32 emif_ddr_phy_ctrl_1;
	u32 emif_ddr_phy_ctrl_1_shdw;
	u32 emif_ddr_phy_ctrl_2;
	/* priority / connection-ID service mapping */
	u32 emif_prio_class_serv_map;
	u32 emif_connect_id_serv_1_map;
	u32 emif_connect_id_serv_2_map;
	u32 emif_rd_wr_exec_thresh;
	/* DDR PHY status registers */
	u32 emif_ddr_phy_status[28];
	/* extended PHY control (each with its shadow register) */
	u32 emif_ddr_ext_phy_ctrl_1;
	u32 emif_ddr_ext_phy_ctrl_1_shdw;
	u32 emif_ddr_ext_phy_ctrl_2;
	u32 emif_ddr_ext_phy_ctrl_2_shdw;
	u32 emif_ddr_ext_phy_ctrl_3;
	u32 emif_ddr_ext_phy_ctrl_3_shdw;
	u32 emif_ddr_ext_phy_ctrl_4;
	u32 emif_ddr_ext_phy_ctrl_4_shdw;
	u32 emif_ddr_ext_phy_ctrl_5;
	u32 emif_ddr_ext_phy_ctrl_5_shdw;
	u32 emif_ddr_ext_phy_ctrl_6;
	u32 emif_ddr_ext_phy_ctrl_6_shdw;
	u32 emif_ddr_ext_phy_ctrl_7;
	u32 emif_ddr_ext_phy_ctrl_7_shdw;
	u32 emif_ddr_ext_phy_ctrl_8;
	u32 emif_ddr_ext_phy_ctrl_8_shdw;
	u32 emif_ddr_ext_phy_ctrl_9;
	u32 emif_ddr_ext_phy_ctrl_9_shdw;
	u32 emif_ddr_ext_phy_ctrl_10;
	u32 emif_ddr_ext_phy_ctrl_10_shdw;
	u32 emif_ddr_ext_phy_ctrl_11;
	u32 emif_ddr_ext_phy_ctrl_11_shdw;
	u32 emif_ddr_ext_phy_ctrl_12;
	u32 emif_ddr_ext_phy_ctrl_12_shdw;
	u32 emif_ddr_ext_phy_ctrl_13;
	u32 emif_ddr_ext_phy_ctrl_13_shdw;
	u32 emif_ddr_ext_phy_ctrl_14;
	u32 emif_ddr_ext_phy_ctrl_14_shdw;
	u32 emif_ddr_ext_phy_ctrl_15;
	u32 emif_ddr_ext_phy_ctrl_15_shdw;
	u32 emif_ddr_ext_phy_ctrl_16;
	u32 emif_ddr_ext_phy_ctrl_16_shdw;
	u32 emif_ddr_ext_phy_ctrl_17;
	u32 emif_ddr_ext_phy_ctrl_17_shdw;
	u32 emif_ddr_ext_phy_ctrl_18;
	u32 emif_ddr_ext_phy_ctrl_18_shdw;
	u32 emif_ddr_ext_phy_ctrl_19;
	u32 emif_ddr_ext_phy_ctrl_19_shdw;
	u32 emif_ddr_ext_phy_ctrl_20;
	u32 emif_ddr_ext_phy_ctrl_20_shdw;
	u32 emif_ddr_ext_phy_ctrl_21;
	u32 emif_ddr_ext_phy_ctrl_21_shdw;
	u32 emif_ddr_ext_phy_ctrl_22;
	u32 emif_ddr_ext_phy_ctrl_22_shdw;
	u32 emif_ddr_ext_phy_ctrl_23;
	u32 emif_ddr_ext_phy_ctrl_23_shdw;
	u32 emif_ddr_ext_phy_ctrl_24;
	u32 emif_ddr_ext_phy_ctrl_24_shdw;
	u32 emif_ddr_ext_phy_ctrl_25;
	u32 emif_ddr_ext_phy_ctrl_25_shdw;
	u32 emif_ddr_ext_phy_ctrl_26;
	u32 emif_ddr_ext_phy_ctrl_26_shdw;
	u32 emif_ddr_ext_phy_ctrl_27;
	u32 emif_ddr_ext_phy_ctrl_27_shdw;
	u32 emif_ddr_ext_phy_ctrl_28;
	u32 emif_ddr_ext_phy_ctrl_28_shdw;
	u32 emif_ddr_ext_phy_ctrl_29;
	u32 emif_ddr_ext_phy_ctrl_29_shdw;
	u32 emif_ddr_ext_phy_ctrl_30;
	u32 emif_ddr_ext_phy_ctrl_30_shdw;
	u32 emif_ddr_ext_phy_ctrl_31;
	u32 emif_ddr_ext_phy_ctrl_31_shdw;
	u32 emif_ddr_ext_phy_ctrl_32;
	u32 emif_ddr_ext_phy_ctrl_32_shdw;
	u32 emif_ddr_ext_phy_ctrl_33;
	u32 emif_ddr_ext_phy_ctrl_33_shdw;
	u32 emif_ddr_ext_phy_ctrl_34;
	u32 emif_ddr_ext_phy_ctrl_34_shdw;
	u32 emif_ddr_ext_phy_ctrl_35;
	u32 emif_ddr_ext_phy_ctrl_35_shdw;
	u32 emif_ddr_ext_phy_ctrl_36;
	u32 emif_ddr_fifo_misaligned_clear_1;
	u32 emif_ddr_ext_phy_ctrl_36_shdw;
	u32 emif_ddr_fifo_misaligned_clear_2;
/* DMM LISA section-mapping register block */
struct dmm_lisa_map_regs {
/* The maximum frequency at which the LPDDR2 interface can operate in Hz*/
#define MAX_LPDDR2_FREQ 400000000 /* 400 MHz */
 * The period of DDR clk is represented as numerator and denominator for
 * better accuracy in integer based calculations. However, if the numerator
 * and denominator are very huge there may be chances of overflow in
 * calculations. So, as a trade-off keep denominator(and consequently
 * numerator) within a limit sacrificing some accuracy - but not much
 * If denominator and numerator are already small (such as at 400 MHz)
 * no adjustment is needed
#define EMIF_PERIOD_DEN_LIMIT 1000
 * Maximum number of different frequencies supported by EMIF driver
 * Determines the number of entries in the pointer array for register
#define EMIF_MAX_NUM_FREQUENCIES 6
 * Indices into the Addressing Table array.
 * One entry each for all the different types of devices with different
#define ADDR_TABLE_INDEX64M 0
#define ADDR_TABLE_INDEX128M 1
#define ADDR_TABLE_INDEX256M 2
#define ADDR_TABLE_INDEX512M 3
#define ADDR_TABLE_INDEX1GS4 4
#define ADDR_TABLE_INDEX2GS4 5
#define ADDR_TABLE_INDEX4G 6
#define ADDR_TABLE_INDEX8G 7
#define ADDR_TABLE_INDEX1GS2 8
#define ADDR_TABLE_INDEX2GS2 9
#define ADDR_TABLE_INDEXMAX 10
/* Number of Row bits */
/* Number of Column bits */
#define COL_7 4 /*Not supported by OMAP included for completeness */
/* Refresh rate in micro seconds x 10 */
#define T_REFI_15_6 156
#define T_REFI_7_8 78
#define T_REFI_3_9 39
/* External-bank chip-select 1 enable values */
#define EBANK_CS1_DIS 0
#define EBANK_CS1_EN 1
/* Read Latency used by the device at reset */
/* Read Latency for the highest frequency you want to use */
#ifdef CONFIG_OMAP54XX
/* Interleaving policies at EMIF level- between banks and Chip Selects */
#define EMIF_INTERLEAVING_POLICY_MAX_INTERLEAVING 0
#define EMIF_INTERLEAVING_POLICY_NO_BANK_INTERLEAVING 3
 * Interleaving policy to be used
 * Currently set to MAX interleaving for better performance
#define EMIF_INTERLEAVING_POLICY EMIF_INTERLEAVING_POLICY_MAX_INTERLEAVING
/* State of the core voltage:
 * This is important for some parameters such as read idle control and
 * ZQ calibration timings. Timings are much stricter when voltage ramp
 * is happening compared to when the voltage is stable.
 * We need to calculate two sets of values for these parameters and use
#define LPDDR2_VOLTAGE_STABLE 0
#define LPDDR2_VOLTAGE_RAMPING 1
/* Length of the forced read idle period in terms of cycles */
#define EMIF_REG_READ_IDLE_LEN_VAL 5
/* Interval between forced 'read idles' */
/* To be used when voltage is changed for DPS/DVFS - 1us */
#define READ_IDLE_INTERVAL_DVFS (1*1000)
 * To be used when voltage is not scaled except by Smart Reflex
 * 50us - or maximum value will do
#define READ_IDLE_INTERVAL_NORMAL (50*1000)
 * Unless voltage is changing due to DVFS one ZQCS command every 50ms should
 * be enough. This should be enough also in the case when voltage is changing
 * due to smart-reflex.
#define EMIF_ZQCS_INTERVAL_NORMAL_IN_US (50*1000)
 * If voltage is changing due to DVFS ZQCS should be performed more
#define EMIF_ZQCS_INTERVAL_DVFS_IN_US 50
/* The interval between ZQCL commands as a multiple of ZQCS interval */
#define REG_ZQ_ZQCL_MULT 4
/* The interval between ZQINIT commands as a multiple of ZQCL interval */
#define REG_ZQ_ZQINIT_MULT 3
/* Enable ZQ Calibration on exiting Self-refresh */
#define REG_ZQ_SFEXITEN_ENABLE 1
 * ZQ Calibration simultaneously on both chip-selects:
 * Needs one calibration resistor per CS
 * None of the boards that we know of have this capability
 * So disabled by default
#define REG_ZQ_DUALCALEN_DISABLE 0
 * Enable ZQ Calibration by default on CS0. If we are asked to program
 * the EMIF there will be something connected to CS0 for sure
#define REG_ZQ_CS0EN_ENABLE 1
/* EMIF_PWR_MGMT_CTRL register */
/* Low power modes */
#define LP_MODE_DISABLE 0
#define LP_MODE_CLOCK_STOP 1
#define LP_MODE_SELF_REFRESH 2
#define LP_MODE_PWR_DN 3
#define DPD_DISABLE 0
/* Maximum delay before Low Power Modes */
#define REG_CS_TIM 0x0
#define REG_SR_TIM 0x0
#define REG_PD_TIM 0x0
/* EMIF_PWR_MGMT_CTRL register */
/*
 * Register value composed from the REG_*_TIM timers and the *_DISABLE
 * settings defined above (all zero here, so every low-power feature is off).
 * A stray trailing line-continuation after the closing parenthesis was
 * removed — it could splice the following directive into this macro.
 */
#define EMIF_PWR_MGMT_CTRL (\
	((REG_CS_TIM << EMIF_REG_CS_TIM_SHIFT) & EMIF_REG_CS_TIM_MASK)|\
	((REG_SR_TIM << EMIF_REG_SR_TIM_SHIFT) & EMIF_REG_SR_TIM_MASK)|\
	((REG_PD_TIM << EMIF_REG_PD_TIM_SHIFT) & EMIF_REG_PD_TIM_MASK)|\
	((LP_MODE_DISABLE << EMIF_REG_LP_MODE_SHIFT)\
	& EMIF_REG_LP_MODE_MASK) |\
	((DPD_DISABLE << EMIF_REG_DPD_EN_SHIFT)\
	& EMIF_REG_DPD_EN_MASK))

/* Shadow-register counterpart (timer fields only) */
#define EMIF_PWR_MGMT_CTRL_SHDW (\
	((REG_CS_TIM << EMIF_REG_CS_TIM_SHDW_SHIFT)\
	& EMIF_REG_CS_TIM_SHDW_MASK) |\
	((REG_SR_TIM << EMIF_REG_SR_TIM_SHDW_SHIFT)\
	& EMIF_REG_SR_TIM_SHDW_MASK) |\
	((REG_PD_TIM << EMIF_REG_PD_TIM_SHDW_SHIFT)\
	& EMIF_REG_PD_TIM_SHDW_MASK))
930 /* EMIF_L3_CONFIG register value */
931 #define EMIF_L3_CONFIG_VAL_SYS_10_LL_0 0x0A0000FF
932 #define EMIF_L3_CONFIG_VAL_SYS_10_MPU_3_LL_0 0x0A300000
933 #define EMIF_L3_CONFIG_VAL_SYS_10_MPU_5_LL_0 0x0A500000
936 * Value of bits 12:31 of DDR_PHY_CTRL_1 register:
937 * All these fields have magic values dependent on frequency and
938 * determined by PHY and DLL integration with EMIF. Setting the magic
939 * values suggested by hw team.
941 #define EMIF_DDR_PHY_CTRL_1_BASE_VAL 0x049FF
942 #define EMIF_DLL_SLAVE_DLY_CTRL_400_MHZ 0x41
943 #define EMIF_DLL_SLAVE_DLY_CTRL_200_MHZ 0x80
944 #define EMIF_DLL_SLAVE_DLY_CTRL_100_MHZ_AND_LESS 0xFF
949 * Burst type : sequential
951 * nWR : 3(default). EMIF does not do pre-charge.
952 * : So nWR is don't care
954 #define MR1_BL_8_BT_SEQ_WRAP_EN_NWR_3 0x23
955 #define MR1_BL_8_BT_SEQ_WRAP_EN_NWR_8 0xc3
958 #define MR2_RL3_WL1 1
959 #define MR2_RL4_WL2 2
960 #define MR2_RL5_WL2 3
961 #define MR2_RL6_WL3 4
963 /* MR10: ZQ calibration codes */
964 #define MR10_ZQ_ZQCS 0x56
965 #define MR10_ZQ_ZQCL 0xAB
966 #define MR10_ZQ_ZQINIT 0xFF
967 #define MR10_ZQ_ZQRESET 0xC3
969 /* TEMP_ALERT_CONFIG */
970 #define TEMP_ALERT_POLL_INTERVAL_MS 360 /* for temp gradient - 5 C/s */
971 #define TEMP_ALERT_CONFIG_DEVCT_1 0
972 #define TEMP_ALERT_CONFIG_DEVWDT_32 2
974 /* MR16 value: refresh full array(no partial array self refresh) */
975 #define MR16_REF_FULL_ARRAY 0
978 * Maximum number of entries we keep in our array of timing tables
979 * We need not keep all the speed bins supported by the device
980 * We need to keep timing tables for only the speed bins that we
983 #define MAX_NUM_SPEEDBINS 4
985 /* LPDDR2 Densities */
/* Encoded density codes as reported in the MR8 DENSITY field (see
 * MR8_DENSITY_MASK below). */
986 #define LPDDR2_DENSITY_64Mb 0
987 #define LPDDR2_DENSITY_128Mb 1
988 #define LPDDR2_DENSITY_256Mb 2
989 #define LPDDR2_DENSITY_512Mb 3
990 #define LPDDR2_DENSITY_1Gb 4
991 #define LPDDR2_DENSITY_2Gb 5
992 #define LPDDR2_DENSITY_4Gb 6
993 #define LPDDR2_DENSITY_8Gb 7
994 #define LPDDR2_DENSITY_16Gb 8
995 #define LPDDR2_DENSITY_32Gb 9
/* LPDDR2 device types (MR8 TYPE field encoding) */
998 #define LPDDR2_TYPE_S4 0
999 #define LPDDR2_TYPE_S2 1
1000 #define LPDDR2_TYPE_NVM 2
1002 /* LPDDR2 IO width */
1003 #define LPDDR2_IO_WIDTH_32 0
1004 #define LPDDR2_IO_WIDTH_16 1
1005 #define LPDDR2_IO_WIDTH_8 2
1007 /* Mode register numbers */
1008 #define LPDDR2_MR0 0
1009 #define LPDDR2_MR1 1
1010 #define LPDDR2_MR2 2
1011 #define LPDDR2_MR3 3
1012 #define LPDDR2_MR4 4
1013 #define LPDDR2_MR5 5
1014 #define LPDDR2_MR6 6
1015 #define LPDDR2_MR7 7
1016 #define LPDDR2_MR8 8
1017 #define LPDDR2_MR9 9
1018 #define LPDDR2_MR10 10
1019 #define LPDDR2_MR11 11
1020 #define LPDDR2_MR16 16
1021 #define LPDDR2_MR17 17
1022 #define LPDDR2_MR18 18
/* MR0 status fields: DAI (bit 0), DI (bit 1), DNVI (bit 2).
 * NOTE(review): field meanings (device auto-init / NVM status) are inferred
 * from the names — confirm against JESD209-2. */
1025 #define LPDDR2_MR0_DAI_SHIFT 0
1026 #define LPDDR2_MR0_DAI_MASK 1
1027 #define LPDDR2_MR0_DI_SHIFT 1
1028 #define LPDDR2_MR0_DI_MASK (1 << 1)
1029 #define LPDDR2_MR0_DNVI_SHIFT 2
1030 #define LPDDR2_MR0_DNVI_MASK (1 << 2)
/* MR4 fields: refresh-rate code in bits 2:0, temperature-update flag
 * (TUF) in bit 7. */
1033 #define MR4_SDRAM_REF_RATE_SHIFT 0
1034 #define MR4_SDRAM_REF_RATE_MASK 7
1035 #define MR4_TUF_SHIFT 7
1036 #define MR4_TUF_MASK (1 << 7)
1038 /* MR4 SDRAM Refresh Rate field values */
/* Codes 0x0..0x7 go from coldest (shutdown) through nominal (0x3) to
 * hottest (shutdown); intermediate codes request refresh/timing derating. */
1039 #define SDRAM_TEMP_LESS_LOW_SHUTDOWN 0x0
1040 #define SDRAM_TEMP_LESS_4X_REFRESH_AND_TIMINGS 0x1
1041 #define SDRAM_TEMP_LESS_2X_REFRESH_AND_TIMINGS 0x2
1042 #define SDRAM_TEMP_NOMINAL 0x3
1043 #define SDRAM_TEMP_RESERVED_4 0x4
1044 #define SDRAM_TEMP_HIGH_DERATE_REFRESH 0x5
1045 #define SDRAM_TEMP_HIGH_DERATE_REFRESH_AND_TIMINGS 0x6
1046 #define SDRAM_TEMP_VERY_HIGH_SHUTDOWN 0x7
/* LPDDR2 manufacturer ID codes (presumably read from MR5 — verify against
 * JESD209-2 before relying on the source register). */
1048 #define LPDDR2_MANUFACTURER_SAMSUNG 1
1049 #define LPDDR2_MANUFACTURER_QIMONDA 2
1050 #define LPDDR2_MANUFACTURER_ELPIDA 3
1051 #define LPDDR2_MANUFACTURER_ETRON 4
1052 #define LPDDR2_MANUFACTURER_NANYA 5
1053 #define LPDDR2_MANUFACTURER_HYNIX 6
1054 #define LPDDR2_MANUFACTURER_MOSEL 7
1055 #define LPDDR2_MANUFACTURER_WINBOND 8
1056 #define LPDDR2_MANUFACTURER_ESMT 9
1057 #define LPDDR2_MANUFACTURER_SPANSION 11
1058 #define LPDDR2_MANUFACTURER_SST 12
1059 #define LPDDR2_MANUFACTURER_ZMOS 13
1060 #define LPDDR2_MANUFACTURER_INTEL 14
1061 #define LPDDR2_MANUFACTURER_NUMONYX 254
1062 #define LPDDR2_MANUFACTURER_MICRON 255
1064 /* MR8 register fields */
/* Basic-configuration readout: TYPE bits 1:0, DENSITY bits 5:2,
 * IO_WIDTH bits 7:6 — decode with the LPDDR2_TYPE_*, LPDDR2_DENSITY_*
 * and LPDDR2_IO_WIDTH_* values above. */
1065 #define MR8_TYPE_SHIFT 0x0
1066 #define MR8_TYPE_MASK 0x3
1067 #define MR8_DENSITY_SHIFT 0x2
1068 #define MR8_DENSITY_MASK (0xF << 0x2)
1069 #define MR8_IO_WIDTH_SHIFT 0x6
1070 #define MR8_IO_WIDTH_MASK (0x3 << 0x6)
/* SDRAM types as encoded in the EMIF SDRAM_CONFIG register (same encoding
 * as EMIF_REG_SDRAM_TYPE_* near the top of this file). */
1073 #define EMIF_SDRAM_TYPE_DDR2 0x2
1074 #define EMIF_SDRAM_TYPE_DDR3 0x3
1075 #define EMIF_SDRAM_TYPE_LPDDR2 0x4
/* Row/column addressing geometry of an LPDDR2 part; further fields of
 * this struct are not visible in this view. */
1077 struct lpddr2_addressing {
1080 u8 row_sz[2]; /* One entry each for x32 and x16 */
1081 u8 col_sz[2]; /* One entry each for x32 and x16 */
1084 /* Structure for timings from the DDR datasheet */
1085 struct lpddr2_ac_timings {
1110 * Min tCK values for some of the parameters:
1111 * If the calculated clock cycles for the respective parameter is
1112 * less than the corresponding min tCK value, we need to set the min
1113 * tCK value. This may happen at lower frequencies.
1115 struct lpddr2_min_tck {
/* Identification of one LPDDR2 device (fields not visible in this view;
 * presumably type/density/io-width per the MR8 decode above). */
1130 struct lpddr2_device_details {
/* Timing data for one device: NULL-terminated(?) list of per-speed-bin AC
 * timings plus the min-tCK limits — TODO confirm list termination. */
1137 struct lpddr2_device_timings {
1138 const struct lpddr2_ac_timings **ac_timings;
1139 const struct lpddr2_min_tck *min_tck;
1142 /* Details of the devices connected to each chip-select of an EMIF instance */
1143 struct emif_device_details {
1144 const struct lpddr2_device_details *cs0_device_details;
1145 const struct lpddr2_device_details *cs1_device_details;
1146 const struct lpddr2_device_timings *cs0_device_timings;
1147 const struct lpddr2_device_timings *cs1_device_timings;
1151 * Structure containing shadow of important registers in EMIF
1152 * The calculation function fills in this structure to be later used for
1153 * initialization and DVFS
/* SDRAM_CONFIG value used for the initial configuration */
1157 u32 sdram_config_init;
1167 u32 temp_alert_config;
/* DDR PHY control: separate init-time and run-time values */
1168 u32 emif_ddr_phy_ctlr_1_init;
1169 u32 emif_ddr_phy_ctlr_1;
1170 u32 emif_ddr_ext_phy_ctrl_1;
1171 u32 emif_ddr_ext_phy_ctrl_2;
1172 u32 emif_ddr_ext_phy_ctrl_3;
1173 u32 emif_ddr_ext_phy_ctrl_4;
1174 u32 emif_ddr_ext_phy_ctrl_5;
/* Read/write leveling control values */
1175 u32 emif_rd_wr_lvl_rmp_win;
1176 u32 emif_rd_wr_lvl_rmp_ctl;
1177 u32 emif_rd_wr_lvl_ctl;
1178 u32 emif_rd_wr_exec_thresh;
/* Class-of-service / priority mapping values */
1179 u32 emif_prio_class_serv_map;
1180 u32 emif_connect_id_serv_1_map;
1181 u32 emif_connect_id_serv_2_map;
1182 u32 emif_cos_config;
/* Set of LPDDR2 mode-register values (members not visible in this view) */
1185 struct lpddr2_mr_regs {
/* Register address/value pairs(?) for workaround accesses — members not
 * visible here; see get_bug_regs() below. */
1193 struct read_write_regs {
/*
 * get_emif_rev() - read the module ID/revision register of the EMIF
 * instance at 'base' and return its major-revision field (bits 10:8).
 */
1198 static inline u32 get_emif_rev(u32 base)
1200 struct emif_reg_struct *emif = (struct emif_reg_struct *)base;
1202 return (readl(&emif->emif_mod_id_rev) & EMIF_REG_MAJOR_REVISION_MASK)
1203 >> EMIF_REG_MAJOR_REVISION_SHIFT;
1207 * Get SDRAM type connected to EMIF.
1208 * Assuming similar SDRAM parts are connected to both EMIF's
1209 * which is typically the case. So it is sufficient to get
1210 * SDRAM type from EMIF1.
/* Extracts the SDRAM_TYPE field (bits 31:29) from a SDRAM_CONFIG value;
 * compare the result against the EMIF_SDRAM_TYPE_* values above. */
1212 static inline u32 emif_sdram_type(u32 sdram_config)
1214 return (sdram_config & EMIF_REG_SDRAM_TYPE_MASK)
1215 >> EMIF_REG_SDRAM_TYPE_SHIFT;
/* Debug variant: spin forever if the condition fails (selected by a
 * preprocessor conditional not visible in this view). */
1220 #define emif_assert(c) ({ if (!(c)) for (;;); })
/* Release variant: assertion compiled out — note the condition 'c' is
 * never evaluated, so it must be side-effect free. */
1222 #define emif_assert(c) ({ if (0) hang(); })
1225 #ifdef CONFIG_SYS_EMIF_PRECALCULATED_TIMING_REGS
/* Board code supplies fully precomputed EMIF/DMM register images */
1226 void emif_get_reg_dump(u32 emif_nr, const struct emif_regs **regs);
1227 void emif_get_dmm_regs(const struct dmm_lisa_map_regs **dmm_lisa_regs);
/* Otherwise, board code describes the devices and timings are calculated */
1229 struct lpddr2_device_details *emif_get_device_details(u32 emif_nr, u8 cs,
1230 struct lpddr2_device_details *lpddr2_dev_details);
1231 void emif_get_device_timings(u32 emif_nr,
1232 const struct lpddr2_device_timings **cs0_device_timings,
1233 const struct lpddr2_device_timings **cs1_device_timings);
1236 void do_ext_phy_settings(u32 base, const struct emif_regs *regs);
1237 void get_lpddr2_mr_regs(const struct lpddr2_mr_regs **regs);
1239 #ifndef CONFIG_SYS_EMIF_PRECALCULATED_TIMING_REGS
/* Numerator/denominator of the DDR clock-period fraction used by the
 * timing calculations — defined in the EMIF common code. */
1240 extern u32 *const T_num;
1241 extern u32 *const T_den;
1244 void config_data_eye_leveling_samples(u32 emif_base);
/* Returns a table of register accesses for an errata workaround and the
 * number of iterations to apply them — see struct read_write_regs above. */
1245 const struct read_write_regs *get_bug_regs(u32 *iterations);