1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) Marvell International Ltd. and its affiliates
9 #include <asm/arch/cpu.h>
10 #include <asm/arch/soc.h>
12 #include "ddr3_init.h"
/* Canonical alternating DQ test words (0101... / 1010...). */
14 #define PATTERN_1 0x55555555
15 #define PATTERN_2 0xaaaaaaaa
/*
 * Evaluates to 1 when the (e1, e2) tap window looks suspicious: wider
 * than 33 taps AND starting below tap 67.  Used by the training wrapper
 * to flag "problem bits" for re-training.
 * NOTE(review): the 33/67 limits look empirical - confirm against the
 * training-IP spec before changing.
 */
17 #define VALIDATE_TRAINING_LIMIT(e1, e2) \
18 ((((e2) - (e1) + 1) > 33) && ((e1) < 67))
/* Backup storage for PHY registers, per interface / bus / bit. */
20 u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
/*
 * Flat result buffer for all search directions; slices of it are handed
 * out by ddr3_tip_get_buf_ptr() (search-direction-major, then interface).
 */
22 u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
23 HWS_SEARCH_DIR_LIMIT];
/*
 * Per-DQ result-control registers, pup-major order: 5 pups x 8 bits.
 * Index = pup * 8 + bit.
 * NOTE(review): closing brace of this initializer is not visible in this
 * extract.
 */
25 u16 mask_results_dq_reg_map[] = {
26 RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
27 RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
28 RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
29 RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
30 RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
31 RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
32 RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
33 RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
34 RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
35 RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
36 RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
37 RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
38 RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
39 RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
40 RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
41 RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
42 RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
43 RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
44 RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
45 RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
/* Per-byte (whole-pup) result-control registers, one entry per pup. */
48 u16 mask_results_pup_reg_map[] = {
49 RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
50 RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
51 RESULT_CONTROL_BYTE_PUP_4_REG
/*
 * Per-DQ result map for topologies where pup 3 carries ECC: the pup 3
 * slot is redirected to the PUP_4 result registers, so both slot 3 and
 * slot 4 read PUP_4.
 * NOTE(review): presumably intentional (ECC byte shares pup 4's result
 * lane) - confirm against the topology/ECC wiring before "fixing".
 */
54 u16 mask_results_dq_reg_map_pup3_ecc[] = {
55 RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
56 RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
57 RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
58 RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
59 RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
60 RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
61 RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
62 RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
63 RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
64 RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
65 RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
66 RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
67 RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
68 RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
69 RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
70 RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
71 RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
72 RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
73 RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
74 RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
/*
 * Per-byte result map for ECC-on-pup3 topologies; slot 3 reuses the
 * PUP_4 byte-result register (see the DQ map above for the same scheme).
 */
77 u16 mask_results_pup_reg_map_pup3_ecc[] = {
78 RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
79 RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
80 RESULT_CONTROL_BYTE_PUP_4_REG
/*
 * ODPG pattern descriptors for 16-bit bus width.  Field order per row is
 * given in the comment below; indexed by enum hws_pattern.
 */
83 struct pattern_info pattern_table_16[] = {
85 * num tx phases, tx burst, delay between, rx pattern,
86 * start_address, pattern_len
88 {1, 1, 2, 1, 0x0080, 2}, /* PATTERN_PBS1 */
89 {1, 1, 2, 1, 0x00c0, 2}, /* PATTERN_PBS2 */
90 {1, 1, 2, 1, 0x0100, 2}, /* PATTERN_RL */
91 {0xf, 0x7, 2, 0x7, 0x0140, 16}, /* PATTERN_STATIC_PBS */
92 {0xf, 0x7, 2, 0x7, 0x0190, 16}, /* PATTERN_KILLER_DQ0 */
93 {0xf, 0x7, 2, 0x7, 0x01d0, 16}, /* PATTERN_KILLER_DQ1 */
94 {0xf, 0x7, 2, 0x7, 0x0210, 16}, /* PATTERN_KILLER_DQ2 */
95 {0xf, 0x7, 2, 0x7, 0x0250, 16}, /* PATTERN_KILLER_DQ3 */
96 {0xf, 0x7, 2, 0x7, 0x0290, 16}, /* PATTERN_KILLER_DQ4 */
97 {0xf, 0x7, 2, 0x7, 0x02d0, 16}, /* PATTERN_KILLER_DQ5 */
98 {0xf, 0x7, 2, 0x7, 0x0310, 16}, /* PATTERN_KILLER_DQ6 */
99 {0xf, 0x7, 2, 0x7, 0x0350, 16}, /* PATTERN_KILLER_DQ7 */
100 {1, 1, 2, 1, 0x0380, 2}, /* PATTERN_PBS3 */
101 {1, 1, 2, 1, 0x0000, 2}, /* PATTERN_RL2 */
102 {1, 1, 2, 1, 0x0040, 2}, /* PATTERN_TEST */
103 {0xf, 0x7, 2, 0x7, 0x03c0, 16}, /* PATTERN_FULL_SSO_1T */
104 {0xf, 0x7, 2, 0x7, 0x0400, 16}, /* PATTERN_FULL_SSO_2T */
105 {0xf, 0x7, 2, 0x7, 0x0440, 16}, /* PATTERN_FULL_SSO_3T */
106 {0xf, 0x7, 2, 0x7, 0x0480, 16}, /* PATTERN_FULL_SSO_4T */
107 {0xf, 0x7, 2, 0x7, 0x04c0, 16} /* PATTERN_VREF */
108 /*Note: actual start_address is <<3 of defined addess */
/*
 * ODPG pattern descriptors for 32-bit bus width.  Same layout as
 * pattern_table_16, with doubled phase counts and pattern lengths.
 */
111 struct pattern_info pattern_table_32[] = {
113 * num tx phases, tx burst, delay between, rx pattern,
114 * start_address, pattern_len
116 {3, 3, 2, 3, 0x0080, 4}, /* PATTERN_PBS1 */
117 {3, 3, 2, 3, 0x00c0, 4}, /* PATTERN_PBS2 */
118 {3, 3, 2, 3, 0x0100, 4}, /* PATTERN_RL */
119 {0x1f, 0xf, 2, 0xf, 0x0140, 32}, /* PATTERN_STATIC_PBS */
120 {0x1f, 0xf, 2, 0xf, 0x0190, 32}, /* PATTERN_KILLER_DQ0 */
121 {0x1f, 0xf, 2, 0xf, 0x01d0, 32}, /* PATTERN_KILLER_DQ1 */
122 {0x1f, 0xf, 2, 0xf, 0x0210, 32}, /* PATTERN_KILLER_DQ2 */
123 {0x1f, 0xf, 2, 0xf, 0x0250, 32}, /* PATTERN_KILLER_DQ3 */
124 {0x1f, 0xf, 2, 0xf, 0x0290, 32}, /* PATTERN_KILLER_DQ4 */
125 {0x1f, 0xf, 2, 0xf, 0x02d0, 32}, /* PATTERN_KILLER_DQ5 */
126 {0x1f, 0xf, 2, 0xf, 0x0310, 32}, /* PATTERN_KILLER_DQ6 */
127 {0x1f, 0xf, 2, 0xf, 0x0350, 32}, /* PATTERN_KILLER_DQ7 */
128 {3, 3, 2, 3, 0x0380, 4}, /* PATTERN_PBS3 */
129 {3, 3, 2, 3, 0x0000, 4}, /* PATTERN_RL2 */
130 {3, 3, 2, 3, 0x0040, 4}, /* PATTERN_TEST */
131 {0x1f, 0xf, 2, 0xf, 0x03c0, 32}, /* PATTERN_FULL_SSO_1T */
132 {0x1f, 0xf, 2, 0xf, 0x0400, 32}, /* PATTERN_FULL_SSO_2T */
133 {0x1f, 0xf, 2, 0xf, 0x0440, 32}, /* PATTERN_FULL_SSO_3T */
134 {0x1f, 0xf, 2, 0xf, 0x0480, 32}, /* PATTERN_FULL_SSO_4T */
135 {0x1f, 0xf, 2, 0xf, 0x04c0, 32} /* PATTERN_VREF */
136 /*Note: actual start_address is <<3 of defined addess */
/*
 * Global training-state variables shared across the training engine.
 * Several names carry historical typos ("traintrain_cs_type",
 * "traine_search_dir", "train_if_acess"); they have external linkage and
 * may be referenced from other files, so they are left unrenamed.
 */
140 enum hws_ddr_cs traintrain_cs_type;
142 enum hws_training_result train_result_type;
143 enum hws_control_element train_control_element;
144 enum hws_search_dir traine_search_dir;
145 enum hws_dir train_direction;
147 u32 train_init_value;
148 u32 train_number_iterations;
149 enum hws_pattern train_pattern;
150 enum hws_edge_compare train_edge_compare;
152 u32 train_if_acess, train_if_id, train_pup_access;
/* Upper bound on "done" polling loops in ddr3_tip_ip_training(). */
153 u32 max_polling_for_done = 1000000;
/*
 * ddr3_tip_get_buf_ptr() - return the slice of training_res[] holding
 * the results for the given search direction and interface (the buffer
 * is search-direction-major, then interface-major).
 * NOTE(review): part of the parameter list and the function tail are
 * missing from this extract.
 */
155 u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
156 enum hws_training_result result_type,
161 buf_ptr = &training_res
162 [MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
163 interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];
/*
 * ddr3_tip_ip_training() - run one hardware training search using the
 * training IP / ODPG: program CS selection, load the pattern, configure
 * the ODPG and search parameters, mask unused result registers, trigger
 * the search and poll each active interface for done/pass/fail, filling
 * train_status[] per interface.
 * NOTE(review): this extract is missing interior lines (declarations,
 * braces, continue/return statements); comments below annotate only the
 * visible code.
 */
170 * Note: for one edge search only from fail to pass, else jitter can
171 * be entered into solution.
173 int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
175 enum hws_access_type pup_access_type,
176 u32 pup_num, enum hws_training_result result_type,
177 enum hws_control_element control_element,
178 enum hws_search_dir search_dir, enum hws_dir direction,
179 u32 interface_mask, u32 init_value, u32 num_iter,
180 enum hws_pattern pattern,
181 enum hws_edge_compare edge_comp,
182 enum hws_ddr_cs cs_type, u32 cs_num,
183 enum hws_training_ip_stat *train_status)
185 u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt, poll_cnt,
188 u32 delay_between_burst;
190 u32 read_data[MAX_INTERFACE_NUM];
191 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
192 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
193 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
194 struct hws_topology_map *tm = ddr3_get_topology_map();
/* Parameter validation: pup index, interface index, status pointer. */
196 if (pup_num >= tm->num_of_bus_per_interface) {
197 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
198 ("pup_num %d not valid\n", pup_num));
200 if (interface_num >= MAX_INTERFACE_NUM) {
201 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
202 ("if_id %d not valid\n",
205 if (train_status == NULL) {
206 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
207 ("error param 4\n"));
/* Select which chip-select(s) drive the search. */
212 if (cs_type == CS_SINGLE) {
214 CHECK_STATUS(ddr3_tip_if_write
215 (dev_num, access_type, interface_num,
216 CS_ENABLE_REG, 1 << 3, 1 << 3));
218 CHECK_STATUS(ddr3_tip_if_write
219 (dev_num, access_type, interface_num,
220 ODPG_DATA_CONTROL_REG,
221 (0x3 | (effective_cs << 26)), 0xc000003));
223 CHECK_STATUS(ddr3_tip_if_write
224 (dev_num, access_type, interface_num,
225 CS_ENABLE_REG, 0, 1 << 3));
227 CHECK_STATUS(ddr3_tip_if_write
228 (dev_num, access_type, interface_num,
229 ODPG_DATA_CONTROL_REG, 0x3 | cs_num << 26,
233 /* load pattern to ODPG */
234 ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
236 pattern_table[pattern].start_addr);
/* Tx burst/delay/read-mode only apply for write-direction training. */
237 tx_burst_size = (direction == OPER_WRITE) ?
238 pattern_table[pattern].tx_burst_size : 0;
239 delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
240 rd_mode = (direction == OPER_WRITE) ? 1 : 0;
241 CHECK_STATUS(ddr3_tip_configure_odpg
242 (dev_num, access_type, interface_num, direction,
243 pattern_table[pattern].num_of_phases_tx, tx_burst_size,
244 pattern_table[pattern].num_of_phases_rx,
245 delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
247 reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
248 reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
249 CHECK_STATUS(ddr3_tip_if_write
250 (dev_num, access_type, interface_num,
251 ODPG_WRITE_READ_MODE_ENABLE_REG, reg_data,
/* Encode the edge-compare mode into the training control word. */
253 reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
254 reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
257 /* change from Pass to Fail will lock the result */
258 if (pup_access_type == ACCESS_TYPE_MULTICAST)
259 reg_data |= 0xe << 14;
261 reg_data |= pup_num << 14;
263 if (edge_comp == EDGE_FP) {
264 /* don't search for real edge change, only the state */
265 reg_data |= (0 << 20);
266 } else if (edge_comp == EDGE_FPF) {
267 reg_data |= (0 << 20);
269 reg_data |= (3 << 20);
272 CHECK_STATUS(ddr3_tip_if_write
273 (dev_num, access_type, interface_num,
274 ODPG_TRAINING_CONTROL_REG,
275 reg_data | (0x7 << 8) | (0x7 << 11),
276 (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
277 (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
/* Search direction and initial tap value for object 1. */
278 reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
279 CHECK_STATUS(ddr3_tip_if_write
280 (dev_num, access_type, interface_num, ODPG_OBJ1_OPCODE_REG,
281 1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
282 0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));
285 * Write2_dunit(0x10b4, Number_iteration , [15:0])
286 * Max number of iterations
288 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
289 ODPG_OBJ1_ITER_CNT_REG, num_iter,
/* Pick the PHY register address the search will calibrate, based on
 * control element (PBS skew / ADLL / DQS skew) and direction. */
291 if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
292 direction == OPER_READ) {
294 * Write2_dunit(0x10c0, 0x5f , [7:0])
295 * MC PBS Reg Address at DDR PHY
298 effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
299 } else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
300 direction == OPER_WRITE) {
302 effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
303 } else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
304 direction == OPER_WRITE) {
306 * LOOP 0x00000001 + 4*n:
307 * where n (0-3) represents M_CS number
310 * Write2_dunit(0x10c0, 0x1 , [7:0])
311 * ADLL WR Reg Address at DDR PHY
313 reg_data = 1 + effective_cs * CS_REGISTER_ADDR_OFFSET;
314 } else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
315 direction == OPER_READ) {
316 /* ADLL RD Reg Address at DDR PHY */
317 reg_data = 3 + effective_cs * CS_REGISTER_ADDR_OFFSET;
318 } else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
319 direction == OPER_WRITE) {
320 /* TBD not defined in 0.5.0 requirement */
321 } else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
322 direction == OPER_READ) {
323 /* TBD not defined in 0.5.0 requirement */
326 reg_data |= (0x6 << 28);
327 CHECK_STATUS(ddr3_tip_if_write
328 (dev_num, access_type, interface_num, CALIB_OBJ_PRFA_REG,
329 reg_data | (init_value << 8),
330 0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));
332 mask_dq_num_of_regs = tm->num_of_bus_per_interface * BUS_WIDTH_IN_BITS;
333 mask_pup_num_of_regs = tm->num_of_bus_per_interface;
/* Unmask the result registers matching result_type; mask the rest
 * (bit 24 set = masked) including registers of disabled buses. */
335 if (result_type == RESULT_PER_BIT) {
336 for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
338 CHECK_STATUS(ddr3_tip_if_write
339 (dev_num, access_type, interface_num,
340 mask_results_dq_reg_map[index_cnt], 0,
344 /* Mask disabled buses */
345 for (pup_id = 0; pup_id < tm->num_of_bus_per_interface;
347 if (IS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
350 for (index_cnt = (mask_dq_num_of_regs - pup_id * 8);
352 (mask_dq_num_of_regs - (pup_id + 1) * 8);
354 CHECK_STATUS(ddr3_tip_if_write
355 (dev_num, access_type,
357 mask_results_dq_reg_map
358 [index_cnt], (1 << 24), 1 << 24));
362 for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
364 CHECK_STATUS(ddr3_tip_if_write
365 (dev_num, access_type, interface_num,
366 mask_results_pup_reg_map[index_cnt],
367 (1 << 24), 1 << 24));
369 } else if (result_type == RESULT_PER_BYTE) {
371 for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
373 CHECK_STATUS(ddr3_tip_if_write
374 (dev_num, access_type, interface_num,
375 mask_results_pup_reg_map[index_cnt], 0,
378 for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
380 CHECK_STATUS(ddr3_tip_if_write
381 (dev_num, access_type, interface_num,
382 mask_results_dq_reg_map[index_cnt],
383 (1 << 24), (1 << 24)));
387 /* Start Training Trigger */
388 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
389 ODPG_TRAINING_TRIGGER_REG, 1, 1));
390 /* wait for all RFU tests to finish (or timeout) */
391 /* WA for 16 bit mode, more investigation needed */
394 /* Training "Done ?" */
395 for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
396 if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
399 if (interface_mask & (1 << index_cnt)) {
400 /* need to check results for this Dunit */
401 for (poll_cnt = 0; poll_cnt < max_polling_for_done;
403 CHECK_STATUS(ddr3_tip_if_read
404 (dev_num, ACCESS_TYPE_UNICAST,
406 ODPG_TRAINING_STATUS_REG,
/* NOTE(review): "®_data" below looks like mis-encoded "&reg_data"
 * from the extraction - confirm against the pristine source. */
407 ®_data, MASK_ALL_BITS));
408 if ((reg_data & 0x2) != 0) {
410 train_status[index_cnt] =
411 HWS_TRAINING_IP_STATUS_SUCCESS;
416 if (poll_cnt == max_polling_for_done) {
417 train_status[index_cnt] =
418 HWS_TRAINING_IP_STATUS_TIMEOUT;
421 /* Be sure that ODPG done */
422 CHECK_STATUS(is_odpg_access_done(dev_num, index_cnt));
425 /* Write ODPG done in Dunit */
426 CHECK_STATUS(ddr3_tip_if_write
427 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
428 ODPG_STATUS_DONE_REG, 0, 0x1));
430 /* wait for all Dunit tests to finish (or timeout) */
431 /* Training "Done ?" */
432 /* Training "Pass ?" */
433 for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
434 if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
437 if (interface_mask & (1 << index_cnt)) {
438 /* need to check results for this Dunit */
439 for (poll_cnt = 0; poll_cnt < max_polling_for_done;
441 CHECK_STATUS(ddr3_tip_if_read
442 (dev_num, ACCESS_TYPE_UNICAST,
444 ODPG_TRAINING_TRIGGER_REG,
445 read_data, MASK_ALL_BITS));
446 reg_data = read_data[index_cnt];
/* Bit 1 = done; bit 2 = fail. */
447 if ((reg_data & 0x2) != 0) {
449 if ((reg_data & 0x4) == 0) {
450 train_status[index_cnt] =
451 HWS_TRAINING_IP_STATUS_SUCCESS;
453 train_status[index_cnt] =
454 HWS_TRAINING_IP_STATUS_FAIL;
460 if (poll_cnt == max_polling_for_done) {
461 train_status[index_cnt] =
462 HWS_TRAINING_IP_STATUS_TIMEOUT;
/* Restore ODPG data control to default. */
467 CHECK_STATUS(ddr3_tip_if_write
468 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
469 ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));
/*
 * ddr3_tip_load_pattern_to_odpg() - write the selected pattern, word by
 * word (low/high 32-bit halves plus address), into the ODPG pattern
 * buffer, then program the pattern base address offset.
 * NOTE(review): interior lines (body braces, return) are missing from
 * this extract.
 */
475 * Load expected Pattern to ODPG
477 int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
478 u32 if_id, enum hws_pattern pattern,
481 u32 pattern_length_cnt = 0;
482 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
484 for (pattern_length_cnt = 0;
485 pattern_length_cnt < pattern_table[pattern].pattern_len;
486 pattern_length_cnt++) {
/* Even words go to the LOW data register, odd to HIGH. */
487 CHECK_STATUS(ddr3_tip_if_write
488 (dev_num, access_type, if_id,
489 ODPG_PATTERN_DATA_LOW_REG,
490 pattern_table_get_word(dev_num, pattern,
491 (u8) (pattern_length_cnt *
492 2)), MASK_ALL_BITS));
493 CHECK_STATUS(ddr3_tip_if_write
494 (dev_num, access_type, if_id,
495 ODPG_PATTERN_DATA_HI_REG,
496 pattern_table_get_word(dev_num, pattern,
497 (u8) (pattern_length_cnt *
500 CHECK_STATUS(ddr3_tip_if_write
501 (dev_num, access_type, if_id,
502 ODPG_PATTERN_ADDR_REG, pattern_length_cnt,
506 CHECK_STATUS(ddr3_tip_if_write
507 (dev_num, access_type, if_id,
508 ODPG_PATTERN_ADDR_OFFSET_REG, load_addr, MASK_ALL_BITS));
/*
 * ddr3_tip_configure_odpg() - pack the ODPG operating parameters (tx/rx
 * phase counts, burst size, inter-burst delay, read mode, CS, stress
 * jump, pattern select) into ODPG_DATA_CONTROL_REG.
 * NOTE(review): opening brace, local declarations and return statement
 * are missing from this extract.
 */
516 int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
517 u32 if_id, enum hws_dir direction, u32 tx_phases,
518 u32 tx_burst_size, u32 rx_phases,
519 u32 delay_between_burst, u32 rd_mode, u32 cs_num,
520 u32 addr_stress_jump, u32 single_pattern)
525 data_value = ((single_pattern << 2) | (tx_phases << 5) |
526 (tx_burst_size << 11) | (delay_between_burst << 15) |
527 (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
528 (addr_stress_jump << 29));
529 ret = ddr3_tip_if_write(dev_num, access_type, if_id,
530 ODPG_DATA_CONTROL_REG, data_value, 0xaffffffc);
/*
 * ddr3_tip_process_result() - reduce the 8 per-bit search results of one
 * byte lane to a single edge value: require all bits locked, then take
 * the max (or min, per e_edge_search) tap value across the bits.
 * NOTE(review): interior lines (braces, assignments, return) are missing
 * from this extract.
 */
537 int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
538 enum hws_edge_search e_edge_search,
542 int tap_val, max_val = -10000, min_val = 10000;
543 int lock_success = 1;
/* First pass: verify every bit of the lane locked. */
545 for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
546 res = GET_LOCK_RESULT(ar_result[i]);
551 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
552 ("lock failed for bit %d\n", i));
/* Second pass: track min/max tap values and report the requested edge. */
555 if (lock_success == 1) {
556 for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
557 tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
558 if (tap_val > max_val)
560 if (tap_val < min_val)
562 if (e_edge_search == TRAINING_EDGE_MAX)
563 *edge_result = (u32) max_val;
565 *edge_result = (u32) min_val;
567 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
568 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
569 i, ar_result[i], tap_val,
/*
 * ddr3_tip_read_training_result() - unload training results from the
 * result-control registers (or from the cached DB) for the requested
 * pups/bits, optionally returning a pointer to the result slice via
 * *load_res.
 * NOTE(review): interior lines (braces, continue/return statements, some
 * expressions) are missing from this extract.
 */
581 * Read training search result
583 int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
584 enum hws_access_type pup_access_type,
585 u32 pup_num, u32 bit_num,
586 enum hws_search_dir search,
587 enum hws_dir direction,
588 enum hws_training_result result_type,
589 enum hws_training_load_op operation,
590 u32 cs_num_type, u32 **load_res,
591 int is_read_from_db, u8 cons_tap,
592 int is_check_result_validity)
594 u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
595 u32 *interface_train_res = NULL;
596 u16 *reg_addr = NULL;
597 u32 read_data[MAX_INTERFACE_NUM];
598 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
599 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
600 struct hws_topology_map *tm = ddr3_get_topology_map();
603 * Agreed assumption: all CS mask contain same number of bits,
604 * i.e. in multi CS, the number of CS per memory is the same for
/* Select the CS whose results we will read. */
607 CHECK_STATUS(ddr3_tip_if_write
608 (dev_num, ACCESS_TYPE_UNICAST, if_id, CS_ENABLE_REG,
609 (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
610 CHECK_STATUS(ddr3_tip_if_write
611 (dev_num, ACCESS_TYPE_UNICAST, if_id,
612 ODPG_DATA_CONTROL_REG, (cs_num_type << 26), (3 << 26)));
613 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
614 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
615 is_read_from_db, cs_num_type, operation,
616 result_type, direction, search, pup_num,
617 if_id, pup_access_type));
/* Parameter validation. */
619 if ((load_res == NULL) && (is_read_from_db == 1)) {
620 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
621 ("ddr3_tip_read_training_result load_res = NULL"));
624 if (pup_num >= tm->num_of_bus_per_interface) {
625 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
626 ("pup_num %d not valid\n", pup_num));
628 if (if_id >= MAX_INTERFACE_NUM) {
629 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
630 ("if_id %d not valid\n", if_id));
/* Choose per-bit or per-byte register map and the pup range to walk. */
632 if (result_type == RESULT_PER_BIT)
633 reg_addr = mask_results_dq_reg_map;
635 reg_addr = mask_results_pup_reg_map;
636 if (pup_access_type == ACCESS_TYPE_UNICAST) {
639 } else { /*pup_access_type == ACCESS_TYPE_MULTICAST) */
642 end_pup = tm->num_of_bus_per_interface - 1;
645 for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
646 VALIDATE_ACTIVE(tm->bus_act_mask, pup_cnt);
647 DEBUG_TRAINING_IP_ENGINE(
649 ("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
650 if_id, start_pup, end_pup, pup_cnt));
651 if (result_type == RESULT_PER_BIT) {
652 if (bit_num == ALL_BITS_PER_PUP) {
653 start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
654 end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
657 pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
658 end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
665 interface_train_res =
666 ddr3_tip_get_buf_ptr(dev_num, search, result_type,
668 DEBUG_TRAINING_IP_ENGINE(
670 ("start_reg %d end_reg %d interface %p\n",
671 start_reg, end_reg, interface_train_res));
672 if (interface_train_res == NULL) {
673 DEBUG_TRAINING_IP_ENGINE(
675 ("interface_train_res is NULL\n"));
679 for (reg_offset = start_reg; reg_offset <= end_reg;
681 if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
682 if (is_read_from_db == 0) {
683 CHECK_STATUS(ddr3_tip_if_read
687 reg_addr[reg_offset],
690 if (is_check_result_validity == 1) {
691 if ((read_data[if_id] &
705 interface_train_res[reg_offset]
709 DEBUG_TRAINING_IP_ENGINE
711 ("reg_offset %d value 0x%x addr %p\n",
719 &interface_train_res[start_reg];
720 DEBUG_TRAINING_IP_ENGINE
722 ("*load_res %p\n", *load_res));
725 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
726 ("not supported\n"));
/*
 * ddr3_tip_load_all_pattern_to_mem() - mark all active interfaces as
 * TEST_SUCCESS for the current stage, enable single-CS mode on each, and
 * load every pattern up to PATTERN_LIMIT into memory via the ODPG.
 */
735 * Load all pattern to memory using ODPG
737 int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
739 u32 pattern = 0, if_id;
740 struct hws_topology_map *tm = ddr3_get_topology_map();
742 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
743 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
744 training_result[training_stage][if_id] = TEST_SUCCESS;
747 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
748 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
749 /* enable single cs */
750 CHECK_STATUS(ddr3_tip_if_write
751 (dev_num, ACCESS_TYPE_UNICAST, if_id,
752 CS_ENABLE_REG, (1 << 3), (1 << 3)));
755 for (pattern = 0; pattern < PATTERN_LIMIT; pattern++)
756 ddr3_tip_load_pattern_to_mem(dev_num, pattern);
/*
 * is_odpg_access_done() - poll ODPG_BIST_DONE until the done bit reads
 * as set (up to MAX_POLLING_ITERATIONS), then clear the bit.  Logs an
 * error on timeout.
 * NOTE(review): braces/return lines are missing from this extract.
 */
762 * Wait till ODPG access is ready
764 int is_odpg_access_done(u32 dev_num, u32 if_id)
766 u32 poll_cnt = 0, data_value;
767 u32 read_data[MAX_INTERFACE_NUM];
769 for (poll_cnt = 0; poll_cnt < MAX_POLLING_ITERATIONS; poll_cnt++) {
770 CHECK_STATUS(ddr3_tip_if_read
771 (dev_num, ACCESS_TYPE_UNICAST, if_id,
772 ODPG_BIST_DONE, read_data, MASK_ALL_BITS));
773 data_value = read_data[if_id];
774 if (((data_value >> ODPG_BIST_DONE_BIT_OFFS) & 0x1) ==
775 ODPG_BIST_DONE_BIT_VALUE) {
/* Acknowledge: clear the done bit (bit 0). */
776 data_value = data_value & 0xfffffffe;
777 CHECK_STATUS(ddr3_tip_if_write
778 (dev_num, ACCESS_TYPE_UNICAST,
779 if_id, ODPG_BIST_DONE, data_value,
785 if (poll_cnt >= MAX_POLLING_ITERATIONS) {
786 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
787 ("Bist Activate: poll failure 2\n"));
/*
 * ddr3_tip_load_pattern_to_mem() - write one pattern into DRAM via the
 * ODPG: configure write mode for the effective CS, disable error
 * injection, load the pattern words into the ODPG, enable the generator,
 * wait for BIST done on every active interface, then stop the ODPG and
 * restore defaults.
 * NOTE(review): interior lines (declarations, braces, continue/return)
 * are missing from this extract.
 */
795 * Load specific pattern to memory using ODPG
797 int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
800 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
801 struct hws_topology_map *tm = ddr3_get_topology_map();
803 /* load pattern to memory */
805 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
809 0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
810 (pattern_table[pattern].tx_burst_size << 11) |
811 (pattern_table[pattern].delay_between_bursts << 15) |
812 (pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
813 (effective_cs << 26);
814 CHECK_STATUS(ddr3_tip_if_write
815 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
816 ODPG_DATA_CONTROL_REG, reg_data, MASK_ALL_BITS));
817 /* ODPG Write enable from BIST */
818 CHECK_STATUS(ddr3_tip_if_write
819 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
820 ODPG_DATA_CONTROL_REG, (0x1 | (effective_cs << 26)),
822 /* disable error injection */
823 CHECK_STATUS(ddr3_tip_if_write
824 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
825 ODPG_WRITE_DATA_ERROR_REG, 0, 0x1));
826 /* load pattern to ODPG */
827 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
828 PARAM_NOT_CARE, pattern,
829 pattern_table[pattern].start_addr);
831 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
832 if (IS_ACTIVE(tm->if_act_mask, if_id) == 0)
835 CHECK_STATUS(ddr3_tip_if_write
836 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1498,
840 CHECK_STATUS(ddr3_tip_if_write
841 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
842 ODPG_ENABLE_REG, 0x1 << ODPG_ENABLE_OFFS,
843 (0x1 << ODPG_ENABLE_OFFS)));
847 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
848 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
849 CHECK_STATUS(is_odpg_access_done(dev_num, if_id));
852 /* Disable ODPG and stop write to memory */
853 CHECK_STATUS(ddr3_tip_if_write
854 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
855 ODPG_DATA_CONTROL_REG, (0x1 << 30), (u32) (0x3 << 30)));
857 /* return to default */
858 CHECK_STATUS(ddr3_tip_if_write
859 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
860 ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));
862 /* Disable odt0 for CS0 training - need to adjust for multy CS */
863 CHECK_STATUS(ddr3_tip_if_write
864 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x1498,
867 /* temporary added */
874 * Load specific pattern to memory using CPU
876 int ddr3_tip_load_pattern_to_mem_by_cpu(u32 dev_num, enum hws_pattern pattern,
/*
 * ddr3_tip_ip_training_wrapper_int() - validate parameters, then run the
 * training search once per search direction (both directions for
 * EDGE_FPF) via ddr3_tip_ip_training(), and unload the results for each
 * active interface.
 * NOTE(review): interior lines (braces, return statements) are missing
 * from this extract.
 */
884 * Training search routine
886 int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
887 enum hws_access_type access_type,
889 enum hws_access_type pup_access_type,
890 u32 pup_num, u32 bit_num,
891 enum hws_training_result result_type,
892 enum hws_control_element control_element,
893 enum hws_search_dir search_dir,
894 enum hws_dir direction,
895 u32 interface_mask, u32 init_value_l2h,
896 u32 init_value_h2l, u32 num_iter,
897 enum hws_pattern pattern,
898 enum hws_edge_compare edge_comp,
899 enum hws_ddr_cs train_cs_type, u32 cs_num,
900 enum hws_training_ip_stat *train_status)
902 u32 interface_num = 0, start_if, end_if, init_value_used;
903 enum hws_search_dir search_dir_id, start_search, end_search;
904 enum hws_edge_compare edge_comp_used;
/* Write-direction results carry a constant 64-tap offset. */
905 u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
906 struct hws_topology_map *tm = ddr3_get_topology_map();
908 if (train_status == NULL) {
909 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
910 ("train_status is NULL\n"));
914 if ((train_cs_type > CS_NON_SINGLE) ||
915 (edge_comp >= EDGE_PFP) ||
916 (pattern >= PATTERN_LIMIT) ||
917 (direction > OPER_WRITE_AND_READ) ||
918 (search_dir > HWS_HIGH2LOW) ||
919 (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
920 (result_type > RESULT_PER_BYTE) ||
921 (pup_num >= tm->num_of_bus_per_interface) ||
922 (pup_access_type > ACCESS_TYPE_MULTICAST) ||
923 (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
924 DEBUG_TRAINING_IP_ENGINE(
926 ("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
927 train_cs_type, edge_comp, pattern, direction,
928 search_dir, control_element, result_type, pup_num,
929 pup_access_type, if_id, access_type));
/* EDGE_FPF is implemented as two EDGE_FP searches, one per direction. */
933 if (edge_comp == EDGE_FPF) {
934 start_search = HWS_LOW2HIGH;
935 end_search = HWS_HIGH2LOW;
936 edge_comp_used = EDGE_FP;
938 start_search = search_dir;
939 end_search = search_dir;
940 edge_comp_used = edge_comp;
943 for (search_dir_id = start_search; search_dir_id <= end_search;
945 init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
946 init_value_l2h : init_value_h2l;
947 DEBUG_TRAINING_IP_ENGINE(
949 ("dev_num %d, access_type %d, if_id %d, pup_access_type %d,pup_num %d, result_type %d, control_element %d search_dir_id %d, direction %d, interface_mask %d,init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
950 dev_num, access_type, if_id, pup_access_type, pup_num,
951 result_type, control_element, search_dir_id,
952 direction, interface_mask, init_value_used, num_iter,
953 pattern, edge_comp_used, train_cs_type, cs_num));
955 ddr3_tip_ip_training(dev_num, access_type, if_id,
956 pup_access_type, pup_num, result_type,
957 control_element, search_dir_id, direction,
958 interface_mask, init_value_used, num_iter,
959 pattern, edge_comp_used, train_cs_type,
960 cs_num, train_status);
961 if (access_type == ACCESS_TYPE_MULTICAST) {
963 end_if = MAX_INTERFACE_NUM - 1;
969 for (interface_num = start_if; interface_num <= end_if;
971 VALIDATE_ACTIVE(tm->if_act_mask, interface_num);
973 CHECK_STATUS(ddr3_tip_read_training_result
974 (dev_num, interface_num, pup_access_type,
975 pup_num, bit_num, search_dir_id,
976 direction, result_type,
977 TRAINING_LOAD_OPERATION_UNLOAD,
978 train_cs_type, NULL, 0, cons_tap,
/*
 * ddr3_tip_ip_training_wrapper() - full search + readback: run the
 * bidirectional search via ddr3_tip_ip_training_wrapper_int(), inspect
 * the per-bit windows, flag "problem bits" whose window fails
 * VALIDATE_TRAINING_LIMIT, then re-train just those bits in each
 * direction with half the iterations and re-read the results.
 * NOTE(review): interior lines (braces, continue statements, some call
 * arguments) are missing from this extract.
 */
987 * Training search & read result routine
989 int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
991 enum hws_access_type pup_access_type,
993 enum hws_training_result result_type,
994 enum hws_control_element control_element,
995 enum hws_search_dir search_dir,
996 enum hws_dir direction, u32 interface_mask,
997 u32 init_value_l2h, u32 init_value_h2l,
998 u32 num_iter, enum hws_pattern pattern,
999 enum hws_edge_compare edge_comp,
1000 enum hws_ddr_cs train_cs_type, u32 cs_num,
1001 enum hws_training_ip_stat *train_status)
1004 u32 interface_cnt, bit_id, start_if, end_if, bit_end = 0;
1005 u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
1006 u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
/* One problem-bit mask per pup; bit set => needs re-training. */
1007 u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
1009 struct hws_topology_map *tm = ddr3_get_topology_map();
1011 if (pup_num >= tm->num_of_bus_per_interface) {
1012 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
1013 ("pup_num %d not valid\n", pup_num));
1016 if (if_id >= MAX_INTERFACE_NUM) {
1017 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
1018 ("if_id %d not valid\n", if_id));
1021 CHECK_STATUS(ddr3_tip_ip_training_wrapper_int
1022 (dev_num, access_type, if_id, pup_access_type, pup_num,
1023 ALL_BITS_PER_PUP, result_type, control_element,
1024 search_dir, direction, interface_mask, init_value_l2h,
1025 init_value_h2l, num_iter, pattern, edge_comp,
1026 train_cs_type, cs_num, train_status));
1028 if (access_type == ACCESS_TYPE_MULTICAST) {
1030 end_if = MAX_INTERFACE_NUM - 1;
1036 for (interface_cnt = start_if; interface_cnt <= end_if;
1038 VALIDATE_ACTIVE(tm->if_act_mask, interface_cnt);
1040 pup_id <= (tm->num_of_bus_per_interface - 1); pup_id++) {
1041 VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
1042 if (result_type == RESULT_PER_BIT)
1043 bit_end = BUS_WIDTH_IN_BITS - 1;
1047 bit_bit_mask[pup_id] = 0;
1048 for (bit_id = 0; bit_id <= bit_end; bit_id++) {
1049 enum hws_search_dir search_dir_id;
1050 for (search_dir_id = HWS_LOW2HIGH;
1051 search_dir_id <= HWS_HIGH2LOW;
1054 (ddr3_tip_read_training_result
1055 (dev_num, interface_cnt,
1056 ACCESS_TYPE_UNICAST, pup_id,
1057 bit_id, search_dir_id,
1058 direction, result_type,
1059 TRAINING_LOAD_OPERATION_UNLOAD,
1061 &result[search_dir_id],
1064 e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0],
1066 e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0],
1068 DEBUG_TRAINING_IP_ENGINE(
1070 ("wrapper if_id %d pup_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
1071 interface_cnt, pup_id, bit_id,
1072 result[HWS_LOW2HIGH][0], e1,
1073 result[HWS_HIGH2LOW][0], e2));
1074 /* TBD validate is valid only for tx */
/* NOTE(review): both lock checks below read HWS_LOW2HIGH; the
 * second was presumably meant to be HWS_HIGH2LOW - confirm
 * against upstream before changing. */
1075 if (VALIDATE_TRAINING_LIMIT(e1, e2) == 1 &&
1076 GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
1077 GET_LOCK_RESULT(result[HWS_LOW2HIGH][0])) {
1078 /* Mark problem bits */
1079 bit_bit_mask[pup_id] |= 1 << bit_id;
1080 bit_bit_mask_active = 1;
1082 } /* For all bits */
1083 } /* For all PUPs */
1085 /* Fix problem bits */
1086 if (bit_bit_mask_active != 0) {
1087 u32 *l2h_if_train_res = NULL;
1088 u32 *h2l_if_train_res = NULL;
1090 ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH,
1094 ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW,
/* Re-train low-to-high with half the iterations, then re-read
 * only the flagged bits. */
1098 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
1100 ACCESS_TYPE_MULTICAST,
1101 PARAM_NOT_CARE, result_type,
1102 control_element, HWS_LOW2HIGH,
1103 direction, interface_mask,
1104 num_iter / 2, num_iter / 2,
1105 pattern, EDGE_FP, train_cs_type,
1106 cs_num, train_status);
1109 pup_id <= (tm->num_of_bus_per_interface - 1);
1111 VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
1113 if (bit_bit_mask[pup_id] == 0)
1116 for (bit_id = 0; bit_id <= bit_end; bit_id++) {
1117 if ((bit_bit_mask[pup_id] &
1118 (1 << bit_id)) == 0)
1121 (ddr3_tip_read_training_result
1122 (dev_num, interface_cnt,
1123 ACCESS_TYPE_UNICAST, pup_id,
1124 bit_id, HWS_LOW2HIGH,
1127 TRAINING_LOAD_OPERATION_UNLOAD,
1128 CS_SINGLE, &l2h_if_train_res,
/* Same re-train + re-read, high-to-low direction. */
1133 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
1135 ACCESS_TYPE_MULTICAST,
1136 PARAM_NOT_CARE, result_type,
1137 control_element, HWS_HIGH2LOW,
1138 direction, interface_mask,
1139 num_iter / 2, num_iter / 2,
1140 pattern, EDGE_FP, train_cs_type,
1141 cs_num, train_status);
1144 pup_id <= (tm->num_of_bus_per_interface - 1);
1146 VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
1148 if (bit_bit_mask[pup_id] == 0)
1151 for (bit_id = 0; bit_id <= bit_end; bit_id++) {
1152 if ((bit_bit_mask[pup_id] &
1153 (1 << bit_id)) == 0)
1156 (ddr3_tip_read_training_result
1157 (dev_num, interface_cnt,
1158 ACCESS_TYPE_UNICAST, pup_id,
1159 bit_id, HWS_HIGH2LOW, direction,
1161 TRAINING_LOAD_OPERATION_UNLOAD,
1162 CS_SINGLE, &h2l_if_train_res,
1166 } /* if bit_bit_mask_active */
1167 } /* For all Interfacess */
/*
 * ddr3_tip_load_phy_values() - save or restore per-pup PHY registers.
 * @b_load: presumably 1 = read the centralization/delay registers into
 *          the phy_reg_bk[][] backup, otherwise write the backed-up
 *          values back to the PHY -- confirm; ddr3_tip_training_ip_test()
 *          calls this with 1 before training and 0 afterwards.
 *
 * Walks every active interface and bus (VALIDATE_ACTIVE skips inactive
 * ones); CHECK_STATUS() propagates the first failing bus access.
 */
1175 int ddr3_tip_load_phy_values(int b_load)
1177 u32 bus_cnt = 0, if_id, dev_num = 0;
1178 struct hws_topology_map *tm = ddr3_get_topology_map();
1180 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1181 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1182 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
1184 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
/* save path: capture the current PHY values into phy_reg_bk[][] */
1186 CHECK_STATUS(ddr3_tip_bus_read
1188 ACCESS_TYPE_UNICAST, bus_cnt,
1190 WRITE_CENTRALIZATION_PHY_REG +
1192 CS_REGISTER_ADDR_OFFSET),
1193 &phy_reg_bk[if_id][bus_cnt]
1195 CHECK_STATUS(ddr3_tip_bus_read
1197 ACCESS_TYPE_UNICAST, bus_cnt,
1201 CS_REGISTER_ADDR_OFFSET),
1202 &phy_reg_bk[if_id][bus_cnt]
1204 CHECK_STATUS(ddr3_tip_bus_read
1206 ACCESS_TYPE_UNICAST, bus_cnt,
1208 READ_CENTRALIZATION_PHY_REG +
1210 CS_REGISTER_ADDR_OFFSET),
1211 &phy_reg_bk[if_id][bus_cnt]
/* restore path: push the backed-up values back to the PHY */
1214 CHECK_STATUS(ddr3_tip_bus_write
1215 (dev_num, ACCESS_TYPE_UNICAST,
1216 if_id, ACCESS_TYPE_UNICAST,
1217 bus_cnt, DDR_PHY_DATA,
1218 WRITE_CENTRALIZATION_PHY_REG +
1220 CS_REGISTER_ADDR_OFFSET),
1221 phy_reg_bk[if_id][bus_cnt]
1223 CHECK_STATUS(ddr3_tip_bus_write
1224 (dev_num, ACCESS_TYPE_UNICAST,
1225 if_id, ACCESS_TYPE_UNICAST,
1226 bus_cnt, DDR_PHY_DATA,
1229 CS_REGISTER_ADDR_OFFSET),
1230 phy_reg_bk[if_id][bus_cnt]
1232 CHECK_STATUS(ddr3_tip_bus_write
1233 (dev_num, ACCESS_TYPE_UNICAST,
1234 if_id, ACCESS_TYPE_UNICAST,
1235 bus_cnt, DDR_PHY_DATA,
1236 READ_CENTRALIZATION_PHY_REG +
1238 CS_REGISTER_ADDR_OFFSET),
1239 phy_reg_bk[if_id][bus_cnt]
/*
 * ddr3_tip_training_ip_test() - exercise the training IP over a range
 * of patterns and dump the results.
 *
 * PHY values are backed up first (ddr3_tip_load_phy_values(1)) and
 * written back at the end (b_load = 0), so the searches run here do
 * not disturb the previously tuned state. For every pattern in
 * [start_pattern, end_pattern] and every search direction, the full
 * training wrapper is run, then the per-pup results are unloaded and
 * printed: one word per pup for RESULT_PER_BYTE, eight words
 * otherwise (per-bit results).
 */
1248 int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
1249 enum hws_search_dir search_dir,
1250 enum hws_dir direction,
1251 enum hws_edge_compare edge,
1252 u32 init_val1, u32 init_val2,
1253 u32 num_of_iterations,
1254 u32 start_pattern, u32 end_pattern)
1256 u32 pattern, if_id, pup_id;
1257 enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
1259 u32 search_state = 0;
1260 struct hws_topology_map *tm = ddr3_get_topology_map();
/* preserve current PHY state before running test searches */
1262 ddr3_tip_load_phy_values(1);
/* sweep all requested patterns ... */
1264 for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
/* ... in both search directions */
1265 for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
1267 ddr3_tip_ip_training_wrapper(dev_num,
1268 ACCESS_TYPE_MULTICAST, 0,
1269 ACCESS_TYPE_MULTICAST, 0,
1271 HWS_CONTROL_ELEMENT_ADLL,
1272 search_dir, direction,
1275 num_of_iterations, pattern,
/* read back and print results for every active interface/pup */
1280 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
1282 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1283 for (pup_id = 0; pup_id <
1284 tm->num_of_bus_per_interface;
1286 VALIDATE_ACTIVE(tm->bus_act_mask,
1289 (ddr3_tip_read_training_result
1291 ACCESS_TYPE_UNICAST, pup_id,
1294 direction, result_type,
1295 TRAINING_LOAD_OPERATION_UNLOAD,
1296 CS_SINGLE, &res, 1, 0,
1298 if (result_type == RESULT_PER_BYTE) {
1299 DEBUG_TRAINING_IP_ENGINE
1301 ("search_state %d if_id %d pup_id %d 0x%x\n",
1302 search_state, if_id,
1305 DEBUG_TRAINING_IP_ENGINE
1307 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1308 search_state, if_id,
/* restore the PHY state saved above */
1320 ddr3_tip_load_phy_values(0);
1325 struct pattern_info *ddr3_tip_get_pattern_table()
1327 struct hws_topology_map *tm = ddr3_get_topology_map();
1329 if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
1330 return pattern_table_32;
1332 return pattern_table_16;
1335 u16 *ddr3_tip_get_mask_results_dq_reg()
1337 struct hws_topology_map *tm = ddr3_get_topology_map();
1339 if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
1340 return mask_results_dq_reg_map_pup3_ecc;
1342 return mask_results_dq_reg_map;
1345 u16 *ddr3_tip_get_mask_results_pup_reg_map()
1347 struct hws_topology_map *tm = ddr3_get_topology_map();
1349 if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
1350 return mask_results_pup_reg_map_pup3_ecc;
1352 return mask_results_pup_reg_map;