1 /* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */
6 #ifndef _DDR3_TRAINING_IP_H_
7 #define _DDR3_TRAINING_IP_H_
9 #include "ddr3_training_ip_def.h"
10 #include "ddr_topology_def.h"
11 #include "ddr_training_ip_db.h"
/* total number of byte lanes across all DDR interfaces */
#define MAX_TOTAL_BUS_NUM	(MAX_INTERFACE_NUM * MAX_BUS_NUM)

/* NOTE(review): single-bit flag; exact engine-lock semantics defined by the
 * training IP registers — confirm against the register spec before reuse.
 */
#define TIP_ENG_LOCK		0x02000000

/* upper bound (exclusive) for TX DLL phase sweep values */
#define TIP_TX_DLL_RANGE_MAX	64
/*
 * Return the smaller/larger of two values.
 * The whole expansion is parenthesized (the original was not, so e.g.
 * "GET_MIN(a, b) + 1" bound the "+ 1" into the false arm of the ternary).
 * Arguments may still be evaluated twice — do not pass expressions with
 * side effects (e.g. i++).
 */
#define GET_MIN(arg1, arg2)	(((arg1) < (arg2)) ? (arg1) : (arg2))
#define GET_MAX(arg1, arg2)	(((arg1) < (arg2)) ? (arg2) : (arg1))
/*
 * Training-flow stage mask bits: each bit enables/disables one stage of
 * the DDR training sequence when building the algorithm's stage mask.
 * One unique bit per stage; values must stay stable (callers OR them).
 */
#define INIT_CONTROLLER_MASK_BIT		0x00000001
#define STATIC_LEVELING_MASK_BIT		0x00000002
#define SET_LOW_FREQ_MASK_BIT			0x00000004
#define LOAD_PATTERN_MASK_BIT			0x00000008
#define SET_MEDIUM_FREQ_MASK_BIT		0x00000010
#define WRITE_LEVELING_MASK_BIT			0x00000020
#define LOAD_PATTERN_2_MASK_BIT			0x00000040
#define READ_LEVELING_MASK_BIT			0x00000080
#define SW_READ_LEVELING_MASK_BIT		0x00000100
#define WRITE_LEVELING_SUPP_MASK_BIT		0x00000200
#define PBS_RX_MASK_BIT				0x00000400
#define PBS_TX_MASK_BIT				0x00000800
#define SET_TARGET_FREQ_MASK_BIT		0x00001000
#define ADJUST_DQS_MASK_BIT			0x00002000
#define WRITE_LEVELING_TF_MASK_BIT		0x00004000
#define LOAD_PATTERN_HIGH_MASK_BIT		0x00008000
#define READ_LEVELING_TF_MASK_BIT		0x00010000
#define WRITE_LEVELING_SUPP_TF_MASK_BIT		0x00020000
#define DM_PBS_TX_MASK_BIT			0x00040000
#define RL_DQS_BURST_MASK_BIT			0x00080000
#define CENTRALIZATION_RX_MASK_BIT		0x00100000
#define CENTRALIZATION_TX_MASK_BIT		0x00200000
#define TX_EMPHASIS_MASK_BIT			0x00400000
#define PER_BIT_READ_LEVELING_TF_MASK_BIT	0x00800000
#define VREF_CALIBRATION_MASK_BIT		0x01000000
#define WRITE_LEVELING_LF_MASK_BIT		0x02000000
/* DDR4 Specific Training Mask bits */
56 enum hws_training_result {
61 enum auto_tune_stage {
77 WRITE_LEVELING_SUPP_TF,
84 PER_BIT_READ_LEVELING_TF,
/*
 * Register/PHY access addressing mode: target a single interface/pup
 * (unicast) or broadcast the access to all of them (multicast).
 * NOTE(review): closing brace reconstructed — the excerpt elided it;
 * both enumerator values are explicit and preserved.
 */
enum hws_access_type {
	ACCESS_TYPE_UNICAST = 0,
	ACCESS_TYPE_MULTICAST = 1
};
99 struct init_cntr_param {
106 struct pattern_info {
109 u8 delay_between_bursts;
115 /* CL value for each frequency */
116 struct cl_val_per_freq {
117 u8 cl_val[DDR_FREQ_LAST];
126 /* 32 bits representing MRS bits */
127 u32 reg_mr0[MAX_INTERFACE_NUM];
128 u32 reg_mr1[MAX_INTERFACE_NUM];
129 u32 reg_mr2[MAX_INTERFACE_NUM];
130 u32 reg_m_r3[MAX_INTERFACE_NUM];
132 * Each element in array represent read_data_sample register delay for
133 * a specific interface.
134 * Each register, 4 bits[0+CS*8 to 4+CS*8] represent Number of DDR
135 * cycles from read command until data is ready to be fetched from
136 * the PHY, when accessing CS.
138 u32 read_data_sample[MAX_INTERFACE_NUM];
140 * Each element in array represent read_data_sample register delay for
141 * a specific interface.
142 * Each register, 4 bits[0+CS*8 to 4+CS*8] represent the total delay
143 * from read command until opening the read mask, when accessing CS.
144 * This field defines the delay in DDR cycles granularity.
146 u32 read_data_ready[MAX_INTERFACE_NUM];
149 struct hws_tip_freq_config_info {
155 struct hws_cs_config_info {
165 struct hws_xsb_info {
166 struct dfx_access *dfx_table;
169 int ddr3_tip_register_dq_table(u32 dev_num, u32 *table);
170 int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable);
171 int hws_ddr3_tip_init_controller(u32 dev_num,
172 struct init_cntr_param *init_cntr_prm);
173 int hws_ddr3_tip_load_topology_map(u32 dev_num,
174 struct mv_ddr_topology_map *topology);
175 int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type);
176 int hws_ddr3_tip_mode_read(u32 dev_num, struct mode_info *mode_info);
177 int hws_ddr3_tip_read_training_result(u32 dev_num,
178 enum hws_result result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM]);
179 int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode);
180 u8 ddr3_tip_get_buf_min(u8 *buf_ptr);
181 u8 ddr3_tip_get_buf_max(u8 *buf_ptr);
182 uint64_t mv_ddr_get_memory_size_per_cs_in_bits(void);
183 uint64_t mv_ddr_get_total_memory_size_in_bits(void);
184 #endif /* _DDR3_TRAINING_IP_H_ */