blob: da7359a253a172c97c9eb790c480bb71095536e0 [file] [log] [blame]
Angel Pons32859fc2020-04-02 23:48:27 +02001/* SPDX-License-Identifier: GPL-2.0-only */
Andrey Petrov3f85edb2019-08-01 14:18:06 -07002
3/*
4 * JEDEC Standard No. 21-C
5 * Annex L: Serial Presence Detect (SPD) for DDR4 SDRAM Modules
6 */
7
8#ifndef DEVICE_DRAM_DDR4L_H
9#define DEVICE_DRAM_DDR4L_H
10
11/**
12 * @file ddr4.h
13 *
14 * \brief Utilities for decoding DDR4 SPDs
15 */
16
Andrey Petrov3f85edb2019-08-01 14:18:06 -070017#include <spd.h>
18#include <device/dram/common.h>
19#include <types.h>
20
/** Maximum SPD size supported */
#define SPD_SIZE_MAX_DDR4 512

/* Byte offset and length of the module part number field in a DDR4 SPD */
#define SPD_DDR4_PART_OFF 329
#define SPD_DDR4_PART_LEN 20
26
/*
 * Module type (byte 3, bits 3:0) of SPD
 * This definition is specific to DDR4. DDR2/3 SPDs have a different structure.
 */
enum spd_dimm_type_ddr4 {
	SPD_DDR4_DIMM_TYPE_EXTENDED = 0x0,
	SPD_DDR4_DIMM_TYPE_RDIMM = 0x1,
	SPD_DDR4_DIMM_TYPE_UDIMM = 0x2,
	SPD_DDR4_DIMM_TYPE_SO_DIMM = 0x3,
	SPD_DDR4_DIMM_TYPE_LRDIMM = 0x4,
	SPD_DDR4_DIMM_TYPE_MINI_RDIMM = 0x5,
	SPD_DDR4_DIMM_TYPE_MINI_UDIMM = 0x6,
	/* Encodings not listed here (0x7, 0xa, 0xb, 0xe, 0xf) are presumably
	   reserved — confirm against JEDEC 21-C Annex L before using */
	SPD_DDR4_DIMM_TYPE_72B_SO_RDIMM = 0x8,
	SPD_DDR4_DIMM_TYPE_72B_SO_UDIMM = 0x9,
	SPD_DDR4_DIMM_TYPE_16B_SO_DIMM = 0xc,
	SPD_DDR4_DIMM_TYPE_32B_SO_DIMM = 0xd,
	/* Masks to bits 3:0 to give the dimm type */
	SPD_DDR4_DIMM_TYPE_MASK = 0xf
};
46
/**
 * \brief DIMM characteristics
 *
 * The characteristics of each DIMM, as presented by the SPD
 */
struct dimm_attr_ddr4_st {
	enum spd_memory_type dram_type;		/* Memory type reported by the SPD */
	enum spd_dimm_type_ddr4 dimm_type;	/* Module form factor (SPD byte 3, bits 3:0) */
	/* Part number as ASCII, NUL-terminated (hence the extra byte) */
	char part_number[SPD_DDR4_PART_LEN + 1];
	u8 serial_number[4];	/* Raw module serial number bytes */
	u8 bus_width;		/* Bus width (raw SPD encoding — TODO confirm) */
	u8 ranks;		/* Number of ranks on the module */
	u8 sdram_width;		/* SDRAM device width (raw SPD encoding — TODO confirm) */
	u16 cap_per_die_mbit;	/* Capacity per die, in Mbit */
	u16 size_mb;		/* Total module capacity, in MB */
	u16 manufacturer_id;	/* Module manufacturer ID code */
	u16 vdd_voltage;	/* Operating voltage (units/encoding per decoder — TODO confirm) */
	bool ecc_extension;	/* True when the module provides ECC (name-derived — confirm) */
};

/* Raw SPD image of a DDR4 module */
typedef u8 spd_ddr4_raw_data[SPD_SIZE_MAX_DDR4];

/*
 * Decode a raw DDR4 SPD image into *dimm.
 * Return convention presumed to follow device/dram/common.h SPD status
 * codes — confirm against the implementation.
 */
int spd_decode_ddr4(struct dimm_attr_ddr4_st *dimm, spd_ddr4_raw_data spd);
Andrey Petrov3f85edb2019-08-01 14:18:06 -070070
/*
 * Add an SMBIOS type 17 (Memory Device) entry for the DIMM described by
 * @info, located at @channel/@slot and running at @selected_freq.
 * Returns an enum cb_err status (CB_SUCCESS on success — presumed from the
 * type; confirm against the implementation).
 */
enum cb_err spd_add_smbios17_ddr4(const u8 channel, const u8 slot,
				  const u16 selected_freq,
				  const struct dimm_attr_ddr4_st *info);

/**
 * Converts DDR4 clock speed in MHz to the standard reported speed in MT/s
 */
uint16_t ddr4_speed_mhz_to_reported_mts(uint16_t speed_mhz);
79
Krystian Hebel66036052021-05-21 13:49:38 +020080/**
81 * \brief Representation of an MRS command
82 *
83 * This represents an MRS command as seen by the DIMM. This is not a memory
84 * address that can be read to generate an MRS command. The mapping of CPU
85 * to memory pins is hardware-dependent.
86 * \n
87 * The idea is to generalize the MRS code, and only need a hardware-specific
88 * function to map the MRS bits to CPU address bits. An MRS command can be
89 * sent like:
90 * @code{.c}
91 * uint32_t addr;
92 * uint32_t mrs;
93 * chipset_enable_mrs_command_mode();
 * mrs = ddr4_get_mr0(write_recovery, dll_reset, cas, burst_type, burst_length);
 * if (rank_has_mirrored_pins)
96 * mrs = ddr4_mrs_mirror_pins(mrs);
97 * addr = chipset_specific_get_mrs_addr(mrs);
98 * volatile_read(addr);
99 * @endcode
100 *
101 * The MRS representation has the following structure:
 * - cmd[17:0] = Address pins A[17:0]
103 * - cmd[21:20] = Bank address BA[1:0]
104 * - cmd[23:22] = Bank group BG[1:0]
105 *
106 * Address pins A[16:14] are always low for MRS commands. A17 is reserved for
107 * future use, cmd[19:18] is left as a placeholder in case it is needed.
108 */
109
/* Swap A3<->A4, A5<->A6, A7<->A8, A11<->A13, BA0<->BA1, BG0<->BG1 */
static inline uint32_t ddr4_mrs_mirror_pins(uint32_t mrs_cmd)
{
	uint32_t swapped;

	/*
	 * Pass 1: adjacent-bit pairs. Bits 3, 5, 7 (A3/A5/A7), 20 (BA0) and
	 * 22 (BG0) move up one position; bits 4, 6, 8 (A4/A6/A8), 21 (BA1)
	 * and 23 (BG1) move down one. All other bits pass through untouched.
	 */
	swapped = mrs_cmd & ~0xF001F8;
	swapped |= (mrs_cmd & 0x5000A8) << 1;
	swapped |= (mrs_cmd & 0xA00150) >> 1;

	/* Pass 2: A11 (bit 11) and A13 (bit 13) are two positions apart. */
	mrs_cmd = swapped & ~0x002800;
	mrs_cmd |= (swapped & 0x000800) << 2;
	mrs_cmd |= (swapped & 0x002000) >> 2;

	return mrs_cmd;
}
121
/* MR0: operating mode */
enum ddr4_mr0_mode {
	DDR4_MR0_MODE_NORMAL = 0,
	DDR4_MR0_MODE_TEST = 1,
};
/* MR0: DLL reset */
enum ddr4_mr0_dll_reset {
	DDR4_MR0_DLL_RESET_NO = 0,
	DDR4_MR0_DLL_RESET_YES = 1,
};
/* MR0: read burst type */
enum ddr4_mr0_burst_type {
	DDR4_MR0_BURST_TYPE_SEQUENTIAL = 0,
	DDR4_MR0_BURST_TYPE_INTERLEAVED = 1,
};
/* MR0: burst length */
enum ddr4_mr0_burst_length {
	DDR4_MR0_BURST_LENGTH_FIXED_8 = 0,
	DDR4_MR0_BURST_LENGTH_ON_THE_FLY = 1,
	DDR4_MR0_BURST_LENGTH_FIXED_4 = 2,
};

/*
 * Returns MRS command encoding MR0 (see the MRS representation described
 * above). write_recovery and cas are presumably in clock cycles — confirm
 * against the implementation.
 */
uint32_t ddr4_get_mr0(u8 write_recovery,
		      enum ddr4_mr0_dll_reset dll_reset,
		      u8 cas,
		      enum ddr4_mr0_burst_type burst_type,
		      enum ddr4_mr0_burst_length burst_length);
146
/* MR1: output buffer enable (Qoff) */
enum ddr4_mr1_qoff {
	DDR4_MR1_QOFF_ENABLE = 0,
	DDR4_MR1_QOFF_DISABLE = 1,
};
/* MR1: termination data strobe (TDQS) */
enum ddr4_mr1_tdqs {
	DDR4_MR1_TDQS_DISABLE = 0,
	DDR4_MR1_TDQS_ENABLE = 1,
};
/* MR1: nominal termination RTT_NOM; RZQ_n denotes RZQ/n */
enum ddr4_mr1_rtt_nom {
	DDR4_MR1_RTT_NOM_OFF = 0,
	DDR4_MR1_RTT_NOM_RZQ_4 = 1,
	DDR4_MR1_RTT_NOM_RZQ_2 = 2,
	DDR4_MR1_RTT_NOM_RZQ_6 = 3,
	DDR4_MR1_RTT_NOM_RZQ_1 = 4,
	DDR4_MR1_RTT_NOM_RZQ_5 = 5,
	DDR4_MR1_RTT_NOM_RZQ_3 = 6,
	DDR4_MR1_RTT_NOM_RZQ_7 = 7,
};
/* MR1: write leveling mode */
enum ddr4_mr1_write_leveling {
	DDR4_MR1_WRLVL_DISABLE = 0,
	DDR4_MR1_WRLVL_ENABLE = 1,
};
/* MR1: additive latency (AL), relative to CL */
enum ddr4_mr1_additive_latency {
	DDR4_MR1_AL_DISABLE = 0,
	DDR4_MR1_AL_CL_MINUS_1 = 1,
	DDR4_MR1_AL_CL_MINUS_2 = 2,
};
/* MR1: output driver impedance; RZQ_n denotes RZQ/n */
enum ddr4_mr1_odimp {
	DDR4_MR1_ODIMP_RZQ_7 = 0,
	DDR4_MR1_ODIMP_RZQ_5 = 1,
};
/* MR1: DLL enable */
enum ddr4_mr1_dll {
	DDR4_MR1_DLL_DISABLE = 0,
	DDR4_MR1_DLL_ENABLE = 1,
};

/* Returns MRS command encoding MR1 from the given fields */
uint32_t ddr4_get_mr1(enum ddr4_mr1_qoff qoff,
		      enum ddr4_mr1_tdqs tdqs,
		      enum ddr4_mr1_rtt_nom rtt_nom,
		      enum ddr4_mr1_write_leveling write_leveling,
		      enum ddr4_mr1_odimp output_drive_impedance,
		      enum ddr4_mr1_additive_latency additive_latency,
		      enum ddr4_mr1_dll dll_enable);
191
/* MR2: write CRC enable */
enum ddr4_mr2_wr_crc {
	DDR4_MR2_WR_CRC_DISABLE = 0,
	DDR4_MR2_WR_CRC_ENABLE = 1,
};
/* MR2: dynamic ODT termination RTT_WR; RZQ_n denotes RZQ/n */
enum ddr4_mr2_rtt_wr {
	DDR4_MR2_RTT_WR_OFF = 0,
	DDR4_MR2_RTT_WR_RZQ_2 = 1,
	DDR4_MR2_RTT_WR_RZQ_1 = 2,
	DDR4_MR2_RTT_WR_HI_Z = 3,
	DDR4_MR2_RTT_WR_RZQ_3 = 4,
};
/* MR2: low-power auto self refresh (LP ASR) temperature range */
enum ddr4_mr2_lp_asr {
	DDR4_MR2_ASR_MANUAL_NORMAL_RANGE = 0,
	DDR4_MR2_ASR_MANUAL_REDUCED_RANGE = 1,
	DDR4_MR2_ASR_MANUAL_EXTENDED_RANGE = 2,
	DDR4_MR2_ASR_AUTO = 3,
};

/*
 * Returns MRS command encoding MR2. cwl is presumably the CAS write
 * latency in clock cycles — confirm against the implementation.
 */
uint32_t ddr4_get_mr2(enum ddr4_mr2_wr_crc wr_crc,
		      enum ddr4_mr2_rtt_wr rtt_wr,
		      enum ddr4_mr2_lp_asr self_refresh, u8 cwl);
214
/* MR3: multi-purpose register (MPR) read format */
enum ddr4_mr3_mpr_read_format {
	DDR4_MR3_MPR_SERIAL = 0,
	DDR4_MR3_MPR_PARALLEL = 1,
	DDR4_MR3_MPR_STAGGERED = 2,
};
/* MR3: write command latency when both CRC and DM are enabled, in clocks */
enum ddr4_mr3_wr_cmd_lat_crc_dm {
	DDR4_MR3_CRC_DM_4 = 0,
	DDR4_MR3_CRC_DM_5 = 1,
	DDR4_MR3_CRC_DM_6 = 2,
};
/* MR3: fine granularity refresh mode */
enum ddr4_mr3_fine_gran_ref {
	DDR4_MR3_FINE_GRAN_REF_NORMAL = 0,
	DDR4_MR3_FINE_GRAN_REF_FIXED_2 = 1,
	DDR4_MR3_FINE_GRAN_REF_FIXED_4 = 2,
	/* Two reserved values */
	DDR4_MR3_FINE_GRAN_REF_ON_THE_FLY_2 = 5,
	DDR4_MR3_FINE_GRAN_REF_ON_THE_FLY_4 = 6,
};
/* MR3: temperature sensor readout enable */
enum ddr4_mr3_temp_sensor_readout {
	DDR4_MR3_TEMP_SENSOR_DISABLE = 0,
	DDR4_MR3_TEMP_SENSOR_ENABLE = 1,
};
/* MR3: per-DRAM addressability (PDA) mode */
enum ddr4_mr3_pda {
	DDR4_MR3_PDA_DISABLE = 0,
	DDR4_MR3_PDA_ENABLE = 1,
};
/* MR3: gear-down mode rate */
enum ddr4_mr3_geardown_mode {
	DDR4_MR3_GEARDOWN_1_2_RATE = 0,
	DDR4_MR3_GEARDOWN_1_4_RATE = 1,
};
/* MR3: MPR operation (normal array access vs. MPR access) */
enum ddr4_mr3_mpr_operation {
	DDR4_MR3_MPR_NORMAL = 0,
	DDR4_MR3_MPR_MPR = 1,
};

/* Returns MRS command encoding MR3; mpr_page selects the MPR page */
uint32_t ddr4_get_mr3(enum ddr4_mr3_mpr_read_format mpr_read_format,
		      enum ddr4_mr3_wr_cmd_lat_crc_dm command_latency_crc_dm,
		      enum ddr4_mr3_fine_gran_ref fine_refresh,
		      enum ddr4_mr3_temp_sensor_readout temp_sensor,
		      enum ddr4_mr3_pda pda,
		      enum ddr4_mr3_geardown_mode geardown,
		      enum ddr4_mr3_mpr_operation mpr_operation,
		      u8 mpr_page);
259
/* MR4: hard post-package repair (hPPR) mode */
enum ddr4_mr4_hppr {
	DDR4_MR4_HPPR_DISABLE = 0,
	DDR4_MR4_HPPR_ENABLE = 1,
};
/* MR4: write preamble length, in clocks */
enum ddr4_mr4_wr_preamble {
	DDR4_MR4_WR_PREAMBLE_1 = 0,
	DDR4_MR4_WR_PREAMBLE_2 = 1,
};
/* MR4: read preamble length, in clocks */
enum ddr4_mr4_rd_preamble {
	DDR4_MR4_RD_PREAMBLE_1 = 0,
	DDR4_MR4_RD_PREAMBLE_2 = 1,
};
/* MR4: read preamble training mode */
enum ddr4_mr4_rd_preamble_training {
	DDR4_MR4_RD_PREAMBLE_TRAINING_DISABLE = 0,
	DDR4_MR4_RD_PREAMBLE_TRAINING_ENABLE = 1,
};
/* MR4: self refresh abort */
enum ddr4_mr4_self_refr_abort {
	DDR4_MR4_SELF_REFRESH_ABORT_DISABLE = 0,
	DDR4_MR4_SELF_REFRESH_ABORT_ENABLE = 1,
};
/* MR4: CS to command/address latency, in clocks (note: no 7-clock option) */
enum ddr4_mr4_cs_to_cmd_latency {
	DDR4_MR4_CS_TO_CMD_LAT_DISABLE = 0,
	DDR4_MR4_CS_TO_CMD_LAT_3 = 1,
	DDR4_MR4_CS_TO_CMD_LAT_4 = 2,
	DDR4_MR4_CS_TO_CMD_LAT_5 = 3,
	DDR4_MR4_CS_TO_CMD_LAT_6 = 4,
	DDR4_MR4_CS_TO_CMD_LAT_8 = 5,
};
/* MR4: soft post-package repair (sPPR) mode */
enum ddr4_mr4_sppr {
	DDR4_MR4_SPPR_DISABLE = 0,
	DDR4_MR4_SPPR_ENABLE = 1,
};
/* MR4: internal VrefDQ monitor */
enum ddr4_mr4_internal_vref_mon {
	DDR4_MR4_INTERNAL_VREF_MON_DISABLE = 0,
	DDR4_MR4_INTERNAL_VREF_MON_ENABLE = 1,
};
/* MR4: temperature controlled refresh; value 1 unused (presumably reserved) */
enum ddr4_mr4_temp_controlled_refr {
	DDR4_MR4_TEMP_CONTROLLED_REFR_DISABLE = 0,
	DDR4_MR4_TEMP_CONTROLLED_REFR_NORMAL = 2,
	DDR4_MR4_TEMP_CONTROLLED_REFR_EXTENDED = 3,
};
/* MR4: maximum power-down mode */
enum ddr4_mr4_max_pd_mode {
	DDR4_MR4_MAX_PD_MODE_DISABLE = 0,
	DDR4_MR4_MAX_PD_MODE_ENABLE = 1,
};

/* Returns MRS command encoding MR4 from the given fields */
uint32_t ddr4_get_mr4(enum ddr4_mr4_hppr hppr,
		      enum ddr4_mr4_wr_preamble wr_preamble,
		      enum ddr4_mr4_rd_preamble rd_preamble,
		      enum ddr4_mr4_rd_preamble_training rd_preamble_train,
		      enum ddr4_mr4_self_refr_abort self_ref_abrt,
		      enum ddr4_mr4_cs_to_cmd_latency cs2cmd_lat,
		      enum ddr4_mr4_sppr sppr,
		      enum ddr4_mr4_internal_vref_mon int_vref_mon,
		      enum ddr4_mr4_temp_controlled_refr temp_ctrl_ref,
		      enum ddr4_mr4_max_pd_mode max_pd);
317
/* MR5: read data bus inversion (DBI) */
enum ddr4_mr5_rd_dbi {
	DDR4_MR5_RD_DBI_DISABLE = 0,
	DDR4_MR5_RD_DBI_ENABLE = 1,
};
/* MR5: write data bus inversion (DBI) */
enum ddr4_mr5_wr_dbi {
	DDR4_MR5_WR_DBI_DISABLE = 0,
	DDR4_MR5_WR_DBI_ENABLE = 1,
};
/* MR5: data mask (DM) */
enum ddr4_mr5_data_mask {
	DDR4_MR5_DATA_MASK_DISABLE = 0,
	DDR4_MR5_DATA_MASK_ENABLE = 1,
};
/* MR5: park termination RTT_PARK; RZQ_n denotes RZQ/n */
enum ddr4_mr5_rtt_park {
	DDR4_MR5_RTT_PARK_OFF = 0,
	DDR4_MR5_RTT_PARK_RZQ_4 = 1,
	DDR4_MR5_RTT_PARK_RZQ_2 = 2,
	DDR4_MR5_RTT_PARK_RZQ_6 = 3,
	DDR4_MR5_RTT_PARK_RZQ_1 = 4,
	DDR4_MR5_RTT_PARK_RZQ_5 = 5,
	DDR4_MR5_RTT_PARK_RZQ_3 = 6,
	DDR4_MR5_RTT_PARK_RZQ_7 = 7,
};
/* MR5: ODT input buffer during power down */
enum ddr4_mr5_odt_pd {
	DDR4_MR5_ODT_PD_ACTIVATED = 0,
	DDR4_MR5_ODT_PD_DEACTIVATED = 1,
	/* Misspelled legacy names, kept as aliases for backward compatibility */
	DDR4_MR5_ODT_PD_ACTIVADED = DDR4_MR5_ODT_PD_ACTIVATED,
	DDR4_MR5_ODT_PD_DEACTIVADED = DDR4_MR5_ODT_PD_DEACTIVATED,
};
/* MR5: CA parity latency */
enum ddr4_mr5_ca_parity_lat {
	DDR4_MR5_CA_PARITY_LAT_DISABLE = 0,
	DDR4_MR5_CA_PARITY_LAT_4 = 1, /* 1600-2133 MT/s */
	DDR4_MR5_CA_PARITY_LAT_5 = 2, /* 2400-2666 MT/s */
	DDR4_MR5_CA_PARITY_LAT_6 = 3, /* 2933-3200 MT/s */
	DDR4_MR5_CA_PARITY_LAT_8 = 4, /* RFU */
};

/* Returns MRS command encoding MR5 from the given fields */
uint32_t ddr4_get_mr5(enum ddr4_mr5_rd_dbi rd_dbi,
		      enum ddr4_mr5_wr_dbi wr_dbi,
		      enum ddr4_mr5_data_mask dm,
		      enum ddr4_mr5_rtt_park rtt_park,
		      enum ddr4_mr5_odt_pd odt_pd,
		      enum ddr4_mr5_ca_parity_lat pl);
359
/* MR6: VrefDQ training mode */
enum ddr4_mr6_vrefdq_training {
	DDR4_MR6_VREFDQ_TRAINING_DISABLE = 0,
	DDR4_MR6_VREFDQ_TRAINING_ENABLE = 1,
};
/* MR6: VrefDQ training range (percentages of VDDQ) */
enum ddr4_mr6_vrefdq_training_range {
	DDR4_MR6_VREFDQ_TRAINING_RANGE_1 = 0, /* 60% to 92.50% in 0.65% steps */
	DDR4_MR6_VREFDQ_TRAINING_RANGE_2 = 1, /* 40% to 77.50% in 0.65% steps */
};

/*
 * Returns MRS command encoding MR6. tccd_l and vrefdq_value encodings are
 * defined by the implementation — confirm before use.
 */
uint32_t ddr4_get_mr6(u8 tccd_l,
		      enum ddr4_mr6_vrefdq_training vrefdq_training,
		      enum ddr4_mr6_vrefdq_training_range range,
		      u8 vrefdq_value);
374
/* ZQ calibration type: short or long (ZQCS/ZQCL in JEDEC terms) */
enum ddr4_zqcal_ls {
	DDR4_ZQCAL_SHORT = 0,
	DDR4_ZQCAL_LONG = 1,
};

/* Returns MRS command that triggers a ZQ calibration of the given type */
uint32_t ddr4_get_zqcal_cmd(enum ddr4_zqcal_ls long_short);
382
Andrey Petrov3f85edb2019-08-01 14:18:06 -0700383#endif /* DEVICE_DRAM_DDR4L_H */