/* SPDX-License-Identifier: GPL-2.0-or-later */

/**
 * @file ddr3.c
 *
 * \brief Utilities for decoding DDR3 SPDs
 */

#include <console/console.h>
#include <device/device.h>
#include <device/dram/ddr3.h>
#include <device/dram/common.h>
#include <string.h>
#include <memory_info.h>
#include <cbmem.h>
#include <smbios.h>
#include <types.h>

/*==============================================================================
 * = DDR3 SPD decoding helpers
 *----------------------------------------------------------------------------*/

/**
 * \brief Checks if the DIMM is Registered based on byte[3] of the SPD
 *
 * Tells if the DIMM type is registered or not.
 *
 * @param type DIMM type. This is byte[3] of the SPD.
 */
int spd_dimm_is_registered_ddr3(enum spd_dimm_type type)
{
	if ((type == SPD_DIMM_TYPE_RDIMM)
	    || (type == SPD_DIMM_TYPE_MINI_RDIMM)
	    || (type == SPD_DIMM_TYPE_72B_SO_RDIMM))
		return 1;

	return 0;
}

/**
 * \brief Calculate the CRC of a DDR3 SPD
 *
 * @param spd pointer to raw SPD data
 * @param len length of data in SPD
 *
 * @return the CRC of the SPD data, or 0 when spd data is truncated.
 */
u16 spd_ddr3_calc_crc(u8 *spd, int len)
{
	int n_crc;

	/* Find the number of bytes covered by CRC */
	if (spd[0] & 0x80) {
		n_crc = 117;
	} else {
		n_crc = 126;
	}

	if (len < n_crc)
		/* Not enough bytes available to get the CRC */
		return 0;

	return ddr_crc16(spd, n_crc);
}

/**
 * \brief Calculate the CRC of a DDR3 SPD unique identifier
 *
 * @param spd pointer to raw SPD data
 * @param len length of data in SPD
 *
 * @return the CRC of SPD data bytes 117..127, or 0 when spd data is truncated.
 */
u16 spd_ddr3_calc_unique_crc(u8 *spd, int len)
{
	if (len < (117 + 11))
		/* Not enough bytes available to get the CRC */
		return 0;

	return ddr_crc16(&spd[117], 11);
}
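
/*
 * Example of how a raminit driver might use the two CRC helpers above to
 * detect a DIMM swap without caching the whole SPD. This is only a sketch:
 * the saved_id_crc value and the do_full_retrain() hook are assumed,
 * platform-specific pieces, not part of this file:
 *
 *	u16 id_crc = spd_ddr3_calc_unique_crc(spd, sizeof(spd_raw_data));
 *	if (id_crc == 0 || id_crc != saved_id_crc)
 *		do_full_retrain();	// SPD truncated or module changed
 */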

/**
 * \brief Decode the raw SPD data
 *
 * Decodes raw SPD data from a DDR3 DIMM and organizes it into a
 * @ref dimm_attr structure. The SPD data must first be read into a contiguous
 * array and passed to this function.
 *
 * @param dimm pointer to @ref dimm_attr structure where the decoded data is to
 *	       be stored
 * @param spd array of raw data previously read from the SPD.
 *
 * @return @ref spd_status enumerator
 *		SPD_STATUS_OK -- decoding was successful
 *		SPD_STATUS_INVALID -- invalid SPD or not a DDR3 SPD
 *		SPD_STATUS_CRC_ERROR -- CRC did not verify
 *		SPD_STATUS_INVALID_FIELD -- A field with an invalid value was
 *					    detected.
 */
int spd_decode_ddr3(struct dimm_attr_ddr3_st *dimm, spd_raw_data spd)
{
	int ret;
	u16 crc, spd_crc;
	u8 capacity_shift, bus_width;
	u8 reg8;
	u32 mtb; /* medium time base */
	u32 ftb; /* fine time base */
	unsigned int val;

	ret = SPD_STATUS_OK;

	/* Don't assume the dimm struct was zeroed out; clear all our flags */
	dimm->flags.raw = 0;
	dimm->dimms_per_channel = 3;

	/* Make sure that the SPD dump is indeed from a DDR3 module */
	if (spd[2] != SPD_MEMORY_TYPE_SDRAM_DDR3) {
		printram("Not a DDR3 SPD!\n");
		dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
		return SPD_STATUS_INVALID;
	}
	dimm->dram_type = SPD_MEMORY_TYPE_SDRAM_DDR3;
	dimm->dimm_type = spd[3] & 0xf;

	crc = spd_ddr3_calc_crc(spd, sizeof(spd_raw_data));
	/* Compare with the CRC in the SPD */
	spd_crc = (spd[127] << 8) + spd[126];
	/* Verify the CRC is correct */
	if (crc != spd_crc) {
		printram("ERROR: SPD CRC failed!!!\n");
		ret = SPD_STATUS_CRC_ERROR;
	}

	printram(" Revision : %x\n", spd[1]);
	printram(" Type : %x\n", spd[2]);
	printram(" Key : %x\n", spd[3]);

	reg8 = spd[4];
	/* Number of memory banks */
	val = (reg8 >> 4) & 0x07;
	if (val > 0x03) {
		printram(" Invalid number of memory banks\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	printram(" Banks : %u\n", 1 << (val + 3));
	/* SDRAM capacity */
	capacity_shift = reg8 & 0x0f;
	if (capacity_shift > 0x06) {
		printram(" Invalid module capacity\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	if (capacity_shift < 0x02) {
		printram(" Capacity : %u Mb\n", 256 << capacity_shift);
	} else {
		printram(" Capacity : %u Gb\n", 1 << (capacity_shift - 2));
	}

	reg8 = spd[5];
	/* Row address bits */
	val = (reg8 >> 3) & 0x07;
	if (val > 0x04) {
		printram(" Invalid row address bits\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->row_bits = val + 12;
	/* Column address bits */
	val = reg8 & 0x07;
	if (val > 0x03) {
		printram(" Invalid column address bits\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->col_bits = val + 9;

	/* Module nominal voltage */
	reg8 = spd[6];
	printram(" Supported voltages :");
	if (reg8 & (1 << 2)) {
		dimm->flags.operable_1_25V = 1;
		dimm->voltage = 1250;
		printram(" 1.25V");
	}
	if (reg8 & (1 << 1)) {
		dimm->flags.operable_1_35V = 1;
		dimm->voltage = 1300;
		printram(" 1.35V");
	}
	if (!(reg8 & (1 << 0))) {
		dimm->flags.operable_1_50V = 1;
		dimm->voltage = 1500;
		printram(" 1.5V");
	}
	printram("\n");

	/* Module organization */
	reg8 = spd[7];
	/* Number of ranks */
	val = (reg8 >> 3) & 0x07;
	if (val > 3) {
		printram(" Invalid number of ranks\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->ranks = val + 1;
	/* SDRAM device width */
	val = (reg8 & 0x07);
	if (val > 3) {
		printram(" Invalid SDRAM width\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->width = (4 << val);
	printram(" SDRAM width : %u\n", dimm->width);

	/* Memory bus width */
	reg8 = spd[8];
	/* Bus extension */
	val = (reg8 >> 3) & 0x03;
	if (val > 1) {
		printram(" Invalid bus extension\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->flags.is_ecc = val ? 1 : 0;
	printram(" Bus extension : %u bits\n", val ? 8 : 0);
	/* Bus width */
	val = reg8 & 0x07;
	if (val > 3) {
		printram(" Invalid bus width\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	bus_width = 8 << val;
	printram(" Bus width : %u\n", bus_width);

	/* We have all the info we need to compute the dimm size */
	/* Capacity is 256Mbit multiplied by the power of 2 specified in
	 * capacity_shift
	 * The rest is the JEDEC formula */
	dimm->size_mb = ((1 << (capacity_shift + (25 - 20))) * bus_width
			 * dimm->ranks) / dimm->width;
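
	/*
	 * Worked example (illustrative values): a dual-rank module built
	 * from 2 Gbit x8 SDRAMs on a 64-bit bus has capacity_shift = 3, so
	 * each device holds 256 Mbit << 3 = 2 Gbit, and
	 * size_mb = (1 << (3 + 5)) * 64 * 2 / 8 = 4096 MB,
	 * i.e. eight x8 devices per 64-bit rank, 2 GB per rank, two ranks.
	 */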

	/* Medium Timebase =
	 * Medium Timebase (MTB) Dividend /
	 * Medium Timebase (MTB) Divisor */
	mtb = (((u32) spd[10]) << 8) / spd[11];

	/* SDRAM Minimum Cycle Time (tCKmin) */
	dimm->tCK = spd[12] * mtb;
	/* CAS Latencies Supported */
	dimm->cas_supported = (spd[15] << 8) + spd[14];
	/* Minimum CAS Latency Time (tAAmin) */
	dimm->tAA = spd[16] * mtb;
	/* Minimum Write Recovery Time (tWRmin) */
	dimm->tWR = spd[17] * mtb;
	/* Minimum RAS# to CAS# Delay Time (tRCDmin) */
	dimm->tRCD = spd[18] * mtb;
	/* Minimum Row Active to Row Active Delay Time (tRRDmin) */
	dimm->tRRD = spd[19] * mtb;
	/* Minimum Row Precharge Delay Time (tRPmin) */
	dimm->tRP = spd[20] * mtb;
	/* Minimum Active to Precharge Delay Time (tRASmin) */
	dimm->tRAS = (((spd[21] & 0x0f) << 8) + spd[22]) * mtb;
	/* Minimum Active to Active/Refresh Delay Time (tRCmin) */
	dimm->tRC = (((spd[21] & 0xf0) << 4) + spd[23]) * mtb;
	/* Minimum Refresh Recovery Delay Time (tRFCmin) */
	dimm->tRFC = ((spd[25] << 8) + spd[24]) * mtb;
	/* Minimum Internal Write to Read Command Delay Time (tWTRmin) */
	dimm->tWTR = spd[26] * mtb;
	/* Minimum Internal Read to Precharge Command Delay Time (tRTPmin) */
	dimm->tRTP = spd[27] * mtb;
	/* Minimum Four Activate Window Delay Time (tFAWmin) */
	dimm->tFAW = (((spd[28] & 0x0f) << 8) + spd[29]) * mtb;
	/* Minimum CAS Write Latency Time (tCWLmin)
	 * - not present in standard SPD */
	dimm->tCWL = 0;
	/* System CMD Rate Mode - not present in standard SPD */
	dimm->tCMD = 0;

	printram(" FTB timings :");
	/* FTB is introduced in SPD revision 1.1 */
	if (spd[1] >= 0x11 && spd[9] & 0x0f) {
		printram(" yes\n");

		/* Fine timebase (1/256 ps) =
		 * Fine Timebase (FTB) Dividend /
		 * Fine Timebase (FTB) Divisor */
		ftb = (((u16) spd[9] & 0xf0) << 4) / (spd[9] & 0x0f);

		/* The SPD spec recommends rounding the MTB part up and using
		 * a negative FTB correction, so rounding towards negative
		 * here is always safe */

		/* SDRAM Minimum Cycle Time (tCKmin) correction */
		dimm->tCK += (s32)((s8) spd[34] * ftb - 500) / 1000;
		/* Minimum CAS Latency Time (tAAmin) correction */
		dimm->tAA += (s32)((s8) spd[35] * ftb - 500) / 1000;
		/* Minimum RAS# to CAS# Delay Time (tRCDmin) correction */
		dimm->tRCD += (s32)((s8) spd[36] * ftb - 500) / 1000;
		/* Minimum Row Precharge Delay Time (tRPmin) correction */
		dimm->tRP += (s32)((s8) spd[37] * ftb - 500) / 1000;
		/* Minimum Active to Active/Refresh Delay Time (tRCmin) corr. */
		dimm->tRC += (s32)((s8) spd[38] * ftb - 500) / 1000;
	} else {
		printram(" no\n");
	}
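
	/*
	 * Example of the correction math above (illustrative values): with
	 * spd[9] = 0x11 the fine timebase is 1 ps / 1 = 1 ps, so ftb = 256
	 * (units of 1/256 ps). A correction byte of 0xFA (-6, i.e. -6 ps)
	 * then adds (-6 * 256 - 500) / 1000 = -2 to the timing value, which
	 * is kept in units of 1/256 ns.
	 */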

	/* SDRAM Optional Features */
	reg8 = spd[30];
	printram(" Optional features :");
	if (reg8 & 0x80) {
		dimm->flags.dll_off_mode = 1;
		printram(" DLL-Off_mode");
	}
	if (reg8 & 0x02) {
		dimm->flags.rzq7_supported = 1;
		printram(" RZQ/7");
	}
	if (reg8 & 0x01) {
		dimm->flags.rzq6_supported = 1;
		printram(" RZQ/6");
	}
	printram("\n");

	/* SDRAM Thermal and Refresh Options */
	reg8 = spd[31];
	printram(" Thermal features :");
	if (reg8 & 0x80) {
		dimm->flags.pasr = 1;
		printram(" PASR");
	}
	if (reg8 & 0x08) {
		dimm->flags.odts = 1;
		printram(" ODTS");
	}
	if (reg8 & 0x04) {
		dimm->flags.asr = 1;
		printram(" ASR");
	}
	if (reg8 & 0x02) {
		dimm->flags.ext_temp_refresh = 1;
		printram(" ext_temp_refresh");
	}
	if (reg8 & 0x01) {
		dimm->flags.ext_temp_range = 1;
		printram(" ext_temp_range");
	}
	printram("\n");

	/* Module Thermal Sensor */
	reg8 = spd[32];
	if (reg8 & 0x80)
		dimm->flags.therm_sensor = 1;
	printram(" Thermal sensor : %s\n",
		 dimm->flags.therm_sensor ? "yes" : "no");

	/* SDRAM Device Type */
	printram(" Standard SDRAM : %s\n", (spd[33] & 0x80) ? "no" : "yes");

	if (spd[63] & 0x01) {
		dimm->flags.pins_mirrored = 1;
	}
	printram(" Rank1 Address bits : %s\n",
		 (spd[63] & 0x01) ? "mirrored" : "normal");

	dimm->reference_card = spd[62] & 0x1f;
	printram(" DIMM Reference card: %c\n", 'A' + dimm->reference_card);

	dimm->manufacturer_id = (spd[118] << 8) | spd[117];
	printram(" Manufacturer ID : %x\n", dimm->manufacturer_id);

	dimm->part_number[16] = 0;
	memcpy(dimm->part_number, &spd[128], 16);
	printram(" Part number : %s\n", dimm->part_number);

	memcpy(dimm->serial, &spd[SPD_DIMM_SERIAL_NUM], SPD_DIMM_SERIAL_LEN);

	return ret;
}
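
/*
 * Typical usage sketch for the decoder above; the SPD read routine and the
 * SMBus address are placeholders, as the actual accessor is platform
 * specific:
 *
 *	spd_raw_data spd;
 *	struct dimm_attr_ddr3_st dimm;
 *
 *	read_spd(spd, 0x50);		// hypothetical platform helper
 *	if (spd_decode_ddr3(&dimm, spd) != SPD_STATUS_OK)
 *		return;			// slot empty or SPD unusable
 *	dram_print_spd_ddr3(&dimm);	// optional, defined below
 */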

/**
 * \brief Decode the raw SPD XMP data
 *
 * Decodes raw SPD XMP data from a DDR3 DIMM and organizes it into a
 * @ref dimm_attr structure. The SPD data must first be read into a contiguous
 * array and passed to this function.
 *
 * @param dimm pointer to @ref dimm_attr structure where the decoded data is to
 *	       be stored
 * @param spd array of raw data previously read from the SPD.
 *
 * @param profile select one of the profiles to load
 *
 * @return @ref spd_status enumerator
 *		SPD_STATUS_OK -- decoding was successful
 *		SPD_STATUS_INVALID -- invalid SPD or not a DDR3 SPD
 *		SPD_STATUS_CRC_ERROR -- CRC did not verify
 *		SPD_STATUS_INVALID_FIELD -- A field with an invalid value was
 *					    detected.
 */
int spd_xmp_decode_ddr3(struct dimm_attr_ddr3_st *dimm,
			spd_raw_data spd,
			enum ddr3_xmp_profile profile)
{
	int ret;
	u32 mtb; /* medium time base */
	u8 *xmp; /* pointer to XMP profile data */

	/* need a valid SPD */
	ret = spd_decode_ddr3(dimm, spd);
	if (ret != SPD_STATUS_OK)
		return ret;

	/* search for magic header */
	if (spd[176] != 0x0C || spd[177] != 0x4A) {
		printram("Not a DDR3 XMP profile!\n");
		dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
		return SPD_STATUS_INVALID;
	}

	if (profile == DDR3_XMP_PROFILE_1) {
		if (!(spd[178] & 1)) {
			printram("Selected XMP profile disabled!\n");
			dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
			return SPD_STATUS_INVALID;
		}

		printram(" XMP Profile : 1\n");
		xmp = &spd[185];

		/* Medium Timebase =
		 * Medium Timebase (MTB) Dividend /
		 * Medium Timebase (MTB) Divisor */
		mtb = (((u32) spd[180]) << 8) / spd[181];

		dimm->dimms_per_channel = ((spd[178] >> 2) & 0x3) + 1;
	} else {
		if (!(spd[178] & 2)) {
			printram("Selected XMP profile disabled!\n");
			dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
			return SPD_STATUS_INVALID;
		}
		printram(" XMP Profile : 2\n");
		xmp = &spd[220];

		/* Medium Timebase =
		 * Medium Timebase (MTB) Dividend /
		 * Medium Timebase (MTB) Divisor */
		mtb = (((u32) spd[182]) << 8) / spd[183];

		dimm->dimms_per_channel = ((spd[178] >> 4) & 0x3) + 1;
	}

	printram(" Max DIMMs/channel : %u\n",
		 dimm->dimms_per_channel);

	printram(" XMP Revision : %u.%u\n", spd[179] >> 4, spd[179] & 0xf);

	/* calculate voltage in mV */
	dimm->voltage = (xmp[0] & 1) * 50;
	dimm->voltage += ((xmp[0] >> 1) & 0xf) * 100;
	dimm->voltage += ((xmp[0] >> 5) & 0x3) * 1000;

	printram(" Requested voltage : %u mV\n", dimm->voltage);

	/* SDRAM Minimum Cycle Time (tCKmin) */
	dimm->tCK = xmp[1] * mtb;
	/* CAS Latencies Supported */
	dimm->cas_supported = ((xmp[4] << 8) + xmp[3]) & 0x7fff;
	/* Minimum CAS Latency Time (tAAmin) */
	dimm->tAA = xmp[2] * mtb;
	/* Minimum Write Recovery Time (tWRmin) */
	dimm->tWR = xmp[8] * mtb;
	/* Minimum RAS# to CAS# Delay Time (tRCDmin) */
	dimm->tRCD = xmp[7] * mtb;
	/* Minimum Row Active to Row Active Delay Time (tRRDmin) */
	dimm->tRRD = xmp[17] * mtb;
	/* Minimum Row Precharge Delay Time (tRPmin) */
	dimm->tRP = xmp[6] * mtb;
	/* Minimum Active to Precharge Delay Time (tRASmin) */
	dimm->tRAS = (((xmp[9] & 0x0f) << 8) + xmp[10]) * mtb;
	/* Minimum Active to Active/Refresh Delay Time (tRCmin) */
	dimm->tRC = (((xmp[9] & 0xf0) << 4) + xmp[11]) * mtb;
	/* Minimum Refresh Recovery Delay Time (tRFCmin) */
	dimm->tRFC = ((xmp[15] << 8) + xmp[14]) * mtb;
	/* Minimum Internal Write to Read Command Delay Time (tWTRmin) */
	dimm->tWTR = xmp[20] * mtb;
	/* Minimum Internal Read to Precharge Command Delay Time (tRTPmin) */
	dimm->tRTP = xmp[16] * mtb;
	/* Minimum Four Activate Window Delay Time (tFAWmin) */
	dimm->tFAW = (((xmp[18] & 0x0f) << 8) + xmp[19]) * mtb;
	/* Minimum CAS Write Latency Time (tCWLmin) */
	dimm->tCWL = xmp[5] * mtb;
	/* System CMD Rate Mode */
	dimm->tCMD = xmp[23] * mtb;

	return ret;
}
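
/*
 * A minimal sketch of preferring an XMP profile over the JEDEC timings; the
 * fall-back policy is an assumption, not something mandated by this file:
 *
 *	if (spd_xmp_decode_ddr3(&dimm, spd, DDR3_XMP_PROFILE_1)
 *	    != SPD_STATUS_OK)
 *		spd_decode_ddr3(&dimm, spd);	// no usable XMP, use JEDEC
 */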

/**
 * Fill cbmem with information for SMBIOS type 17.
 *
 * @param channel Corresponding channel of provided @info
 * @param slot Corresponding slot of provided @info
 * @param selected_freq The actual frequency the DRAM is running on
 * @param info DIMM parameters read from SPD
 *
 * @return CB_SUCCESS if DIMM info was written
 */
enum cb_err spd_add_smbios17(const u8 channel, const u8 slot,
			     const u16 selected_freq,
			     const struct dimm_attr_ddr3_st *info)
{
	struct memory_info *mem_info;
	struct dimm_info *dimm;

	/*
	 * Allocate CBMEM area for DIMM information used to populate SMBIOS
	 * table 17
	 */
	mem_info = cbmem_find(CBMEM_ID_MEMINFO);
	if (!mem_info) {
		mem_info = cbmem_add(CBMEM_ID_MEMINFO, sizeof(*mem_info));

		printk(BIOS_DEBUG, "CBMEM entry for DIMM info: %p\n",
		       mem_info);
		if (!mem_info)
			return CB_ERR;

		memset(mem_info, 0, sizeof(*mem_info));
	}

	if (mem_info->dimm_cnt >= ARRAY_SIZE(mem_info->dimm)) {
		printk(BIOS_WARNING, "BUG: Too many DIMM infos for %s.\n",
		       __func__);
		return CB_ERR;
	}

	dimm = &mem_info->dimm[mem_info->dimm_cnt];
	if (info->size_mb) {
		dimm->ddr_type = MEMORY_TYPE_DDR3;
		dimm->ddr_frequency = selected_freq;
		dimm->dimm_size = info->size_mb;
		dimm->channel_num = channel;
		dimm->rank_per_dimm = info->ranks;
		dimm->dimm_num = slot;
		memcpy(dimm->module_part_number, info->part_number, 16);
		dimm->mod_id = info->manufacturer_id;

		switch (info->dimm_type) {
		case SPD_DIMM_TYPE_SO_DIMM:
			dimm->mod_type = SPD_SODIMM;
			break;
		case SPD_DIMM_TYPE_72B_SO_CDIMM:
			dimm->mod_type = SPD_72B_SO_CDIMM;
			break;
		case SPD_DIMM_TYPE_72B_SO_RDIMM:
			dimm->mod_type = SPD_72B_SO_RDIMM;
			break;
		case SPD_DIMM_TYPE_UDIMM:
			dimm->mod_type = SPD_UDIMM;
			break;
		case SPD_DIMM_TYPE_RDIMM:
			dimm->mod_type = SPD_RDIMM;
			break;
		case SPD_DIMM_TYPE_UNDEFINED:
		default:
			dimm->mod_type = SPD_UNDEFINED;
			break;
		}

		dimm->bus_width = MEMORY_BUS_WIDTH_64; // non-ECC only
		memcpy(dimm->serial, info->serial,
		       MIN(sizeof(dimm->serial), sizeof(info->serial)));
		mem_info->dimm_cnt++;
	}

	return CB_SUCCESS;
}
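
/*
 * Sketch of exporting all decoded DIMMs after raminit; the channel/slot loop
 * bounds and the dimm[][] array are platform-specific assumptions:
 *
 *	for (ch = 0; ch < NUM_CHANNELS; ch++)
 *		for (slot = 0; slot < DIMMS_PER_CHANNEL; slot++)
 *			spd_add_smbios17(ch, slot, mem_freq_mhz,
 *					 &dimm[ch][slot]);
 */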

/*
 * The information printed below has a more informational character, and is not
 * necessarily tied in to RAM init debugging. Hence, we stop using printram(),
 * and use the standard printk()'s below.
 */

static void print_ns(const char *msg, u32 val)
{
	u32 mant, fp;
	mant = val / 256;
	fp = (val % 256) * 1000 / 256;

	printk(BIOS_INFO, "%s%3u.%.3u ns\n", msg, mant, fp);
}
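
/*
 * All decoded timings are stored in units of 1/256 ns (the MTB/FTB values are
 * scaled by 256 above), so print_ns() only splits them into an integer and a
 * fractional part. For example, tCK = 320 gives 320 / 256 = 1 and
 * (320 % 256) * 1000 / 256 = 250, printed as "1.250 ns" (DDR3-1600).
 */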

/**
 * \brief Print the info in DIMM
 *
 * Print info about the DIMM. Useful when CONFIG(DEBUG_RAM_SETUP) is
 * selected, or for a purely informative output.
 *
 * @param dimm pointer to already decoded @ref dimm_attr structure
 */
void dram_print_spd_ddr3(const struct dimm_attr_ddr3_st *dimm)
{
	u16 val16;
	int i;

	printk(BIOS_INFO, " Row addr bits : %u\n", dimm->row_bits);
	printk(BIOS_INFO, " Column addr bits : %u\n", dimm->col_bits);
	printk(BIOS_INFO, " Number of ranks : %u\n", dimm->ranks);
	printk(BIOS_INFO, " DIMM Capacity : %u MB\n", dimm->size_mb);

	/* CAS Latencies Supported */
	val16 = dimm->cas_supported;
	printk(BIOS_INFO, " CAS latencies :");
	i = 0;
	do {
		if (val16 & 1)
			printk(BIOS_INFO, " %u", i + 4);
		i++;
		val16 >>= 1;
	} while (val16);
	printk(BIOS_INFO, "\n");

	print_ns(" tCKmin : ", dimm->tCK);
	print_ns(" tAAmin : ", dimm->tAA);
	print_ns(" tWRmin : ", dimm->tWR);
	print_ns(" tRCDmin : ", dimm->tRCD);
	print_ns(" tRRDmin : ", dimm->tRRD);
	print_ns(" tRPmin : ", dimm->tRP);
	print_ns(" tRASmin : ", dimm->tRAS);
	print_ns(" tRCmin : ", dimm->tRC);
	print_ns(" tRFCmin : ", dimm->tRFC);
	print_ns(" tWTRmin : ", dimm->tWTR);
	print_ns(" tRTPmin : ", dimm->tRTP);
	print_ns(" tFAWmin : ", dimm->tFAW);
	/* Those values are only relevant if an XMP profile sets them */
	if (dimm->tCWL)
		print_ns(" tCWLmin : ", dimm->tCWL);
	if (dimm->tCMD)
		printk(BIOS_INFO, " tCMDmin : %3u\n",
		       DIV_ROUND_UP(dimm->tCMD, 256));
}

/*==============================================================================
 *= DDR3 MRS helpers
 *----------------------------------------------------------------------------*/

/*
 * MRS command structure:
 * cmd[15:0]  = Address pins MA[15:0]
 * cmd[18:16] = Bank address BA[2:0]
 */

/* Map tWR value to a bitmask of the MR0 cycle */
static u16 ddr3_twr_to_mr0_map(u8 twr)
{
	if ((twr >= 5) && (twr <= 8))
		return (twr - 4) << 9;

	/*
	 * Above 8T, only even values can be used. Round up if we are
	 * given an odd value.
	 */
	if ((twr >= 9) && (twr <= 14))
		return ((twr + 1) >> 1) << 9;

	/* tWR == 16T is [000] */
	return 0;
}

/* Map the CAS latency to a bitmask for the MR0 cycle */
static u16 ddr3_cas_to_mr0_map(u8 cas)
{
	u16 mask = 0;
	/* A[6:4] are bits [2:0] of (CAS - 4) */
	mask = ((cas - 4) & 0x07) << 4;

	/* A2 is the MSB of (CAS - 4) */
	if ((cas - 4) & (1 << 3))
		mask |= (1 << 2);

	return mask;
}

/**
 * \brief Get command address for a DDR3 MR0 command
 *
 * The DDR3 specification only covers odd write_recovery up to 7T. If an odd
 * write_recovery greater than 7 is specified, it will be rounded up. If a tWR
 * greater than 8 is specified, it is recommended to explicitly round it up or
 * down before calling this function.
 *
 * write_recovery and cas are given in clock cycles. For example, a CAS of 7T
 * should be given as 7.
 *
 * @param precharge_pd
 * @param write_recovery Write recovery latency, tWR in clock cycles.
 * @param dll_reset
 * @param mode
 * @param cas CAS latency in clock cycles.
 * @param burst_type
 * @param burst_length
 */
mrs_cmd_t ddr3_get_mr0(enum ddr3_mr0_precharge precharge_pd,
		       u8 write_recovery,
		       enum ddr3_mr0_dll_reset dll_reset,
		       enum ddr3_mr0_mode mode,
		       u8 cas,
		       enum ddr3_mr0_burst_type burst_type,
		       enum ddr3_mr0_burst_length burst_length)
{
	mrs_cmd_t cmd = 0 << 16;

	if (precharge_pd == DDR3_MR0_PRECHARGE_FAST)
		cmd |= (1 << 12);

	cmd |= ddr3_twr_to_mr0_map(write_recovery);

	if (dll_reset == DDR3_MR0_DLL_RESET_YES)
		cmd |= (1 << 8);

	if (mode == DDR3_MR0_MODE_TEST)
		cmd |= (1 << 7);

	cmd |= ddr3_cas_to_mr0_map(cas);

	if (burst_type == DDR3_MR0_BURST_TYPE_INTERLEAVED)
		cmd |= (1 << 3);

	cmd |= (burst_length & 0x03) << 0;

	return cmd;
}
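
/*
 * Worked example for the two MR0 maps used above: for CAS = 9, (9 - 4) = 5,
 * so ddr3_cas_to_mr0_map() sets MA[6:4] = 101b (0x50) and leaves MA2 clear;
 * for tWR = 10, ddr3_twr_to_mr0_map() returns ((10 + 1) >> 1) << 9 = 0xa00,
 * i.e. MA[11:9] = 101b. OR-ed into the MR0 command by ddr3_get_mr0(), these
 * select CL 9 and a write recovery of 10 clocks.
 */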

static u16 ddr3_rtt_nom_to_mr1_map(enum ddr3_mr1_rtt_nom rtt_nom)
{
	u16 mask = 0;
	/* A9 <-> rtt_nom[2] */
	if (rtt_nom & (1 << 2))
		mask |= (1 << 9);
	/* A6 <-> rtt_nom[1] */
	if (rtt_nom & (1 << 1))
		mask |= (1 << 6);
	/* A2 <-> rtt_nom[0] */
	if (rtt_nom & (1 << 0))
		mask |= (1 << 2);

	return mask;
}

static u16 ddr3_ods_to_mr1_map(enum ddr3_mr1_ods ods)
{
	u16 mask = 0;
	/* A5 <-> ods[1] */
	if (ods & (1 << 1))
		mask |= (1 << 5);
	/* A1 <-> ods[0] */
	if (ods & (1 << 0))
		mask |= (1 << 1);

	return mask;
}

/**
 * \brief Get command address for a DDR3 MR1 command
 */
mrs_cmd_t ddr3_get_mr1(enum ddr3_mr1_qoff qoff,
		       enum ddr3_mr1_tqds tqds,
		       enum ddr3_mr1_rtt_nom rtt_nom,
		       enum ddr3_mr1_write_leveling write_leveling,
		       enum ddr3_mr1_ods ods,
		       enum ddr3_mr1_additive_latency additive_latency,
		       enum ddr3_mr1_dll dll_disable)
{
	mrs_cmd_t cmd = 1 << 16;

	if (qoff == DDR3_MR1_QOFF_DISABLE)
		cmd |= (1 << 12);

	if (tqds == DDR3_MR1_TQDS_ENABLE)
		cmd |= (1 << 11);

	cmd |= ddr3_rtt_nom_to_mr1_map(rtt_nom);

	if (write_leveling == DDR3_MR1_WRLVL_ENABLE)
		cmd |= (1 << 7);

	cmd |= ddr3_ods_to_mr1_map(ods);

	cmd |= (additive_latency & 0x03) << 3;

	if (dll_disable == DDR3_MR1_DLL_DISABLE)
		cmd |= (1 << 0);

	return cmd;
}

/**
 * \brief Get command address for a DDR3 MR2 command
 *
 * cas_cwl is given in clock cycles. For example, a cas_cwl of 7T should be
 * given as 7.
 *
 * @param rtt_wr
 * @param extended_temp
 * @param self_refresh
 * @param cas_cwl CAS write latency in clock cycles.
 */
mrs_cmd_t ddr3_get_mr2(enum ddr3_mr2_rttwr rtt_wr,
		       enum ddr3_mr2_srt_range extended_temp,
		       enum ddr3_mr2_asr self_refresh, u8 cas_cwl)
{
	mrs_cmd_t cmd = 2 << 16;

	cmd |= (rtt_wr & 0x03) << 9;

	if (extended_temp == DDR3_MR2_SRT_EXTENDED)
		cmd |= (1 << 7);

	if (self_refresh == DDR3_MR2_ASR_AUTO)
		cmd |= (1 << 6);

	cmd |= ((cas_cwl - 5) & 0x07) << 3;

	return cmd;
}

/**
 * \brief Get command address for a DDR3 MR3 command
 *
 * @param dataflow_from_mpr Specify a non-zero value to put DRAM in read
 *			    leveling mode. Zero for normal operation.
 */
mrs_cmd_t ddr3_get_mr3(char dataflow_from_mpr)
{
	mrs_cmd_t cmd = 3 << 16;

	if (dataflow_from_mpr)
		cmd |= (1 << 2);

	return cmd;
}

/**
 * \brief Mirror the address bits for this MRS command
 *
 * Swap the following bits in the MRS command:
 *	- MA3 <-> MA4
 *	- MA5 <-> MA6
 *	- MA7 <-> MA8
 *	- BA0 <-> BA1
 */
mrs_cmd_t ddr3_mrs_mirror_pins(mrs_cmd_t cmd)
{
	u32 downshift, upshift;
	/* High bits = A4 | A6 | A8 | BA1 */
	/* Low bits  = A3 | A5 | A7 | BA0 */
	u32 lowbits = (1 << 3) | (1 << 5) | (1 << 7) | (1 << 16);
	downshift = (cmd & (lowbits << 1));
	upshift = (cmd & lowbits);
	cmd &= ~(lowbits | (lowbits << 1));
	cmd |= (downshift >> 1) | (upshift << 1);
	return cmd;
}
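
/*
 * Sketch of how a controller driver would combine the MRS helpers; the
 * send_mrs() hook and the rank loop are platform-specific assumptions:
 *
 *	mrs_cmd_t mr3 = ddr3_get_mr3(0);
 *	if (dimm->flags.pins_mirrored && (rank & 1))
 *		mr3 = ddr3_mrs_mirror_pins(mr3);
 *	send_mrs(rank, mr3);	// hypothetical controller hook
 *
 * Address mirroring applies only to the odd rank of modules that report a
 * mirrored raw card in SPD byte 63, which is what flags.pins_mirrored
 * records in spd_decode_ddr3() above.
 */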