/*
 * This file is part of the coreboot project.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

/**
 * @file ddr3.c
 *
 * \brief Utilities for decoding DDR3 SPDs
 */

#include <console/console.h>
#include <device/device.h>
#include <device/dram/ddr3.h>
#include <device/dram/common.h>
#include <string.h>
#include <memory_info.h>
#include <cbmem.h>
#include <smbios.h>
#include <types.h>

/*==============================================================================
 * = DDR3 SPD decoding helpers
 *----------------------------------------------------------------------------*/

/**
 * \brief Checks if the DIMM is Registered based on byte[3] of the SPD
 *
 * Tells if the DIMM type is registered or not.
 *
 * @param type DIMM type. This is byte[3] of the SPD.
 */
int spd_dimm_is_registered_ddr3(enum spd_dimm_type type)
{
	if ((type == SPD_DIMM_TYPE_RDIMM)
	    || (type == SPD_DIMM_TYPE_MINI_RDIMM)
	    || (type == SPD_DIMM_TYPE_72B_SO_RDIMM))
		return 1;

	return 0;
}

/**
 * \brief Calculate the CRC of a DDR3 SPD
 *
 * @param spd pointer to raw SPD data
 * @param len length of data in SPD
 *
 * @return the CRC of the SPD data, or 0 when spd data is truncated.
 */
u16 spd_ddr3_calc_crc(u8 *spd, int len)
{
	int n_crc;

	/* Find the number of bytes covered by CRC */
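	/*
	 * Note: in the DDR3 SPD layout, byte 0 bit 7 selects the CRC
	 * coverage: when set, only bytes 0..116 are covered (117 bytes),
	 * otherwise bytes 0..125 are covered (126 bytes).
	 */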
	if (spd[0] & 0x80) {
		n_crc = 117;
	} else {
		n_crc = 126;
	}

	if (len < n_crc)
		/* Not enough bytes available to get the CRC */
		return 0;

	return ddr_crc16(spd, n_crc);
}

/**
 * \brief Calculate the CRC of a DDR3 SPD unique identifier
 *
 * @param spd pointer to raw SPD data
 * @param len length of data in SPD
 *
 * @return the CRC of SPD data bytes 117..127, or 0 when spd data is truncated.
 */
u16 spd_ddr3_calc_unique_crc(u8 *spd, int len)
{
	if (len < (117 + 11))
		/* Not enough bytes available to get the CRC */
		return 0;

	return ddr_crc16(&spd[117], 11);
}

/**
 * \brief Decode the raw SPD data
 *
 * Decodes raw SPD data from a DDR3 DIMM and organizes it into a
 * @ref dimm_attr structure. The SPD data must first be read into a contiguous
 * array and passed to this function.
 *
 * @param dimm pointer to @ref dimm_attr structure where the decoded data is to
 *	       be stored
 * @param spd array of raw data previously read from the SPD.
 *
 * @return @ref spd_status enumerator
 *	   SPD_STATUS_OK -- decoding was successful
 *	   SPD_STATUS_INVALID -- invalid SPD or not a DDR3 SPD
 *	   SPD_STATUS_CRC_ERROR -- CRC did not verify
 *	   SPD_STATUS_INVALID_FIELD -- A field with an invalid value was
 *				       detected.
 */
int spd_decode_ddr3(dimm_attr *dimm, spd_raw_data spd)
{
	int ret;
	u16 crc, spd_crc;
	u8 capacity_shift, bus_width;
	u8 reg8;
	u32 mtb;	/* medium time base */
	u32 ftb;	/* fine time base */
	unsigned int val;

	ret = SPD_STATUS_OK;

	/* Don't assume the dimm struct was zeroed; clear all our flags */
	dimm->flags.raw = 0;
	dimm->dimms_per_channel = 3;

	/* Make sure that the SPD dump is indeed from a DDR3 module */
	if (spd[2] != SPD_MEMORY_TYPE_SDRAM_DDR3) {
		printram("Not a DDR3 SPD!\n");
		dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
		return SPD_STATUS_INVALID;
	}
	dimm->dram_type = SPD_MEMORY_TYPE_SDRAM_DDR3;
	dimm->dimm_type = spd[3] & 0xf;

	crc = spd_ddr3_calc_crc(spd, sizeof(spd_raw_data));
	/* Compare with the CRC in the SPD */
	spd_crc = (spd[127] << 8) + spd[126];
	/* Verify the CRC is correct */
	if (crc != spd_crc) {
		printram("ERROR: SPD CRC failed!!!\n");
		ret = SPD_STATUS_CRC_ERROR;
	}

	printram("  Revision           : %x\n", spd[1]);
	printram("  Type               : %x\n", spd[2]);
	printram("  Key                : %x\n", spd[3]);

	reg8 = spd[4];
	/* Number of memory banks */
	val = (reg8 >> 4) & 0x07;
	if (val > 0x03) {
		printram("  Invalid number of memory banks\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	printram("  Banks              : %u\n", 1 << (val + 3));
	/* SDRAM capacity */
	capacity_shift = reg8 & 0x0f;
	if (capacity_shift > 0x06) {
		printram("  Invalid module capacity\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	if (capacity_shift < 0x02) {
		printram("  Capacity           : %u Mb\n", 256 << capacity_shift);
	} else {
		printram("  Capacity           : %u Gb\n", 1 << (capacity_shift - 2));
	}

	reg8 = spd[5];
	/* Row address bits */
	val = (reg8 >> 3) & 0x07;
	if (val > 0x04) {
		printram("  Invalid row address bits\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->row_bits = val + 12;
	/* Column address bits */
	val = reg8 & 0x07;
	if (val > 0x03) {
		printram("  Invalid column address bits\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->col_bits = val + 9;

	/* Module nominal voltage */
	reg8 = spd[6];
	printram("  Supported voltages :");
	if (reg8 & (1 << 2)) {
		dimm->flags.operable_1_25V = 1;
		dimm->voltage = 1250;
		printram(" 1.25V");
	}
	if (reg8 & (1 << 1)) {
		dimm->flags.operable_1_35V = 1;
		dimm->voltage = 1300;
		printram(" 1.35V");
	}
	if (!(reg8 & (1 << 0))) {
		dimm->flags.operable_1_50V = 1;
		dimm->voltage = 1500;
		printram(" 1.5V");
	}
	printram("\n");

	/* Module organization */
	reg8 = spd[7];
	/* Number of ranks */
	val = (reg8 >> 3) & 0x07;
	if (val > 3) {
		printram("  Invalid number of ranks\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->ranks = val + 1;
	/* SDRAM device width */
	val = (reg8 & 0x07);
	if (val > 3) {
		printram("  Invalid SDRAM width\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->width = (4 << val);
	printram("  SDRAM width        : %u\n", dimm->width);

	/* Memory bus width */
	reg8 = spd[8];
	/* Bus extension */
	val = (reg8 >> 3) & 0x03;
	if (val > 1) {
		printram("  Invalid bus extension\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	dimm->flags.is_ecc = val ? 1 : 0;
	printram("  Bus extension      : %u bits\n", val ? 8 : 0);
	/* Bus width */
	val = reg8 & 0x07;
	if (val > 3) {
		printram("  Invalid bus width\n");
		ret = SPD_STATUS_INVALID_FIELD;
	}
	bus_width = 8 << val;
	printram("  Bus width          : %u\n", bus_width);

	/* We have all the info we need to compute the dimm size */
	/* Capacity is 256 Mbit multiplied by the power of 2 given by
	 * capacity_shift; the rest is the JEDEC formula */
	dimm->size_mb = ((1 << (capacity_shift + (25 - 20))) * bus_width
			 * dimm->ranks) / dimm->width;
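	/*
	 * Worked example: capacity_shift = 2 (1 Gb devices), bus_width = 64,
	 * ranks = 2, width = 8  =>  (1 << 7) * 64 * 2 / 8 = 2048 MB.
	 */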

	/* Medium Timebase =
	 * Medium Timebase (MTB) Dividend /
	 * Medium Timebase (MTB) Divisor */
	mtb = (((u32) spd[10]) << 8) / spd[11];
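	/*
	 * The << 8 keeps the result in 1/256 ns units, which is how all the
	 * timings below are stored; e.g. the common dividend/divisor pair
	 * 1/8 (0.125 ns) gives mtb = 256 / 8 = 32.
	 */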

	/* SDRAM Minimum Cycle Time (tCKmin) */
	dimm->tCK = spd[12] * mtb;
	/* CAS Latencies Supported */
	dimm->cas_supported = (spd[15] << 8) + spd[14];
	/* Minimum CAS Latency Time (tAAmin) */
	dimm->tAA = spd[16] * mtb;
	/* Minimum Write Recovery Time (tWRmin) */
	dimm->tWR = spd[17] * mtb;
	/* Minimum RAS# to CAS# Delay Time (tRCDmin) */
	dimm->tRCD = spd[18] * mtb;
	/* Minimum Row Active to Row Active Delay Time (tRRDmin) */
	dimm->tRRD = spd[19] * mtb;
	/* Minimum Row Precharge Delay Time (tRPmin) */
	dimm->tRP = spd[20] * mtb;
	/* Minimum Active to Precharge Delay Time (tRASmin) */
	dimm->tRAS = (((spd[21] & 0x0f) << 8) + spd[22]) * mtb;
	/* Minimum Active to Active/Refresh Delay Time (tRCmin) */
	dimm->tRC = (((spd[21] & 0xf0) << 4) + spd[23]) * mtb;
	/* Minimum Refresh Recovery Delay Time (tRFCmin) */
	dimm->tRFC = ((spd[25] << 8) + spd[24]) * mtb;
	/* Minimum Internal Write to Read Command Delay Time (tWTRmin) */
	dimm->tWTR = spd[26] * mtb;
	/* Minimum Internal Read to Precharge Command Delay Time (tRTPmin) */
	dimm->tRTP = spd[27] * mtb;
	/* Minimum Four Activate Window Delay Time (tFAWmin) */
	dimm->tFAW = (((spd[28] & 0x0f) << 8) + spd[29]) * mtb;
	/* Minimum CAS Write Latency Time (tCWLmin)
	 * - not present in standard SPD */
	dimm->tCWL = 0;
	/* System CMD Rate Mode - not present in standard SPD */
	dimm->tCMD = 0;

	printram("  FTB timings        :");
	/* FTB is introduced in SPD revision 1.1 */
	if (spd[1] >= 0x11 && spd[9] & 0x0f) {
		printram(" yes\n");

		/* Fine timebase (1/256 ps) =
		 * Fine Timebase (FTB) Dividend /
		 * Fine Timebase (FTB) Divisor */
		ftb = (((u16) spd[9] & 0xf0) << 4) / (spd[9] & 0x0f);
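		/*
		 * e.g. spd[9] = 0x11 (dividend 1, divisor 1, i.e. FTB = 1 ps)
		 * gives ftb = 0x100 / 1 = 256 in 1/256 ps units.
		 */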

		/* The SPD spec recommends rounding the MTB part up and using
		 * a negative FTB correction, so rounding towards negative is
		 * always safe here */

		/* SDRAM Minimum Cycle Time (tCKmin) correction */
		dimm->tCK += (s32)((s8) spd[34] * ftb - 500) / 1000;
		/* Minimum CAS Latency Time (tAAmin) correction */
		dimm->tAA += (s32)((s8) spd[35] * ftb - 500) / 1000;
		/* Minimum RAS# to CAS# Delay Time (tRCDmin) correction */
		dimm->tRCD += (s32)((s8) spd[36] * ftb - 500) / 1000;
		/* Minimum Row Precharge Delay Time (tRPmin) correction */
		dimm->tRP += (s32)((s8) spd[37] * ftb - 500) / 1000;
		/* Minimum Active to Active/Refresh Delay Time (tRCmin) corr. */
		dimm->tRC += (s32)((s8) spd[38] * ftb - 500) / 1000;
	} else {
		printram(" no\n");
	}

	/* SDRAM Optional Features */
	reg8 = spd[30];
	printram("  Optional features  :");
	if (reg8 & 0x80) {
		dimm->flags.dll_off_mode = 1;
		printram(" DLL-Off_mode");
	}
	if (reg8 & 0x02) {
		dimm->flags.rzq7_supported = 1;
		printram(" RZQ/7");
	}
	if (reg8 & 0x01) {
		dimm->flags.rzq6_supported = 1;
		printram(" RZQ/6");
	}
	printram("\n");

	/* SDRAM Thermal and Refresh Options */
	reg8 = spd[31];
	printram("  Thermal features   :");
	if (reg8 & 0x80) {
		dimm->flags.pasr = 1;
		printram(" PASR");
	}
	if (reg8 & 0x08) {
		dimm->flags.odts = 1;
		printram(" ODTS");
	}
	if (reg8 & 0x04) {
		dimm->flags.asr = 1;
		printram(" ASR");
	}
	if (reg8 & 0x02) {
		dimm->flags.ext_temp_range = 1;
		printram(" ext_temp_range");
	}
	if (reg8 & 0x01) {
		dimm->flags.ext_temp_refresh = 1;
		printram(" ext_temp_refresh");
	}
	printram("\n");

	/* Module Thermal Sensor */
	reg8 = spd[32];
	if (reg8 & 0x80)
		dimm->flags.therm_sensor = 1;
	printram("  Thermal sensor     : %s\n",
		 dimm->flags.therm_sensor ? "yes" : "no");

	/* SDRAM Device Type */
	printram("  Standard SDRAM     : %s\n", (spd[33] & 0x80) ? "no" : "yes");

	if (spd[63] & 0x01) {
		dimm->flags.pins_mirrored = 1;
	}
	printram("  Rank1 Address bits : %s\n",
		 (spd[63] & 0x01) ? "mirrored" : "normal");

	dimm->reference_card = spd[62] & 0x1f;
	printram("  DIMM Reference card: %c\n", 'A' + dimm->reference_card);

	dimm->manufacturer_id = (spd[118] << 8) | spd[117];
	printram("  Manufacturer ID    : %x\n", dimm->manufacturer_id);

	dimm->part_number[16] = 0;
	memcpy(dimm->part_number, &spd[128], 16);
	printram("  Part number        : %s\n", dimm->part_number);

	memcpy(dimm->serial, &spd[SPD_DIMM_SERIAL_NUM], SPD_DIMM_SERIAL_LEN);

	return ret;
}

/**
 * \brief Decode the raw SPD XMP data
 *
 * Decodes raw XMP data from a DDR3 DIMM's SPD and organizes it into a
 * @ref dimm_attr structure. The SPD data must first be read into a contiguous
 * array and passed to this function.
 *
 * @param dimm pointer to @ref dimm_attr structure where the decoded data is to
 *	       be stored
 * @param spd array of raw data previously read from the SPD.
 *
 * @param profile select one of the profiles to load
 *
 * @return @ref spd_status enumerator
 *	   SPD_STATUS_OK -- decoding was successful
 *	   SPD_STATUS_INVALID -- invalid SPD or not a DDR3 SPD
 *	   SPD_STATUS_CRC_ERROR -- CRC did not verify
 *	   SPD_STATUS_INVALID_FIELD -- A field with an invalid value was
 *				       detected.
 */
int spd_xmp_decode_ddr3(dimm_attr *dimm,
			spd_raw_data spd,
			enum ddr3_xmp_profile profile)
{
	int ret;
	u32 mtb;	/* medium time base */
	u8 *xmp;	/* pointer to XMP profile data */

	/* We need a valid SPD to start from */
	ret = spd_decode_ddr3(dimm, spd);
	if (ret != SPD_STATUS_OK)
		return ret;

	/* Search for the XMP magic header */
	if (spd[176] != 0x0C || spd[177] != 0x4A) {
		printram("Not a DDR3 XMP profile!\n");
		dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
		return SPD_STATUS_INVALID;
	}

	if (profile == DDR3_XMP_PROFILE_1) {
		if (!(spd[178] & 1)) {
			printram("Selected XMP profile disabled!\n");
			dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
			return SPD_STATUS_INVALID;
		}

		printram("  XMP Profile        : 1\n");
		xmp = &spd[185];

		/* Medium Timebase =
		 * Medium Timebase (MTB) Dividend /
		 * Medium Timebase (MTB) Divisor */
		mtb = (((u32) spd[180]) << 8) / spd[181];

		dimm->dimms_per_channel = ((spd[178] >> 2) & 0x3) + 1;
	} else {
		if (!(spd[178] & 2)) {
			printram("Selected XMP profile disabled!\n");
			dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
			return SPD_STATUS_INVALID;
		}
		printram("  XMP Profile        : 2\n");
		xmp = &spd[220];

		/* Medium Timebase =
		 * Medium Timebase (MTB) Dividend /
		 * Medium Timebase (MTB) Divisor */
		mtb = (((u32) spd[182]) << 8) / spd[183];

		dimm->dimms_per_channel = ((spd[178] >> 4) & 0x3) + 1;
	}

	printram("  Max DIMMs/channel  : %u\n",
		 dimm->dimms_per_channel);

	printram("  XMP Revision       : %u.%u\n", spd[179] >> 4, spd[179] & 0xf);

	/* Calculate voltage in mV */
	dimm->voltage = (xmp[0] & 1) * 50;
	dimm->voltage += ((xmp[0] >> 1) & 0xf) * 100;
	dimm->voltage += ((xmp[0] >> 5) & 0x3) * 1000;
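	/*
	 * e.g. xmp[0] = 0x2D decodes as 1 * 1000 + 6 * 100 + 1 * 50
	 * = 1650 mV, a common 1.65 V XMP setting.
	 */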

	printram("  Requested voltage  : %u mV\n", dimm->voltage);

	/* SDRAM Minimum Cycle Time (tCKmin) */
	dimm->tCK = xmp[1] * mtb;
	/* CAS Latencies Supported */
	dimm->cas_supported = ((xmp[4] << 8) + xmp[3]) & 0x7fff;
	/* Minimum CAS Latency Time (tAAmin) */
	dimm->tAA = xmp[2] * mtb;
	/* Minimum Write Recovery Time (tWRmin) */
	dimm->tWR = xmp[8] * mtb;
	/* Minimum RAS# to CAS# Delay Time (tRCDmin) */
	dimm->tRCD = xmp[7] * mtb;
	/* Minimum Row Active to Row Active Delay Time (tRRDmin) */
	dimm->tRRD = xmp[17] * mtb;
	/* Minimum Row Precharge Delay Time (tRPmin) */
	dimm->tRP = xmp[6] * mtb;
	/* Minimum Active to Precharge Delay Time (tRASmin) */
	dimm->tRAS = (((xmp[9] & 0x0f) << 8) + xmp[10]) * mtb;
	/* Minimum Active to Active/Refresh Delay Time (tRCmin) */
	dimm->tRC = (((xmp[9] & 0xf0) << 4) + xmp[11]) * mtb;
	/* Minimum Refresh Recovery Delay Time (tRFCmin) */
	dimm->tRFC = ((xmp[15] << 8) + xmp[14]) * mtb;
	/* Minimum Internal Write to Read Command Delay Time (tWTRmin) */
	dimm->tWTR = xmp[20] * mtb;
	/* Minimum Internal Read to Precharge Command Delay Time (tRTPmin) */
	dimm->tRTP = xmp[16] * mtb;
	/* Minimum Four Activate Window Delay Time (tFAWmin) */
	dimm->tFAW = (((xmp[18] & 0x0f) << 8) + xmp[19]) * mtb;
	/* Minimum CAS Write Latency Time (tCWLmin) */
	dimm->tCWL = xmp[5] * mtb;
	/* System CMD Rate Mode */
	dimm->tCMD = xmp[23] * mtb;

	return ret;
}
/**
 * Fill cbmem with information for SMBIOS type 17.
 *
 * @param channel Corresponding channel of provided @info
 * @param slot Corresponding slot of provided @info
 * @param selected_freq The actual frequency the DRAM is running on
 * @param info DIMM parameters read from SPD
 *
 * @return CB_SUCCESS if DIMM info was written
 */
enum cb_err spd_add_smbios17(const u8 channel, const u8 slot,
			     const u16 selected_freq,
			     const dimm_attr *info)
{
	struct memory_info *mem_info;
	struct dimm_info *dimm;

	/*
	 * Allocate CBMEM area for DIMM information used to populate SMBIOS
	 * table 17
	 */
	mem_info = cbmem_find(CBMEM_ID_MEMINFO);
	if (!mem_info) {
		mem_info = cbmem_add(CBMEM_ID_MEMINFO, sizeof(*mem_info));

		printk(BIOS_DEBUG, "CBMEM entry for DIMM info: %p\n",
		       mem_info);
		if (!mem_info)
			return CB_ERR;

		memset(mem_info, 0, sizeof(*mem_info));
	}

	if (mem_info->dimm_cnt >= ARRAY_SIZE(mem_info->dimm)) {
		printk(BIOS_WARNING, "BUG: Too many DIMM infos for %s.\n",
		       __func__);
		return CB_ERR;
	}

	dimm = &mem_info->dimm[mem_info->dimm_cnt];
	if (info->size_mb) {
		dimm->ddr_type = MEMORY_TYPE_DDR3;
		dimm->ddr_frequency = selected_freq;
		dimm->dimm_size = info->size_mb;
		dimm->channel_num = channel;
		dimm->rank_per_dimm = info->ranks;
		dimm->dimm_num = slot;
		memcpy(dimm->module_part_number, info->part_number, 16);
		dimm->mod_id = info->manufacturer_id;

		switch (info->dimm_type) {
		case SPD_DIMM_TYPE_SO_DIMM:
			dimm->mod_type = SPD_SODIMM;
			break;
		case SPD_DIMM_TYPE_72B_SO_CDIMM:
			dimm->mod_type = SPD_72B_SO_CDIMM;
			break;
		case SPD_DIMM_TYPE_72B_SO_RDIMM:
			dimm->mod_type = SPD_72B_SO_RDIMM;
			break;
		case SPD_DIMM_TYPE_UDIMM:
			dimm->mod_type = SPD_UDIMM;
			break;
		case SPD_DIMM_TYPE_RDIMM:
			dimm->mod_type = SPD_RDIMM;
			break;
		case SPD_DIMM_TYPE_UNDEFINED:
		default:
			dimm->mod_type = SPD_UNDEFINED;
			break;
		}

		dimm->bus_width = MEMORY_BUS_WIDTH_64;	/* non-ECC only */
		memcpy(dimm->serial, info->serial,
		       MIN(sizeof(dimm->serial), sizeof(info->serial)));
		mem_info->dimm_cnt++;
	}

	return CB_SUCCESS;
}

/*
 * The information printed below is purely informational and is not
 * necessarily tied to RAM init debugging. Hence, we stop using printram()
 * and use the standard printk()'s below.
 */

static void print_ns(const char *msg, u32 val)
{
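	/* val is a fixed-point time in 1/256 ns units (see the MTB handling
	 * above): the integer part is val / 256 and the remainder is printed
	 * as thousandths of a ns. */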
	u32 mant, fp;
	mant = val / 256;
	fp = (val % 256) * 1000 / 256;

	printk(BIOS_INFO, "%s%3u.%.3u ns\n", msg, mant, fp);
}

/**
 * \brief Print the info in DIMM
 *
 * Print info about the DIMM. Useful when CONFIG_DEBUG_RAM_SETUP is selected,
 * or for purely informative output.
 *
 * @param dimm pointer to already decoded @ref dimm_attr structure
 */
void dram_print_spd_ddr3(const dimm_attr *dimm)
{
	u16 val16;
	int i;

	printk(BIOS_INFO, "  Row addr bits     : %u\n", dimm->row_bits);
	printk(BIOS_INFO, "  Column addr bits  : %u\n", dimm->col_bits);
	printk(BIOS_INFO, "  Number of ranks   : %u\n", dimm->ranks);
	printk(BIOS_INFO, "  DIMM Capacity     : %u MB\n", dimm->size_mb);

	/* CAS Latencies Supported */
	val16 = dimm->cas_supported;
	printk(BIOS_INFO, "  CAS latencies     :");
	i = 0;
	do {
		if (val16 & 1)
			printk(BIOS_INFO, " %u", i + 4);
		i++;
		val16 >>= 1;
	} while (val16);
	printk(BIOS_INFO, "\n");

	print_ns("  tCKmin            : ", dimm->tCK);
	print_ns("  tAAmin            : ", dimm->tAA);
	print_ns("  tWRmin            : ", dimm->tWR);
	print_ns("  tRCDmin           : ", dimm->tRCD);
	print_ns("  tRRDmin           : ", dimm->tRRD);
	print_ns("  tRPmin            : ", dimm->tRP);
	print_ns("  tRASmin           : ", dimm->tRAS);
	print_ns("  tRCmin            : ", dimm->tRC);
	print_ns("  tRFCmin           : ", dimm->tRFC);
	print_ns("  tWTRmin           : ", dimm->tWTR);
	print_ns("  tRTPmin           : ", dimm->tRTP);
	print_ns("  tFAWmin           : ", dimm->tFAW);
	/* Those values are only relevant if an XMP profile sets them */
	if (dimm->tCWL)
		print_ns("  tCWLmin           : ", dimm->tCWL);
	if (dimm->tCMD)
		printk(BIOS_INFO, "  tCMDmin           : %3u\n",
		       DIV_ROUND_UP(dimm->tCMD, 256));
}

/*==============================================================================
 *= DDR3 MRS helpers
 *----------------------------------------------------------------------------*/

/*
 * MRS command structure:
 * cmd[15:0]  = Address pins MA[15:0]
 * cmd[18:16] = Bank address BA[2:0]
 */

/* Map tWR value to a bitmask of the MR0 cycle */
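/*
 * e.g. tWR = 8T maps to A[11:9] = 100b; an odd tWR = 11T is rounded up to
 * 12T and maps to A[11:9] = 110b.
 */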
static u16 ddr3_twr_to_mr0_map(u8 twr)
{
	if ((twr >= 5) && (twr <= 8))
		return (twr - 4) << 9;

	/*
	 * From 8T onwards, we can only use even values. Round up if we are
	 * given an odd value.
	 */
	if ((twr >= 9) && (twr <= 14))
		return ((twr + 1) >> 1) << 9;

	/* tWR == 16T is [000] */
	return 0;
}

/* Map the CAS latency to a bitmask for the MR0 cycle */
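/*
 * e.g. CAS 11 encodes as A[6:4] = 111b with A2 = 0; CAS 12 wraps around to
 * A[6:4] = 000b with A2 = 1.
 */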
static u16 ddr3_cas_to_mr0_map(u8 cas)
{
	u16 mask = 0;
	/* A[6:4] are bits [2:0] of (CAS - 4) */
	mask = ((cas - 4) & 0x07) << 4;

	/* A2 is the MSB of (CAS - 4) */
	if ((cas - 4) & (1 << 3))
		mask |= (1 << 2);

	return mask;
}

/**
 * \brief Get command address for a DDR3 MR0 command
 *
 * The DDR3 specification only covers odd write_recovery up to 7T. If an odd
 * write_recovery greater than 7 is specified, it will be rounded up. If a tWR
 * greater than 8 is specified, it is recommended to explicitly round it up or
 * down before calling this function.
 *
 * write_recovery and cas are given in clock cycles. For example, a CAS of 7T
 * should be given as 7.
 *
 * @param precharge_pd
 * @param write_recovery Write recovery latency, tWR in clock cycles.
 * @param dll_reset
 * @param mode
 * @param cas CAS latency in clock cycles.
 * @param burst_type
 * @param burst_length
 */
mrs_cmd_t ddr3_get_mr0(enum ddr3_mr0_precharge precharge_pd,
		       u8 write_recovery,
		       enum ddr3_mr0_dll_reset dll_reset,
		       enum ddr3_mr0_mode mode,
		       u8 cas,
		       enum ddr3_mr0_burst_type burst_type,
		       enum ddr3_mr0_burst_length burst_length)
{
	mrs_cmd_t cmd = 0 << 16;

	if (precharge_pd == DDR3_MR0_PRECHARGE_FAST)
		cmd |= (1 << 12);

	cmd |= ddr3_twr_to_mr0_map(write_recovery);

	if (dll_reset == DDR3_MR0_DLL_RESET_YES)
		cmd |= (1 << 8);

	if (mode == DDR3_MR0_MODE_TEST)
		cmd |= (1 << 7);

	cmd |= ddr3_cas_to_mr0_map(cas);

	if (burst_type == DDR3_MR0_BURST_TYPE_INTERLEAVED)
		cmd |= (1 << 3);

	cmd |= (burst_length & 0x03) << 0;

	return cmd;
}

static u16 ddr3_rtt_nom_to_mr1_map(enum ddr3_mr1_rtt_nom rtt_nom)
{
	u16 mask = 0;
	/* A9 <-> rtt_nom[2] */
	if (rtt_nom & (1 << 2))
		mask |= (1 << 9);
	/* A6 <-> rtt_nom[1] */
	if (rtt_nom & (1 << 1))
		mask |= (1 << 6);
	/* A2 <-> rtt_nom[0] */
	if (rtt_nom & (1 << 0))
		mask |= (1 << 2);

	return mask;
}

static u16 ddr3_ods_to_mr1_map(enum ddr3_mr1_ods ods)
{
	u16 mask = 0;
	/* A5 <-> ods[1] */
	if (ods & (1 << 1))
		mask |= (1 << 5);
	/* A1 <-> ods[0] */
	if (ods & (1 << 0))
		mask |= (1 << 1);

	return mask;
}

/**
 * \brief Get command address for a DDR3 MR1 command
 */
mrs_cmd_t ddr3_get_mr1(enum ddr3_mr1_qoff qoff,
		       enum ddr3_mr1_tqds tqds,
		       enum ddr3_mr1_rtt_nom rtt_nom,
		       enum ddr3_mr1_write_leveling write_leveling,
		       enum ddr3_mr1_ods ods,
		       enum ddr3_mr1_additive_latency additive_latency,
		       enum ddr3_mr1_dll dll_disable)
{
	mrs_cmd_t cmd = 1 << 16;

	if (qoff == DDR3_MR1_QOFF_DISABLE)
		cmd |= (1 << 12);

	if (tqds == DDR3_MR1_TQDS_ENABLE)
		cmd |= (1 << 11);

	cmd |= ddr3_rtt_nom_to_mr1_map(rtt_nom);

	if (write_leveling == DDR3_MR1_WRLVL_ENABLE)
		cmd |= (1 << 7);

	cmd |= ddr3_ods_to_mr1_map(ods);

	cmd |= (additive_latency & 0x03) << 3;

	if (dll_disable == DDR3_MR1_DLL_DISABLE)
		cmd |= (1 << 0);

	return cmd;
}

/**
 * \brief Get command address for a DDR3 MR2 command
 *
 * cas_cwl is given in clock cycles. For example, a cas_cwl of 7T should be
 * given as 7.
 *
 * @param rtt_wr
 * @param extended_temp
 * @param self_refresh
 * @param cas_cwl CAS write latency in clock cycles.
 */
mrs_cmd_t ddr3_get_mr2(enum ddr3_mr2_rttwr rtt_wr,
		       enum ddr3_mr2_srt_range extended_temp,
		       enum ddr3_mr2_asr self_refresh, u8 cas_cwl)
{
	mrs_cmd_t cmd = 2 << 16;

	cmd |= (rtt_wr & 0x03) << 9;

	if (extended_temp == DDR3_MR2_SRT_EXTENDED)
		cmd |= (1 << 7);

	if (self_refresh == DDR3_MR2_ASR_AUTO)
		cmd |= (1 << 6);

	cmd |= ((cas_cwl - 5) & 0x07) << 3;

	return cmd;
}

/**
 * \brief Get command address for a DDR3 MR3 command
 *
 * @param dataflow_from_mpr Specify a non-zero value to put DRAM in read
 *			    leveling mode. Zero for normal operation.
 */
mrs_cmd_t ddr3_get_mr3(char dataflow_from_mpr)
{
	mrs_cmd_t cmd = 3 << 16;

	if (dataflow_from_mpr)
		cmd |= (1 << 2);

	return cmd;
}

/**
 * \brief Mirror the address bits for this MRS command
 *
 * Swap the following bits in the MRS command:
 * - MA3 <-> MA4
 * - MA5 <-> MA6
 * - MA7 <-> MA8
 * - BA0 <-> BA1
 */
mrs_cmd_t ddr3_mrs_mirror_pins(mrs_cmd_t cmd)
{
	u32 downshift, upshift;
	/* High bits = A4 | A6 | A8 | BA1 */
	/* Low bits  = A3 | A5 | A7 | BA0 */
	u32 lowbits = (1 << 3) | (1 << 5) | (1 << 7) | (1 << 16);
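	/*
	 * e.g. a command that has only MA3 set comes back with only MA4 set,
	 * and vice versa.
	 */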
	downshift = (cmd & (lowbits << 1));
	upshift = (cmd & lowbits);
	cmd &= ~(lowbits | (lowbits << 1));
	cmd |= (downshift >> 1) | (upshift << 1);
	return cmd;
}