blob: 5fc9a7ac391d45d076988f39efdc149b0bf96680 [file] [log] [blame]
Patrick Georgiac959032020-05-05 22:49:26 +02001/* SPDX-License-Identifier: GPL-2.0-or-later */
Damien Zammit4b513a62015-08-20 00:37:05 +10002
Kyösti Mälkkif1b58b72019-03-01 13:43:02 +02003#include <device/pci_ops.h>
Damien Zammit4b513a62015-08-20 00:37:05 +10004#include <cbmem.h>
Elyes HAOUASb559b3c2019-04-28 17:52:10 +02005#include <cf9_reset.h>
Damien Zammit4b513a62015-08-20 00:37:05 +10006#include <console/console.h>
Elyes HAOUASd2b9ec12018-10-27 09:41:02 +02007#include <arch/cpu.h>
Julius Wernercd49cce2019-03-05 16:53:33 -08008#if CONFIG(SOUTHBRIDGE_INTEL_I82801GX)
Martin Rothcbe38922016-01-05 19:40:41 -07009#include <southbridge/intel/i82801gx/i82801gx.h> /* smbus_read_byte */
Arthur Heymans349e0852017-04-09 20:48:37 +020010#else
11#include <southbridge/intel/i82801jx/i82801jx.h> /* smbus_read_byte */
12#endif
Damien Zammit4b513a62015-08-20 00:37:05 +100013#include <spd.h>
14#include <string.h>
Arthur Heymans3cf94032017-04-05 16:17:26 +020015#include <device/dram/ddr2.h>
Arthur Heymans1848ba32017-04-11 17:09:31 +020016#include <device/dram/ddr3.h>
Arthur Heymansadc571a2017-09-25 09:40:54 +020017#include <mrc_cache.h>
Elyes HAOUASf5a57a82019-01-08 22:15:53 +010018#include <timestamp.h>
Elyes HAOUAS51401c32019-05-15 21:09:30 +020019#include <types.h>
Arthur Heymansadc571a2017-09-25 09:40:54 +020020
Elyes HAOUASbf0970e2019-03-21 11:10:03 +010021#include "iomap.h"
22#include "x4x.h"
23
Arthur Heymansadc571a2017-09-25 09:40:54 +020024#define MRC_CACHE_VERSION 0
Damien Zammit4b513a62015-08-20 00:37:05 +100025
Arthur Heymans1848ba32017-04-11 17:09:31 +020026static u16 ddr2_get_crc(u8 device, u8 len)
27{
28 u8 raw_spd[128] = {};
Kyösti Mälkkic01a5052019-01-30 09:39:23 +020029 i2c_eeprom_read(device, 64, 9, &raw_spd[64]);
30 i2c_eeprom_read(device, 93, 6, &raw_spd[93]);
Arthur Heymans1848ba32017-04-11 17:09:31 +020031 return spd_ddr2_calc_unique_crc(raw_spd, len);
32}
33
34static u16 ddr3_get_crc(u8 device, u8 len)
35{
36 u8 raw_spd[256] = {};
Kyösti Mälkkic01a5052019-01-30 09:39:23 +020037 i2c_eeprom_read(device, 117, 11, &raw_spd[117]);
Arthur Heymans1848ba32017-04-11 17:09:31 +020038 return spd_ddr3_calc_unique_crc(raw_spd, len);
39}
40
Arthur Heymansadc571a2017-09-25 09:40:54 +020041static enum cb_err verify_spds(const u8 *spd_map,
42 const struct sysinfo *ctrl_cached)
43{
44 int i;
Arthur Heymansadc571a2017-09-25 09:40:54 +020045 u16 crc;
46
47 for (i = 0; i < TOTAL_DIMMS; i++) {
48 if (!(spd_map[i]))
49 continue;
50 int len = smbus_read_byte(spd_map[i], 0);
51 if (len < 0 && ctrl_cached->dimms[i].card_type
52 == RAW_CARD_UNPOPULATED)
53 continue;
54 if (len > 0 && ctrl_cached->dimms[i].card_type
55 == RAW_CARD_UNPOPULATED)
56 return CB_ERR;
57
Arthur Heymans1848ba32017-04-11 17:09:31 +020058 if (ctrl_cached->spd_type == DDR2)
59 crc = ddr2_get_crc(spd_map[i], len);
60 else
61 crc = ddr3_get_crc(spd_map[i], len);
62
Arthur Heymansadc571a2017-09-25 09:40:54 +020063 if (crc != ctrl_cached->dimms[i].spd_crc)
64 return CB_ERR;
65 }
66 return CB_SUCCESS;
67}
68
/*
 * Worst-case timings accumulated over all populated DIMMs.  Each min_t*
 * field holds the largest minimum requirement seen so far (see the MAX()
 * folding in ddr2/ddr3_save_dimminfo).  Values are in the time unit used
 * by the device/dram decode helpers — presumably 1/256 ns like the TCK_*
 * constants; confirm against device/dram/ddr3.h.
 */
struct abs_timings {
	u32 min_tclk;		/* smallest usable clock period */
	u32 min_tRAS;
	u32 min_tRP;
	u32 min_tRCD;
	u32 min_tWR;
	u32 min_tRFC;
	u32 min_tWTR;
	u32 min_tRRD;
	u32 min_tRTP;
	u32 min_tAA;		/* DDR3 only: CAS latency time */
	u32 min_tCLK_cas[8];	/* DDR2: min clock period for each CAS value */
	u32 cas_supported;	/* bitmask of CAS latencies all DIMMs support */
};

/* Fastest DDR2 clock the controller is driven at here: DDR2-800 (tCK 2.5ns). */
#define CTRL_MIN_TCLK_DDR2 TCK_400MHZ
85
/*
 * Pick a common DDR2 CAS latency and memory clock for all DIMMs.
 * Starting from the highest supported CAS, walk downwards: a lower CAS
 * is preferred as long as it does not force a slower clock (i.e. the
 * min tCLK for that CAS equals the one for CAS-1) and the clock stays
 * within what the controller supports.  Dies if no combination works.
 */
static void select_cas_dramfreq_ddr2(struct sysinfo *s,
			const struct abs_timings *saved_timings)
{
	u8 try_cas;
	/* Currently only these CAS are supported */
	u8 cas_mask = SPD_CAS_LATENCY_DDR2_5 | SPD_CAS_LATENCY_DDR2_6;

	cas_mask &= saved_timings->cas_supported;
	try_cas = spd_get_msbs(cas_mask);

	/* Walk down from the highest common CAS; stop when going lower
	   would require a different (slower) clock. */
	while (cas_mask & (1 << try_cas) && try_cas > 0) {
		s->selected_timings.CAS = try_cas;
		s->selected_timings.tclk = saved_timings->min_tCLK_cas[try_cas];
		if (s->selected_timings.tclk >= CTRL_MIN_TCLK_DDR2 &&
				saved_timings->min_tCLK_cas[try_cas] !=
				saved_timings->min_tCLK_cas[try_cas - 1])
			break;
		try_cas--;
	}


	if ((s->selected_timings.CAS < 3) || (s->selected_timings.tclk == 0))
		die("Could not find common memory frequency and CAS\n");

	/* Map the selected clock period onto the controller's clock enum. */
	switch (s->selected_timings.tclk) {
	case TCK_200MHZ:
	case TCK_266MHZ:
		/* FIXME: this works on vendor BIOS */
		die("Selected dram frequency not supported\n");
	case TCK_333MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_667MHz;
		break;
	case TCK_400MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_800MHz;
		break;
	}
}
123
/*
 * Log CPU core count and which platform features are enabled.
 * The tested bits are active-low: a cleared bit means the feature is
 * present/enabled.  Registers 0xe4/0xe8 are presumably the northbridge
 * capability ID (CAPID) words — confirm against the chipset datasheet.
 * Note: 's' is currently unused here.
 */
static void mchinfo_ddr2(struct sysinfo *s)
{
	const u32 eax = cpuid_ext(0x04, 0).eax;
	printk(BIOS_WARNING, "%d CPU cores\n", ((eax >> 26) & 0x3f) + 1);

	/* Bits 64..79 of the capability field live in config word 0xe8. */
	u32 capid = pci_read_config16(PCI_DEV(0, 0, 0), 0xe8);
	if (!(capid & (1<<(79-64))))
		printk(BIOS_WARNING, "iTPM enabled\n");

	/* Bits 32..63 live in config dword 0xe4. */
	capid = pci_read_config32(PCI_DEV(0, 0, 0), 0xe4);
	if (!(capid & (1<<(57-32))))
		printk(BIOS_WARNING, "ME enabled\n");

	if (!(capid & (1<<(56-32))))
		printk(BIOS_WARNING, "AMT enabled\n");

	if (!(capid & (1<<(48-32))))
		printk(BIOS_WARNING, "VT-d enabled\n");
}
143
Arthur Heymans3cf94032017-04-05 16:17:26 +0200144static int ddr2_save_dimminfo(u8 dimm_idx, u8 *raw_spd,
145 struct abs_timings *saved_timings, struct sysinfo *s)
Damien Zammit4b513a62015-08-20 00:37:05 +1000146{
Arthur Heymansfc31e442018-02-12 15:12:34 +0100147 struct dimm_attr_ddr2_st decoded_dimm;
Arthur Heymans3cf94032017-04-05 16:17:26 +0200148 int i;
Damien Zammit4b513a62015-08-20 00:37:05 +1000149
Arthur Heymans3cf94032017-04-05 16:17:26 +0200150 if (spd_decode_ddr2(&decoded_dimm, raw_spd) != SPD_STATUS_OK) {
151 printk(BIOS_DEBUG, "Problems decoding SPD\n");
152 return CB_ERR;
153 }
Damien Zammit7c2e5392016-07-24 03:28:42 +1000154
Julius Wernercd49cce2019-03-05 16:53:33 -0800155 if (CONFIG(DEBUG_RAM_SETUP))
Arthur Heymans3cf94032017-04-05 16:17:26 +0200156 dram_print_spd_ddr2(&decoded_dimm);
157
158 if (!(decoded_dimm.width & (0x08 | 0x10))) {
159
160 printk(BIOS_ERR,
161 "DIMM%d Unsupported width: x%d. Disabling dimm\n",
162 dimm_idx, s->dimms[dimm_idx].width);
163 return CB_ERR;
164 }
165 s->dimms[dimm_idx].width = (decoded_dimm.width >> 3) - 1;
166 /*
167 * This boils down to:
168 * "Except for the x16 configuration, all DDR2 devices have a
169 * 1KB page size. For the x16 configuration, the page size is 2KB
170 * for all densities except the 256Mb device, which has a 1KB page
171 * size." Micron, 'TN-47-16 Designing for High-Density DDR2 Memory'
Arthur Heymansd4e57622017-12-25 17:01:33 +0100172 * The formula is pagesize in KiB = width * 2^col_bits / 8.
Arthur Heymans3cf94032017-04-05 16:17:26 +0200173 */
Arthur Heymansd4e57622017-12-25 17:01:33 +0100174 s->dimms[dimm_idx].page_size = decoded_dimm.width *
175 (1 << decoded_dimm.col_bits) / 8;
Arthur Heymans3cf94032017-04-05 16:17:26 +0200176
177 switch (decoded_dimm.banks) {
178 case 4:
179 s->dimms[dimm_idx].n_banks = N_BANKS_4;
180 break;
181 case 8:
182 s->dimms[dimm_idx].n_banks = N_BANKS_8;
183 break;
184 default:
185 printk(BIOS_ERR,
186 "DIMM%d Unsupported #banks: x%d. Disabling dimm\n",
187 dimm_idx, decoded_dimm.banks);
188 return CB_ERR;
189 }
190
191 s->dimms[dimm_idx].ranks = decoded_dimm.ranks;
192 s->dimms[dimm_idx].rows = decoded_dimm.row_bits;
193 s->dimms[dimm_idx].cols = decoded_dimm.col_bits;
194
195 saved_timings->cas_supported &= decoded_dimm.cas_supported;
196
197 saved_timings->min_tRAS =
198 MAX(saved_timings->min_tRAS, decoded_dimm.tRAS);
199 saved_timings->min_tRP =
200 MAX(saved_timings->min_tRP, decoded_dimm.tRP);
201 saved_timings->min_tRCD =
202 MAX(saved_timings->min_tRCD, decoded_dimm.tRCD);
203 saved_timings->min_tWR =
204 MAX(saved_timings->min_tWR, decoded_dimm.tWR);
205 saved_timings->min_tRFC =
206 MAX(saved_timings->min_tRFC, decoded_dimm.tRFC);
207 saved_timings->min_tWTR =
208 MAX(saved_timings->min_tWTR, decoded_dimm.tWTR);
209 saved_timings->min_tRRD =
210 MAX(saved_timings->min_tRRD, decoded_dimm.tRRD);
211 saved_timings->min_tRTP =
212 MAX(saved_timings->min_tRTP, decoded_dimm.tRTP);
213 for (i = 0; i < 8; i++) {
214 if (!(saved_timings->cas_supported & (1 << i)))
215 saved_timings->min_tCLK_cas[i] = 0;
216 else
217 saved_timings->min_tCLK_cas[i] =
218 MAX(saved_timings->min_tCLK_cas[i],
219 decoded_dimm.cycle_time[i]);
220 }
Arthur Heymansadc571a2017-09-25 09:40:54 +0200221
222 s->dimms[dimm_idx].spd_crc = spd_ddr2_calc_unique_crc(raw_spd,
223 spd_decode_spd_size_ddr2(raw_spd[0]));
Arthur Heymans3cf94032017-04-05 16:17:26 +0200224 return CB_SUCCESS;
225}
226
Arthur Heymans1848ba32017-04-11 17:09:31 +0200227static void normalize_tCLK(u32 *tCLK)
228{
229 if (*tCLK <= TCK_666MHZ)
230 *tCLK = TCK_666MHZ;
231 else if (*tCLK <= TCK_533MHZ)
232 *tCLK = TCK_533MHZ;
233 else if (*tCLK <= TCK_400MHZ)
234 *tCLK = TCK_400MHZ;
235 else
236 *tCLK = 0;
237}
238
/*
 * Pick a DDR3 CAS latency / memory clock pair satisfying all DIMMs, the
 * FSB-derived minimum clock and the controller capability fuses.  Starts
 * at the fastest permitted clock and backs off to slower clocks until a
 * supported CAS fits; dies if none does.
 */
static void select_cas_dramfreq_ddr3(struct sysinfo *s,
		struct abs_timings *saved_timings)
{
	/*
	 * various constraints must be fulfilled:
	 * CAS * tCK < 20ns == 160MTB
	 * tCK_max >= tCK >= tCK_min
	 * CAS >= roundup(tAA_min/tCK)
	 * CAS supported
	 * AND BTW: Clock(MT) = 2000 / tCK(ns) - intel uses MTs but calls them MHz
	 */

	u32 min_tCLK;
	u8 try_CAS;
	/* Presumably a capability fuse field limiting the max memory clock
	   — confirm against the chipset datasheet. */
	u16 capid = (pci_read_config16(PCI_DEV(0, 0, 0), 0xea) >> 4) & 0x3f;

	/* The memory clock cannot be slower than the FSB allows. */
	switch (s->max_fsb) {
	default:
	case FSB_CLOCK_800MHz:
		min_tCLK = TCK_400MHZ;
		break;
	case FSB_CLOCK_1066MHz:
		min_tCLK = TCK_533MHZ;
		break;
	case FSB_CLOCK_1333MHz:
		min_tCLK = TCK_666MHZ;
		break;
	}

	/* Clamp further by the controller capability field (larger tCK
	   value = slower clock). */
	switch (capid >> 3) {
	default: /* Should not happen */
		min_tCLK = TCK_400MHZ;
		break;
	case 1:
		min_tCLK = MAX(min_tCLK, TCK_400MHZ);
		break;
	case 2:
		min_tCLK = MAX(min_tCLK, TCK_533MHZ);
		break;
	case 3: /* Only on P45 */
	case 0:
		min_tCLK = MAX(min_tCLK, TCK_666MHZ);
		break;
	}

	min_tCLK = MAX(min_tCLK, saved_timings->min_tclk);
	if (min_tCLK == 0) {
		printk(BIOS_ERR, "DRAM frequency is under lowest supported "
			"frequency (400 MHz). Increasing to 400 MHz "
			"as last resort");
		min_tCLK = TCK_400MHZ;
	}

	/* Search: for each candidate clock find the lowest supported CAS
	   meeting tAA; if none fits, fall back to a slower clock. */
	while (1) {
		normalize_tCLK(&min_tCLK);
		if (min_tCLK == 0)
			die("Couldn't find compatible clock / CAS settings.\n");
		try_CAS = DIV_ROUND_UP(saved_timings->min_tAA, min_tCLK);
		printk(BIOS_SPEW, "Trying CAS %u, tCK %u.\n", try_CAS, min_tCLK);
		for (; try_CAS <= DDR3_MAX_CAS; try_CAS++) {
			/*
			 * cas_supported is encoded like the SPD which starts
			 * at CAS=4.
			 */
			if ((saved_timings->cas_supported << 4) & (1 << try_CAS))
				break;
		}
		if ((try_CAS <= DDR3_MAX_CAS) && (try_CAS * min_tCLK < 20 * 256)) {
			/* Found good CAS. */
			printk(BIOS_SPEW, "Found compatible tCLK / CAS pair: %u / %u.\n",
				min_tCLK, try_CAS);
			break;
		}
		/*
		 * If no valid tCLK / CAS pair could be found for a tCLK
		 * increase it after which it gets normalised. This means
		 * that a lower frequency gets tried.
		 */
		min_tCLK++;
	}

	s->selected_timings.tclk = min_tCLK;
	s->selected_timings.CAS = try_CAS;

	/* Map the selected clock period onto the controller's clock enum. */
	switch (s->selected_timings.tclk) {
	case TCK_400MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_800MHz;
		break;
	case TCK_533MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_1066MHz;
		break;
	case TCK_666MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_1333MHz;
		break;
	}
}
335
/* With DDR3 and 533MHz mem clock and an enabled internal gfx device the display
   is not usable in non stacked mode, so select stacked mode accordingly */
static void workaround_stacked_mode(struct sysinfo *s)
{
	u32 deven;
	/* Only a problem on DDR3 */
	if (s->spd_type == DDR2)
		return;
	/* Does not matter if only one channel is populated */
	if (!CHANNEL_IS_POPULATED(s->dimms, 0)
			|| !CHANNEL_IS_POPULATED(s->dimms, 1))
		return;
	if (s->selected_timings.mem_clk != MEM_CLOCK_1066MHz)
		return;
	/* IGD0EN gets disabled if not present before this code runs */
	deven = pci_read_config32(PCI_DEV(0, 0, 0), D0F0_DEVEN);
	if (deven & IGD0EN)
		s->stacked_mode = 1;	/* consumed later by do_raminit */
}
355
Arthur Heymans1848ba32017-04-11 17:09:31 +0200356static int ddr3_save_dimminfo(u8 dimm_idx, u8 *raw_spd,
357 struct abs_timings *saved_timings, struct sysinfo *s)
358{
359 struct dimm_attr_st decoded_dimm;
360
361 if (spd_decode_ddr3(&decoded_dimm, raw_spd) != SPD_STATUS_OK)
362 return CB_ERR;
363
Julius Wernercd49cce2019-03-05 16:53:33 -0800364 if (CONFIG(DEBUG_RAM_SETUP))
Arthur Heymans1848ba32017-04-11 17:09:31 +0200365 dram_print_spd_ddr3(&decoded_dimm);
366
367 /* x4 DIMMs are not supported (true for both ddr2 and ddr3) */
368 if (!(decoded_dimm.width & (0x8 | 0x10))) {
369 printk(BIOS_ERR, "DIMM%d Unsupported width: x%d. Disabling dimm\n",
370 dimm_idx, s->dimms[dimm_idx].width);
371 return CB_ERR;
372 }
373 s->dimms[dimm_idx].width = (decoded_dimm.width >> 3) - 1;
374 /*
375 * This boils down to:
376 * "Except for the x16 configuration, all DDR3 devices have a
377 * 1KB page size. For the x16 configuration, the page size is 2KB
378 * for all densities except the 256Mb device, which has a 1KB page size."
379 * Micron, 'TN-47-16 Designing for High-Density DDR2 Memory'
380 */
381 s->dimms[dimm_idx].page_size = decoded_dimm.width *
382 (1 << decoded_dimm.col_bits) / 8;
383
384 s->dimms[dimm_idx].n_banks = N_BANKS_8; /* Always 8 banks on ddr3?? */
385
386 s->dimms[dimm_idx].ranks = decoded_dimm.ranks;
387 s->dimms[dimm_idx].rows = decoded_dimm.row_bits;
388 s->dimms[dimm_idx].cols = decoded_dimm.col_bits;
389
390 saved_timings->min_tRAS =
391 MAX(saved_timings->min_tRAS, decoded_dimm.tRAS);
392 saved_timings->min_tRP =
393 MAX(saved_timings->min_tRP, decoded_dimm.tRP);
394 saved_timings->min_tRCD =
395 MAX(saved_timings->min_tRCD, decoded_dimm.tRCD);
396 saved_timings->min_tWR =
397 MAX(saved_timings->min_tWR, decoded_dimm.tWR);
398 saved_timings->min_tRFC =
399 MAX(saved_timings->min_tRFC, decoded_dimm.tRFC);
400 saved_timings->min_tWTR =
401 MAX(saved_timings->min_tWTR, decoded_dimm.tWTR);
402 saved_timings->min_tRRD =
403 MAX(saved_timings->min_tRRD, decoded_dimm.tRRD);
404 saved_timings->min_tRTP =
405 MAX(saved_timings->min_tRTP, decoded_dimm.tRTP);
406 saved_timings->min_tAA =
407 MAX(saved_timings->min_tAA, decoded_dimm.tAA);
408 saved_timings->cas_supported &= decoded_dimm.cas_supported;
409
410 s->dimms[dimm_idx].spd_crc = spd_ddr3_calc_unique_crc(raw_spd,
411 raw_spd[0]);
Arthur Heymansf1287262017-12-25 18:30:01 +0100412
413 s->dimms[dimm_idx].mirrored = decoded_dimm.flags.pins_mirrored;
414
Arthur Heymans1848ba32017-04-11 17:09:31 +0200415 return CB_SUCCESS;
416}
417
418
Arthur Heymans3cf94032017-04-05 16:17:26 +0200419static void select_discrete_timings(struct sysinfo *s,
420 const struct abs_timings *timings)
421{
422 s->selected_timings.tRAS = DIV_ROUND_UP(timings->min_tRAS,
423 s->selected_timings.tclk);
424 s->selected_timings.tRP = DIV_ROUND_UP(timings->min_tRP,
425 s->selected_timings.tclk);
426 s->selected_timings.tRCD = DIV_ROUND_UP(timings->min_tRCD,
427 s->selected_timings.tclk);
428 s->selected_timings.tWR = DIV_ROUND_UP(timings->min_tWR,
429 s->selected_timings.tclk);
430 s->selected_timings.tRFC = DIV_ROUND_UP(timings->min_tRFC,
431 s->selected_timings.tclk);
432 s->selected_timings.tWTR = DIV_ROUND_UP(timings->min_tWTR,
433 s->selected_timings.tclk);
434 s->selected_timings.tRRD = DIV_ROUND_UP(timings->min_tRRD,
435 s->selected_timings.tclk);
436 s->selected_timings.tRTP = DIV_ROUND_UP(timings->min_tRTP,
437 s->selected_timings.tclk);
438}
/* Dump the final FSB/DDR clocks and discrete timings (in clock cycles). */
static void print_selected_timings(struct sysinfo *s)
{
	printk(BIOS_DEBUG, "Selected timings:\n");
	printk(BIOS_DEBUG, "\tFSB: %dMHz\n",
		fsb_to_mhz(s->selected_timings.fsb_clk));
	printk(BIOS_DEBUG, "\tDDR: %dMHz\n",
		ddr_to_mhz(s->selected_timings.mem_clk));

	printk(BIOS_DEBUG, "\tCAS: %d\n", s->selected_timings.CAS);
	printk(BIOS_DEBUG, "\ttRAS: %d\n", s->selected_timings.tRAS);
	printk(BIOS_DEBUG, "\ttRP: %d\n", s->selected_timings.tRP);
	printk(BIOS_DEBUG, "\ttRCD: %d\n", s->selected_timings.tRCD);
	printk(BIOS_DEBUG, "\ttWR: %d\n", s->selected_timings.tWR);
	printk(BIOS_DEBUG, "\ttRFC: %d\n", s->selected_timings.tRFC);
	printk(BIOS_DEBUG, "\ttWTR: %d\n", s->selected_timings.tWTR);
	printk(BIOS_DEBUG, "\ttRRD: %d\n", s->selected_timings.tRRD);
	printk(BIOS_DEBUG, "\ttRTP: %d\n", s->selected_timings.tRTP);
}
457
/*
 * Read the FSB frequency strap from MCHBAR register 0xc00 (low 3 bits)
 * and record it as both the maximum and the selected FSB clock.
 * Falls back to 800 MHz on unknown encodings.
 */
static void find_fsb_speed(struct sysinfo *s)
{
	switch (MCHBAR32(0xc00) & 0x7) {
	case 0x0:
		s->max_fsb = FSB_CLOCK_1066MHz;
		break;
	case 0x2:
		s->max_fsb = FSB_CLOCK_800MHz;
		break;
	case 0x4:
		s->max_fsb = FSB_CLOCK_1333MHz;
		break;
	default:
		s->max_fsb = FSB_CLOCK_800MHz;
		printk(BIOS_WARNING, "Can't detect FSB, setting 800MHz\n");
		break;
	}
	s->selected_timings.fsb_clk = s->max_fsb;
}
Damien Zammit4b513a62015-08-20 00:37:05 +1000477
/*
 * Probe every DIMM slot, decode SPDs (DDR2 or DDR3 — mixing dies),
 * accumulate worst-case timings and then select the final CAS/clock and
 * discrete timings.  Dies when no usable memory is found.
 */
static void decode_spd_select_timings(struct sysinfo *s)
{
	unsigned int device;
	/* Starts with both types allowed; first decoded DIMM narrows it. */
	u8 dram_type_mask = (1 << DDR2) | (1 << DDR3);
	u8 dimm_mask = 0;
	u8 raw_spd[256];
	int i, j;
	struct abs_timings saved_timings;
	memset(&saved_timings, 0, sizeof(saved_timings));
	saved_timings.cas_supported = UINT32_MAX;

	FOR_EACH_DIMM(i) {
		s->dimms[i].card_type = RAW_CARD_POPULATED;
		device = s->spd_map[i];
		if (!device) {
			/* No SMBus address assigned to this slot. */
			s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
			continue;
		}
		switch (smbus_read_byte(s->spd_map[i], SPD_MEMORY_TYPE)) {
		case DDR2SPD:
			dram_type_mask &= 1 << DDR2;
			s->spd_type = DDR2;
			break;
		case DDR3SPD:
			dram_type_mask &= 1 << DDR3;
			s->spd_type = DDR3;
			break;
		default:
			/* Unreadable or unknown memory type: treat as empty. */
			s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
			continue;
		}
		if (!dram_type_mask)
			die("Mixing up dimm types is not supported!\n");

		printk(BIOS_DEBUG, "Decoding dimm %d\n", i);
		/* Prefer a fast block read; fall back to byte-wise reads. */
		if (i2c_eeprom_read(device, 0, 128, raw_spd) != 128) {
			printk(BIOS_DEBUG, "i2c block operation failed,"
				" trying smbus byte operation.\n");
			for (j = 0; j < 128; j++)
				raw_spd[j] = smbus_read_byte(device, j);
		}

		if (s->spd_type == DDR2){
			if (ddr2_save_dimminfo(i, raw_spd, &saved_timings, s)) {
				printk(BIOS_WARNING,
					"Encountered problems with SPD, "
					"skipping this DIMM.\n");
				s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
				continue;
			}
		} else { /* DDR3 */
			if (ddr3_save_dimminfo(i, raw_spd, &saved_timings, s)) {
				printk(BIOS_WARNING,
					"Encountered problems with SPD, "
					"skipping this DIMM.\n");
				/* something in decoded SPD was unsupported */
				s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
				continue;
			}
		}
		dimm_mask |= (1 << i);
	}
	if (!dimm_mask)
		die("No memory installed.\n");

	if (s->spd_type == DDR2)
		select_cas_dramfreq_ddr2(s, &saved_timings);
	else
		select_cas_dramfreq_ddr3(s, &saved_timings);
	select_discrete_timings(s, &saved_timings);
	workaround_stacked_mode(s);
}
550
551static void find_dimm_config(struct sysinfo *s)
552{
553 int chan, i;
554
555 FOR_EACH_POPULATED_CHANNEL(s->dimms, chan) {
556 FOR_EACH_POPULATED_DIMM_IN_CHANNEL(s->dimms, chan, i) {
557 int dimm_config;
558 if (s->dimms[i].ranks == 1) {
559 if (s->dimms[i].width == 0) /* x8 */
560 dimm_config = 1;
561 else /* x16 */
562 dimm_config = 3;
563 } else {
564 if (s->dimms[i].width == 0) /* x8 */
565 dimm_config = 2;
566 else
567 die("Dual-rank x16 not supported\n");
568 }
569 s->dimm_config[chan] |=
570 dimm_config << (i % DIMMS_PER_CHANNEL) * 2;
571 }
572 printk(BIOS_DEBUG, " Config[CH%d] : %d\n", chan,
573 s->dimm_config[chan]);
574 }
575
Damien Zammit4b513a62015-08-20 00:37:05 +1000576}
577
/*
 * On a non-cold boot, warn if the DRAM channels may not be in self
 * refresh, and if the previous raminit did not complete (bit 7 of LPC
 * config reg 0xa2 still clear) toggle northbridge reg 0xf0 bit 2 and do
 * a full reset so the controller comes up in a known state.  The bit is
 * set here and cleared again after raminit succeeds (see sdram_initialize).
 */
static void checkreset_ddr2(int boot_path)
{
	u8 pmcon2;
	u32 pmsts;

	if (boot_path >= 1) {
		/* Warm reset or S3: channels should be in self refresh. */
		pmsts = MCHBAR32(PMSTS_MCHBAR);
		if (!(pmsts & 1))
			printk(BIOS_DEBUG,
				"Channel 0 possibly not in self refresh\n");
		if (!(pmsts & 2))
			printk(BIOS_DEBUG,
				"Channel 1 possibly not in self refresh\n");
	}

	pmcon2 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2);

	if (pmcon2 & 0x80) {
		pmcon2 &= ~0x80;
		pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, pmcon2);

		/* do magic 0xf0 thing. */
		pci_and_config8(PCI_DEV(0, 0, 0), 0xf0, ~(1 << 2));

		pci_or_config8(PCI_DEV(0, 0, 0), 0xf0, (1 << 2));

		/* Does not return. */
		full_reset();
	}
	/* Mark raminit as in progress; cleared on successful completion. */
	pmcon2 |= 0x80;
	pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, pmcon2);
}
609
Damien Zammit4b513a62015-08-20 00:37:05 +1000610/**
611 * @param boot_path: 0 = normal, 1 = reset, 2 = resume from s3
612 */
613void sdram_initialize(int boot_path, const u8 *spd_map)
614{
Arthur Heymansadc571a2017-09-25 09:40:54 +0200615 struct sysinfo s, *ctrl_cached;
Damien Zammit4b513a62015-08-20 00:37:05 +1000616 u8 reg8;
Arthur Heymansadc571a2017-09-25 09:40:54 +0200617 int fast_boot, cbmem_was_inited, cache_not_found;
618 struct region_device rdev;
Damien Zammit4b513a62015-08-20 00:37:05 +1000619
Elyes HAOUASf5a57a82019-01-08 22:15:53 +0100620 timestamp_add_now(TS_BEFORE_INITRAM);
Damien Zammit4b513a62015-08-20 00:37:05 +1000621 printk(BIOS_DEBUG, "Setting up RAM controller.\n");
622
Arthur Heymans70a1dda2017-03-09 01:58:24 +0100623 pci_write_config8(PCI_DEV(0, 0, 0), 0xdf, 0xff);
Damien Zammit4b513a62015-08-20 00:37:05 +1000624
625 memset(&s, 0, sizeof(struct sysinfo));
626
Arthur Heymansadc571a2017-09-25 09:40:54 +0200627 cache_not_found = mrc_cache_get_current(MRC_TRAINING_DATA,
628 MRC_CACHE_VERSION, &rdev);
Damien Zammit4b513a62015-08-20 00:37:05 +1000629
Arthur Heymansadc571a2017-09-25 09:40:54 +0200630 if (cache_not_found || (region_device_sz(&rdev) < sizeof(s))) {
631 if (boot_path == BOOT_PATH_RESUME) {
632 /* Failed S3 resume, reset to come up cleanly */
Elyes HAOUASb559b3c2019-04-28 17:52:10 +0200633 system_reset();
Arthur Heymansdf946b82018-06-14 10:53:51 +0200634 } else if (boot_path == BOOT_PATH_WARM_RESET) {
635 /* On warm reset some of dram calibrations fail
636 and therefore requiring valid cached settings */
Elyes HAOUASb559b3c2019-04-28 17:52:10 +0200637 full_reset();
Arthur Heymansadc571a2017-09-25 09:40:54 +0200638 }
639 ctrl_cached = NULL;
640 } else {
641 ctrl_cached = rdev_mmap_full(&rdev);
642 }
Arthur Heymansbb5e77c2016-11-30 20:37:29 +0100643
Arthur Heymansadc571a2017-09-25 09:40:54 +0200644 /* verify MRC cache for fast boot */
645 if (boot_path != BOOT_PATH_RESUME && ctrl_cached) {
646 /* check SPD checksum to make sure the DIMMs haven't been
647 * replaced */
648 fast_boot = verify_spds(spd_map, ctrl_cached) == CB_SUCCESS;
Arthur Heymansb0c6cff2018-09-05 20:39:39 +0200649 if (!fast_boot) {
Arthur Heymansadc571a2017-09-25 09:40:54 +0200650 printk(BIOS_DEBUG, "SPD checksums don't match,"
651 " dimm's have been replaced\n");
Arthur Heymansb0c6cff2018-09-05 20:39:39 +0200652 } else {
653 find_fsb_speed(&s);
654 fast_boot = s.max_fsb == ctrl_cached->max_fsb;
655 if (!fast_boot)
656 printk(BIOS_DEBUG,
657 "CPU FSB does not match and has been replaced\n");
658 }
Arthur Heymansadc571a2017-09-25 09:40:54 +0200659 } else {
660 fast_boot = boot_path == BOOT_PATH_RESUME;
661 }
Damien Zammit4b513a62015-08-20 00:37:05 +1000662
Arthur Heymansadc571a2017-09-25 09:40:54 +0200663 if (fast_boot) {
664 printk(BIOS_DEBUG, "Using cached raminit settings\n");
665 memcpy(&s, ctrl_cached, sizeof(s));
666 s.boot_path = boot_path;
667 mchinfo_ddr2(&s);
668 print_selected_timings(&s);
669 } else {
670 s.boot_path = boot_path;
671 s.spd_map[0] = spd_map[0];
672 s.spd_map[1] = spd_map[1];
673 s.spd_map[2] = spd_map[2];
674 s.spd_map[3] = spd_map[3];
675 checkreset_ddr2(s.boot_path);
Damien Zammit4b513a62015-08-20 00:37:05 +1000676
Arthur Heymansadc571a2017-09-25 09:40:54 +0200677 /* Detect dimms per channel */
678 reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xe9);
679 printk(BIOS_DEBUG, "Dimms per channel: %d\n",
680 (reg8 & 0x10) ? 1 : 2);
681
682 mchinfo_ddr2(&s);
683
684 find_fsb_speed(&s);
685 decode_spd_select_timings(&s);
686 print_selected_timings(&s);
687 find_dimm_config(&s);
688 }
Damien Zammit4b513a62015-08-20 00:37:05 +1000689
Arthur Heymansa2cc2312017-05-15 10:13:36 +0200690 do_raminit(&s, fast_boot);
Damien Zammit4b513a62015-08-20 00:37:05 +1000691
Angel Pons4a9569a2020-06-08 01:39:25 +0200692 pci_and_config8(PCI_DEV(0, 0x1f, 0), 0xa2, (u8)~0x80);
Damien Zammit4b513a62015-08-20 00:37:05 +1000693
Angel Pons4a9569a2020-06-08 01:39:25 +0200694 pci_or_config8(PCI_DEV(0, 0, 0), 0xf4, 1);
695
Damien Zammit4b513a62015-08-20 00:37:05 +1000696 printk(BIOS_DEBUG, "RAM initialization finished.\n");
Arthur Heymansadc571a2017-09-25 09:40:54 +0200697
698 cbmem_was_inited = !cbmem_recovery(s.boot_path == BOOT_PATH_RESUME);
699 if (!fast_boot)
700 mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
701 &s, sizeof(s));
702 if (s.boot_path == BOOT_PATH_RESUME && !cbmem_was_inited) {
703 /* Failed S3 resume, reset to come up cleanly */
Elyes HAOUASb559b3c2019-04-28 17:52:10 +0200704 system_reset();
Arthur Heymansadc571a2017-09-25 09:40:54 +0200705 }
Elyes HAOUASf5a57a82019-01-08 22:15:53 +0100706
707 timestamp_add_now(TS_AFTER_INITRAM);
Elyes HAOUASf5a57a82019-01-08 22:15:53 +0100708 printk(BIOS_DEBUG, "Memory initialized\n");
Damien Zammit4b513a62015-08-20 00:37:05 +1000709}