blob: ea00f293e1db8155ed382cff0201bc9558e58051 [file] [log] [blame]
Damien Zammit4b513a62015-08-20 00:37:05 +10001/*
2 * This file is part of the coreboot project.
3 *
4 * Copyright (C) 2015 Damien Zammit <damien@zamaudio.com>
5 *
6 * This program is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU General Public License as
8 * published by the Free Software Foundation; either version 2 of
9 * the License, or (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
15 */
16
17#include <arch/io.h>
Kyösti Mälkkif1b58b72019-03-01 13:43:02 +020018#include <device/pci_ops.h>
Damien Zammit4b513a62015-08-20 00:37:05 +100019#include <cbmem.h>
20#include <console/console.h>
21#include <cpu/x86/cache.h>
22#include <cpu/x86/mtrr.h>
Elyes HAOUASd2b9ec12018-10-27 09:41:02 +020023#include <arch/cpu.h>
Damien Zammit4b513a62015-08-20 00:37:05 +100024#include <delay.h>
25#include <halt.h>
Elyes HAOUASf5a57a82019-01-08 22:15:53 +010026#include <lib.h>
Martin Rothcbe38922016-01-05 19:40:41 -070027#include "iomap.h"
Arthur Heymans349e0852017-04-09 20:48:37 +020028#if IS_ENABLED(CONFIG_SOUTHBRIDGE_INTEL_I82801GX)
Martin Rothcbe38922016-01-05 19:40:41 -070029#include <southbridge/intel/i82801gx/i82801gx.h> /* smbus_read_byte */
Arthur Heymans349e0852017-04-09 20:48:37 +020030#else
31#include <southbridge/intel/i82801jx/i82801jx.h> /* smbus_read_byte */
32#endif
Martin Rothcbe38922016-01-05 19:40:41 -070033#include "x4x.h"
Damien Zammit4b513a62015-08-20 00:37:05 +100034#include <spd.h>
35#include <string.h>
Arthur Heymans3cf94032017-04-05 16:17:26 +020036#include <device/dram/ddr2.h>
Arthur Heymans1848ba32017-04-11 17:09:31 +020037#include <device/dram/ddr3.h>
Arthur Heymansadc571a2017-09-25 09:40:54 +020038#include <mrc_cache.h>
Elyes HAOUASf5a57a82019-01-08 22:15:53 +010039#include <timestamp.h>
Arthur Heymansadc571a2017-09-25 09:40:54 +020040
41#define MRC_CACHE_VERSION 0
Damien Zammit4b513a62015-08-20 00:37:05 +100042
/* Read a single byte at 'address' from the SPD EEPROM on SMBus slave
   'device'. Returns the byte value, or a negative value on SMBus error
   (propagated from smbus_read_byte). */
static inline int spd_read_byte(unsigned int device, unsigned int address)
{
	return smbus_read_byte(device, address);
}
47
Arthur Heymans1848ba32017-04-11 17:09:31 +020048static u16 ddr2_get_crc(u8 device, u8 len)
49{
50 u8 raw_spd[128] = {};
Kyösti Mälkkic01a5052019-01-30 09:39:23 +020051 i2c_eeprom_read(device, 64, 9, &raw_spd[64]);
52 i2c_eeprom_read(device, 93, 6, &raw_spd[93]);
Arthur Heymans1848ba32017-04-11 17:09:31 +020053 return spd_ddr2_calc_unique_crc(raw_spd, len);
54}
55
56static u16 ddr3_get_crc(u8 device, u8 len)
57{
58 u8 raw_spd[256] = {};
Kyösti Mälkkic01a5052019-01-30 09:39:23 +020059 i2c_eeprom_read(device, 117, 11, &raw_spd[117]);
Arthur Heymans1848ba32017-04-11 17:09:31 +020060 return spd_ddr3_calc_unique_crc(raw_spd, len);
61}
62
Arthur Heymansadc571a2017-09-25 09:40:54 +020063static enum cb_err verify_spds(const u8 *spd_map,
64 const struct sysinfo *ctrl_cached)
65{
66 int i;
Arthur Heymansadc571a2017-09-25 09:40:54 +020067 u16 crc;
68
69 for (i = 0; i < TOTAL_DIMMS; i++) {
70 if (!(spd_map[i]))
71 continue;
72 int len = smbus_read_byte(spd_map[i], 0);
73 if (len < 0 && ctrl_cached->dimms[i].card_type
74 == RAW_CARD_UNPOPULATED)
75 continue;
76 if (len > 0 && ctrl_cached->dimms[i].card_type
77 == RAW_CARD_UNPOPULATED)
78 return CB_ERR;
79
Arthur Heymans1848ba32017-04-11 17:09:31 +020080 if (ctrl_cached->spd_type == DDR2)
81 crc = ddr2_get_crc(spd_map[i], len);
82 else
83 crc = ddr3_get_crc(spd_map[i], len);
84
Arthur Heymansadc571a2017-09-25 09:40:54 +020085 if (crc != ctrl_cached->dimms[i].spd_crc)
86 return CB_ERR;
87 }
88 return CB_SUCCESS;
89}
90
Arthur Heymans3cf94032017-04-05 16:17:26 +020091struct abs_timings {
92 u32 min_tclk;
93 u32 min_tRAS;
94 u32 min_tRP;
95 u32 min_tRCD;
96 u32 min_tWR;
97 u32 min_tRFC;
98 u32 min_tWTR;
99 u32 min_tRRD;
100 u32 min_tRTP;
Arthur Heymans1848ba32017-04-11 17:09:31 +0200101 u32 min_tAA;
Arthur Heymans3cf94032017-04-05 16:17:26 +0200102 u32 min_tCLK_cas[8];
103 u32 cas_supported;
104};
105
106#define CTRL_MIN_TCLK_DDR2 TCK_400MHZ
107
/*
 * Pick the highest CAS latency / fastest clock combination that all
 * DDR2 DIMMs and the controller support, then derive the memory clock.
 * Dies when no acceptable combination exists or the resulting frequency
 * is unsupported.
 */
static void select_cas_dramfreq_ddr2(struct sysinfo *s,
			const struct abs_timings *saved_timings)
{
	u8 try_cas;
	/* Currently only these CAS are supported */
	u8 cas_mask = SPD_CAS_LATENCY_DDR2_5 | SPD_CAS_LATENCY_DDR2_6;

	cas_mask &= saved_timings->cas_supported;
	/* Start from the highest CAS latency any DIMM advertises */
	try_cas = spd_get_msbs(cas_mask);

	/* Walk downwards while the CAS is still supported, keeping the
	   matching minimal clock period for each candidate. Stop when the
	   clock is fast enough for the controller and actually differs
	   from the next-lower CAS (i.e. lowering CAS would cost speed). */
	while (cas_mask & (1 << try_cas) && try_cas > 0) {
		s->selected_timings.CAS = try_cas;
		s->selected_timings.tclk = saved_timings->min_tCLK_cas[try_cas];
		if (s->selected_timings.tclk >= CTRL_MIN_TCLK_DDR2 &&
				saved_timings->min_tCLK_cas[try_cas] !=
				saved_timings->min_tCLK_cas[try_cas - 1])
			break;
		try_cas--;
	}


	if ((s->selected_timings.CAS < 3) || (s->selected_timings.tclk == 0))
		die("Could not find common memory frequency and CAS\n");

	/* Map the selected clock period onto the controller's clock enum */
	switch (s->selected_timings.tclk) {
	case TCK_200MHZ:
	case TCK_266MHZ:
		/* FIXME: this works on vendor BIOS */
		die("Selected dram frequency not supported\n");
	case TCK_333MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_667MHz;
		break;
	case TCK_400MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_800MHz;
		break;
	}
}
145
/*
 * Print informational details about the CPU and MCH: core count and
 * which platform features (iTPM/ME/AMT/VT-d) the capability ID
 * registers report as enabled. Purely diagnostic; does not modify 's'.
 */
static void mchinfo_ddr2(struct sysinfo *s)
{
	/* CPUID leaf 4: cores-per-package is EAX[31:26] + 1 */
	const u32 eax = cpuid_ext(0x04, 0).eax;
	printk(BIOS_WARNING, "%d CPU cores\n", ((eax >> 26) & 0x3f) + 1);

	/* CAPID bits below use their datasheet bit numbers; the register
	   reads are offset so only the relevant 16/32-bit chunk is read.
	   NOTE(review): a cleared capability bit means "not fused off",
	   hence the inverted tests — confirm against the 4-series
	   chipset datasheet. */
	u32 capid = pci_read_config16(PCI_DEV(0, 0, 0), 0xe8);
	if (!(capid & (1<<(79-64))))
		printk(BIOS_WARNING, "iTPM enabled\n");

	capid = pci_read_config32(PCI_DEV(0, 0, 0), 0xe4);
	if (!(capid & (1<<(57-32))))
		printk(BIOS_WARNING, "ME enabled\n");

	if (!(capid & (1<<(56-32))))
		printk(BIOS_WARNING, "AMT enabled\n");

	if (!(capid & (1<<(48-32))))
		printk(BIOS_WARNING, "VT-d enabled\n");
}
165
Arthur Heymans3cf94032017-04-05 16:17:26 +0200166static int ddr2_save_dimminfo(u8 dimm_idx, u8 *raw_spd,
167 struct abs_timings *saved_timings, struct sysinfo *s)
Damien Zammit4b513a62015-08-20 00:37:05 +1000168{
Arthur Heymansfc31e442018-02-12 15:12:34 +0100169 struct dimm_attr_ddr2_st decoded_dimm;
Arthur Heymans3cf94032017-04-05 16:17:26 +0200170 int i;
Damien Zammit4b513a62015-08-20 00:37:05 +1000171
Arthur Heymans3cf94032017-04-05 16:17:26 +0200172 if (spd_decode_ddr2(&decoded_dimm, raw_spd) != SPD_STATUS_OK) {
173 printk(BIOS_DEBUG, "Problems decoding SPD\n");
174 return CB_ERR;
175 }
Damien Zammit7c2e5392016-07-24 03:28:42 +1000176
Arthur Heymans3cf94032017-04-05 16:17:26 +0200177 if (IS_ENABLED(CONFIG_DEBUG_RAM_SETUP))
178 dram_print_spd_ddr2(&decoded_dimm);
179
180 if (!(decoded_dimm.width & (0x08 | 0x10))) {
181
182 printk(BIOS_ERR,
183 "DIMM%d Unsupported width: x%d. Disabling dimm\n",
184 dimm_idx, s->dimms[dimm_idx].width);
185 return CB_ERR;
186 }
187 s->dimms[dimm_idx].width = (decoded_dimm.width >> 3) - 1;
188 /*
189 * This boils down to:
190 * "Except for the x16 configuration, all DDR2 devices have a
191 * 1KB page size. For the x16 configuration, the page size is 2KB
192 * for all densities except the 256Mb device, which has a 1KB page
193 * size." Micron, 'TN-47-16 Designing for High-Density DDR2 Memory'
Arthur Heymansd4e57622017-12-25 17:01:33 +0100194 * The formula is pagesize in KiB = width * 2^col_bits / 8.
Arthur Heymans3cf94032017-04-05 16:17:26 +0200195 */
Arthur Heymansd4e57622017-12-25 17:01:33 +0100196 s->dimms[dimm_idx].page_size = decoded_dimm.width *
197 (1 << decoded_dimm.col_bits) / 8;
Arthur Heymans3cf94032017-04-05 16:17:26 +0200198
199 switch (decoded_dimm.banks) {
200 case 4:
201 s->dimms[dimm_idx].n_banks = N_BANKS_4;
202 break;
203 case 8:
204 s->dimms[dimm_idx].n_banks = N_BANKS_8;
205 break;
206 default:
207 printk(BIOS_ERR,
208 "DIMM%d Unsupported #banks: x%d. Disabling dimm\n",
209 dimm_idx, decoded_dimm.banks);
210 return CB_ERR;
211 }
212
213 s->dimms[dimm_idx].ranks = decoded_dimm.ranks;
214 s->dimms[dimm_idx].rows = decoded_dimm.row_bits;
215 s->dimms[dimm_idx].cols = decoded_dimm.col_bits;
216
217 saved_timings->cas_supported &= decoded_dimm.cas_supported;
218
219 saved_timings->min_tRAS =
220 MAX(saved_timings->min_tRAS, decoded_dimm.tRAS);
221 saved_timings->min_tRP =
222 MAX(saved_timings->min_tRP, decoded_dimm.tRP);
223 saved_timings->min_tRCD =
224 MAX(saved_timings->min_tRCD, decoded_dimm.tRCD);
225 saved_timings->min_tWR =
226 MAX(saved_timings->min_tWR, decoded_dimm.tWR);
227 saved_timings->min_tRFC =
228 MAX(saved_timings->min_tRFC, decoded_dimm.tRFC);
229 saved_timings->min_tWTR =
230 MAX(saved_timings->min_tWTR, decoded_dimm.tWTR);
231 saved_timings->min_tRRD =
232 MAX(saved_timings->min_tRRD, decoded_dimm.tRRD);
233 saved_timings->min_tRTP =
234 MAX(saved_timings->min_tRTP, decoded_dimm.tRTP);
235 for (i = 0; i < 8; i++) {
236 if (!(saved_timings->cas_supported & (1 << i)))
237 saved_timings->min_tCLK_cas[i] = 0;
238 else
239 saved_timings->min_tCLK_cas[i] =
240 MAX(saved_timings->min_tCLK_cas[i],
241 decoded_dimm.cycle_time[i]);
242 }
Arthur Heymansadc571a2017-09-25 09:40:54 +0200243
244 s->dimms[dimm_idx].spd_crc = spd_ddr2_calc_unique_crc(raw_spd,
245 spd_decode_spd_size_ddr2(raw_spd[0]));
Arthur Heymans3cf94032017-04-05 16:17:26 +0200246 return CB_SUCCESS;
247}
248
Arthur Heymans1848ba32017-04-11 17:09:31 +0200249static void normalize_tCLK(u32 *tCLK)
250{
251 if (*tCLK <= TCK_666MHZ)
252 *tCLK = TCK_666MHZ;
253 else if (*tCLK <= TCK_533MHZ)
254 *tCLK = TCK_533MHZ;
255 else if (*tCLK <= TCK_400MHZ)
256 *tCLK = TCK_400MHZ;
257 else
258 *tCLK = 0;
259}
260
/*
 * Select a DDR3 memory clock and CAS latency satisfying all DIMMs,
 * the FSB and the chipset's capability fuses, then set
 * s->selected_timings accordingly. Dies if nothing fits.
 */
static void select_cas_dramfreq_ddr3(struct sysinfo *s,
			struct abs_timings *saved_timings)
{
	/*
	 * various constraints must be fulfilled:
	 * CAS * tCK < 20ns == 160MTB
	 * tCK_max >= tCK >= tCK_min
	 * CAS >= roundup(tAA_min/tCK)
	 * CAS supported
	 * AND BTW: Clock(MT) = 2000 / tCK(ns) - intel uses MTs but calls them MHz
	 */

	u32 min_tCLK;
	u8 try_CAS;
	/* CAPID field limiting the maximum memory speed (fused per SKU) */
	u16 capid = (pci_read_config16(PCI_DEV(0, 0, 0), 0xea) >> 4) & 0x3f;

	/* The memory clock may not be faster than the FSB allows */
	switch (s->max_fsb) {
	default:
	case FSB_CLOCK_800MHz:
		min_tCLK = TCK_400MHZ;
		break;
	case FSB_CLOCK_1066MHz:
		min_tCLK = TCK_533MHZ;
		break;
	case FSB_CLOCK_1333MHz:
		min_tCLK = TCK_666MHZ;
		break;
	}

	/* Apply the SKU's fused memory speed limit */
	switch (capid >> 3) {
	default: /* Should not happen */
		min_tCLK = TCK_400MHZ;
		break;
	case 1:
		min_tCLK = MAX(min_tCLK, TCK_400MHZ);
		break;
	case 2:
		min_tCLK = MAX(min_tCLK, TCK_533MHZ);
		break;
	case 3: /* Only on P45 */
	case 0:
		min_tCLK = MAX(min_tCLK, TCK_666MHZ);
		break;
	}

	/* Never clock the DIMMs faster than they support */
	min_tCLK = MAX(min_tCLK, saved_timings->min_tclk);
	if (min_tCLK == 0) {
		printk(BIOS_ERR, "DRAM frequency is under lowest supported "
			"frequency (400 MHz). Increasing to 400 MHz "
			"as last resort");
		min_tCLK = TCK_400MHZ;
	}

	/* Search for the fastest clock with a usable CAS latency */
	while (1) {
		normalize_tCLK(&min_tCLK);
		if (min_tCLK == 0)
			die("Couldn't find compatible clock / CAS settings.\n");
		/* Smallest CAS meeting tAA at this clock */
		try_CAS = DIV_ROUND_UP(saved_timings->min_tAA, min_tCLK);
		printk(BIOS_SPEW, "Trying CAS %u, tCK %u.\n", try_CAS, min_tCLK);
		for (; try_CAS <= DDR3_MAX_CAS; try_CAS++) {
			/*
			 * cas_supported is encoded like the SPD which starts
			 * at CAS=4.
			 */
			if ((saved_timings->cas_supported << 4) & (1 << try_CAS))
				break;
		}
		/* 20 * 256 is 20ns in 1/256ns units: CAS * tCK < 20ns */
		if ((try_CAS <= DDR3_MAX_CAS) && (try_CAS * min_tCLK < 20 * 256)) {
			/* Found good CAS. */
			printk(BIOS_SPEW, "Found compatible tCLK / CAS pair: %u / %u.\n",
				min_tCLK, try_CAS);
			break;
		}
		/*
		 * If no valid tCLK / CAS pair could be found for a tCLK
		 * increase it after which it gets normalised. This means
		 * that a lower frequency gets tried.
		 */
		min_tCLK++;
	}

	s->selected_timings.tclk = min_tCLK;
	s->selected_timings.CAS = try_CAS;

	/* Map the clock period onto the controller's clock enum */
	switch (s->selected_timings.tclk) {
	case TCK_400MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_800MHz;
		break;
	case TCK_533MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_1066MHz;
		break;
	case TCK_666MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_1333MHz;
		break;
	}
}
357
Arthur Heymans5a9dbde2018-05-26 15:05:09 +0200358/* With DDR3 and 533MHz mem clock and an enabled internal gfx device the display
359 is not usable in non stacked mode, so select stacked mode accordingly */
360static void workaround_stacked_mode(struct sysinfo *s)
361{
362 u32 deven;
363 /* Only a problem on DDR3 */
364 if (s->spd_type == DDR2)
365 return;
366 /* Does not matter if only one channel is populated */
367 if (!CHANNEL_IS_POPULATED(s->dimms, 0)
368 || !CHANNEL_IS_POPULATED(s->dimms, 1))
369 return;
370 if (s->selected_timings.mem_clk != MEM_CLOCK_1066MHz)
371 return;
372 /* IGD0EN gets disabled if not present before this code runs */
373 deven = pci_read_config32(PCI_DEV(0, 0, 0), D0F0_DEVEN);
374 if (deven & IGD0EN)
375 s->stacked_mode = 1;
376}
377
Arthur Heymans1848ba32017-04-11 17:09:31 +0200378static int ddr3_save_dimminfo(u8 dimm_idx, u8 *raw_spd,
379 struct abs_timings *saved_timings, struct sysinfo *s)
380{
381 struct dimm_attr_st decoded_dimm;
382
383 if (spd_decode_ddr3(&decoded_dimm, raw_spd) != SPD_STATUS_OK)
384 return CB_ERR;
385
386 if (IS_ENABLED(CONFIG_DEBUG_RAM_SETUP))
387 dram_print_spd_ddr3(&decoded_dimm);
388
389 /* x4 DIMMs are not supported (true for both ddr2 and ddr3) */
390 if (!(decoded_dimm.width & (0x8 | 0x10))) {
391 printk(BIOS_ERR, "DIMM%d Unsupported width: x%d. Disabling dimm\n",
392 dimm_idx, s->dimms[dimm_idx].width);
393 return CB_ERR;
394 }
395 s->dimms[dimm_idx].width = (decoded_dimm.width >> 3) - 1;
396 /*
397 * This boils down to:
398 * "Except for the x16 configuration, all DDR3 devices have a
399 * 1KB page size. For the x16 configuration, the page size is 2KB
400 * for all densities except the 256Mb device, which has a 1KB page size."
401 * Micron, 'TN-47-16 Designing for High-Density DDR2 Memory'
402 */
403 s->dimms[dimm_idx].page_size = decoded_dimm.width *
404 (1 << decoded_dimm.col_bits) / 8;
405
406 s->dimms[dimm_idx].n_banks = N_BANKS_8; /* Always 8 banks on ddr3?? */
407
408 s->dimms[dimm_idx].ranks = decoded_dimm.ranks;
409 s->dimms[dimm_idx].rows = decoded_dimm.row_bits;
410 s->dimms[dimm_idx].cols = decoded_dimm.col_bits;
411
412 saved_timings->min_tRAS =
413 MAX(saved_timings->min_tRAS, decoded_dimm.tRAS);
414 saved_timings->min_tRP =
415 MAX(saved_timings->min_tRP, decoded_dimm.tRP);
416 saved_timings->min_tRCD =
417 MAX(saved_timings->min_tRCD, decoded_dimm.tRCD);
418 saved_timings->min_tWR =
419 MAX(saved_timings->min_tWR, decoded_dimm.tWR);
420 saved_timings->min_tRFC =
421 MAX(saved_timings->min_tRFC, decoded_dimm.tRFC);
422 saved_timings->min_tWTR =
423 MAX(saved_timings->min_tWTR, decoded_dimm.tWTR);
424 saved_timings->min_tRRD =
425 MAX(saved_timings->min_tRRD, decoded_dimm.tRRD);
426 saved_timings->min_tRTP =
427 MAX(saved_timings->min_tRTP, decoded_dimm.tRTP);
428 saved_timings->min_tAA =
429 MAX(saved_timings->min_tAA, decoded_dimm.tAA);
430 saved_timings->cas_supported &= decoded_dimm.cas_supported;
431
432 s->dimms[dimm_idx].spd_crc = spd_ddr3_calc_unique_crc(raw_spd,
433 raw_spd[0]);
Arthur Heymansf1287262017-12-25 18:30:01 +0100434
435 s->dimms[dimm_idx].mirrored = decoded_dimm.flags.pins_mirrored;
436
Arthur Heymans1848ba32017-04-11 17:09:31 +0200437 return CB_SUCCESS;
438}
439
440
Arthur Heymans3cf94032017-04-05 16:17:26 +0200441static void select_discrete_timings(struct sysinfo *s,
442 const struct abs_timings *timings)
443{
444 s->selected_timings.tRAS = DIV_ROUND_UP(timings->min_tRAS,
445 s->selected_timings.tclk);
446 s->selected_timings.tRP = DIV_ROUND_UP(timings->min_tRP,
447 s->selected_timings.tclk);
448 s->selected_timings.tRCD = DIV_ROUND_UP(timings->min_tRCD,
449 s->selected_timings.tclk);
450 s->selected_timings.tWR = DIV_ROUND_UP(timings->min_tWR,
451 s->selected_timings.tclk);
452 s->selected_timings.tRFC = DIV_ROUND_UP(timings->min_tRFC,
453 s->selected_timings.tclk);
454 s->selected_timings.tWTR = DIV_ROUND_UP(timings->min_tWTR,
455 s->selected_timings.tclk);
456 s->selected_timings.tRRD = DIV_ROUND_UP(timings->min_tRRD,
457 s->selected_timings.tclk);
458 s->selected_timings.tRTP = DIV_ROUND_UP(timings->min_tRTP,
459 s->selected_timings.tclk);
460}
/* Dump the selected clocks and timings (in cycles) to the console. */
static void print_selected_timings(struct sysinfo *s)
{
	printk(BIOS_DEBUG, "Selected timings:\n");
	printk(BIOS_DEBUG, "\tFSB: %dMHz\n",
		fsb2mhz(s->selected_timings.fsb_clk));
	printk(BIOS_DEBUG, "\tDDR: %dMHz\n",
		ddr2mhz(s->selected_timings.mem_clk));

	printk(BIOS_DEBUG, "\tCAS: %d\n", s->selected_timings.CAS);
	printk(BIOS_DEBUG, "\ttRAS: %d\n", s->selected_timings.tRAS);
	printk(BIOS_DEBUG, "\ttRP: %d\n", s->selected_timings.tRP);
	printk(BIOS_DEBUG, "\ttRCD: %d\n", s->selected_timings.tRCD);
	printk(BIOS_DEBUG, "\ttWR: %d\n", s->selected_timings.tWR);
	printk(BIOS_DEBUG, "\ttRFC: %d\n", s->selected_timings.tRFC);
	printk(BIOS_DEBUG, "\ttWTR: %d\n", s->selected_timings.tWTR);
	printk(BIOS_DEBUG, "\ttRRD: %d\n", s->selected_timings.tRRD);
	printk(BIOS_DEBUG, "\ttRTP: %d\n", s->selected_timings.tRTP);
}
479
480static void find_fsb_speed(struct sysinfo *s)
481{
Damien Zammit4b513a62015-08-20 00:37:05 +1000482 switch (MCHBAR32(0xc00) & 0x7) {
483 case 0x0:
484 s->max_fsb = FSB_CLOCK_1066MHz;
485 break;
486 case 0x2:
487 s->max_fsb = FSB_CLOCK_800MHz;
488 break;
489 case 0x4:
490 s->max_fsb = FSB_CLOCK_1333MHz;
491 break;
492 default:
493 s->max_fsb = FSB_CLOCK_800MHz;
494 printk(BIOS_WARNING, "Can't detect FSB, setting 800MHz\n");
495 break;
496 }
Arthur Heymans3cf94032017-04-05 16:17:26 +0200497 s->selected_timings.fsb_clk = s->max_fsb;
498}
Damien Zammit4b513a62015-08-20 00:37:05 +1000499
Arthur Heymans3cf94032017-04-05 16:17:26 +0200500static void decode_spd_select_timings(struct sysinfo *s)
501{
502 unsigned int device;
503 u8 dram_type_mask = (1 << DDR2) | (1 << DDR3);
504 u8 dimm_mask = 0;
505 u8 raw_spd[256];
506 int i, j;
507 struct abs_timings saved_timings;
508 memset(&saved_timings, 0, sizeof(saved_timings));
509 saved_timings.cas_supported = UINT32_MAX;
Damien Zammit4b513a62015-08-20 00:37:05 +1000510
Arthur Heymans3cf94032017-04-05 16:17:26 +0200511 FOR_EACH_DIMM(i) {
512 s->dimms[i].card_type = RAW_CARD_POPULATED;
513 device = s->spd_map[i];
514 if (!device) {
515 s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
516 continue;
Damien Zammit4b513a62015-08-20 00:37:05 +1000517 }
Arthur Heymans3cf94032017-04-05 16:17:26 +0200518 switch (spd_read_byte(s->spd_map[i], SPD_MEMORY_TYPE)) {
519 case DDR2SPD:
520 dram_type_mask &= 1 << DDR2;
521 s->spd_type = DDR2;
Damien Zammit4b513a62015-08-20 00:37:05 +1000522 break;
Arthur Heymans3cf94032017-04-05 16:17:26 +0200523 case DDR3SPD:
524 dram_type_mask &= 1 << DDR3;
525 s->spd_type = DDR3;
Damien Zammit4b513a62015-08-20 00:37:05 +1000526 break;
527 default:
Arthur Heymans3cf94032017-04-05 16:17:26 +0200528 s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
529 continue;
Damien Zammit4b513a62015-08-20 00:37:05 +1000530 }
Arthur Heymans3cf94032017-04-05 16:17:26 +0200531 if (!dram_type_mask)
532 die("Mixing up dimm types is not supported!\n");
Damien Zammit4b513a62015-08-20 00:37:05 +1000533
Arthur Heymans3cf94032017-04-05 16:17:26 +0200534 printk(BIOS_DEBUG, "Decoding dimm %d\n", i);
Kyösti Mälkkic01a5052019-01-30 09:39:23 +0200535 if (i2c_eeprom_read(device, 0, 128, raw_spd) != 128) {
Arthur Heymans1848ba32017-04-11 17:09:31 +0200536 printk(BIOS_DEBUG, "i2c block operation failed,"
537 " trying smbus byte operation.\n");
538 for (j = 0; j < 128; j++)
539 raw_spd[j] = spd_read_byte(device, j);
540 }
541
Arthur Heymans3cf94032017-04-05 16:17:26 +0200542 if (s->spd_type == DDR2){
Arthur Heymans3cf94032017-04-05 16:17:26 +0200543 if (ddr2_save_dimminfo(i, raw_spd, &saved_timings, s)) {
544 printk(BIOS_WARNING,
545 "Encountered problems with SPD, "
546 "skipping this DIMM.\n");
547 s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
548 continue;
549 }
Arthur Heymans1848ba32017-04-11 17:09:31 +0200550 } else { /* DDR3 */
551 if (ddr3_save_dimminfo(i, raw_spd, &saved_timings, s)) {
552 printk(BIOS_WARNING,
553 "Encountered problems with SPD, "
554 "skipping this DIMM.\n");
555 /* something in decoded SPD was unsupported */
556 s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
557 continue;
558 }
Arthur Heymans3cf94032017-04-05 16:17:26 +0200559 }
560 dimm_mask |= (1 << i);
Damien Zammit4b513a62015-08-20 00:37:05 +1000561 }
Arthur Heymans3cf94032017-04-05 16:17:26 +0200562 if (!dimm_mask)
563 die("No memory installed.\n");
564
565 if (s->spd_type == DDR2)
566 select_cas_dramfreq_ddr2(s, &saved_timings);
Arthur Heymans1848ba32017-04-11 17:09:31 +0200567 else
568 select_cas_dramfreq_ddr3(s, &saved_timings);
Arthur Heymans3cf94032017-04-05 16:17:26 +0200569 select_discrete_timings(s, &saved_timings);
Arthur Heymans5a9dbde2018-05-26 15:05:09 +0200570 workaround_stacked_mode(s);
Arthur Heymans3cf94032017-04-05 16:17:26 +0200571}
572
573static void find_dimm_config(struct sysinfo *s)
574{
575 int chan, i;
576
577 FOR_EACH_POPULATED_CHANNEL(s->dimms, chan) {
578 FOR_EACH_POPULATED_DIMM_IN_CHANNEL(s->dimms, chan, i) {
579 int dimm_config;
580 if (s->dimms[i].ranks == 1) {
581 if (s->dimms[i].width == 0) /* x8 */
582 dimm_config = 1;
583 else /* x16 */
584 dimm_config = 3;
585 } else {
586 if (s->dimms[i].width == 0) /* x8 */
587 dimm_config = 2;
588 else
589 die("Dual-rank x16 not supported\n");
590 }
591 s->dimm_config[chan] |=
592 dimm_config << (i % DIMMS_PER_CHANNEL) * 2;
593 }
594 printk(BIOS_DEBUG, " Config[CH%d] : %d\n", chan,
595 s->dimm_config[chan]);
596 }
597
Damien Zammit4b513a62015-08-20 00:37:05 +1000598}
599
Arthur Heymansbb5e77c2016-11-30 20:37:29 +0100600static void checkreset_ddr2(int boot_path)
601{
602 u8 pmcon2;
603 u32 pmsts;
604
605 if (boot_path >= 1) {
606 pmsts = MCHBAR32(PMSTS_MCHBAR);
607 if (!(pmsts & 1))
608 printk(BIOS_DEBUG,
609 "Channel 0 possibly not in self refresh\n");
610 if (!(pmsts & 2))
611 printk(BIOS_DEBUG,
612 "Channel 1 possibly not in self refresh\n");
613 }
614
615 pmcon2 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2);
616
617 if (pmcon2 & 0x80) {
618 pmcon2 &= ~0x80;
619 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, pmcon2);
620
621 /* do magic 0xf0 thing. */
622 u8 reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf0);
623 pci_write_config8(PCI_DEV(0, 0, 0), 0xf0, reg8 & ~(1 << 2));
624 reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf0);
625 pci_write_config8(PCI_DEV(0, 0, 0), 0xf0, reg8 | (1 << 2));
626
627 printk(BIOS_DEBUG, "Reset...\n");
Arthur Heymans8565c032017-04-30 17:36:31 +0200628 outb(0xe, 0xcf9);
Arthur Heymansbb5e77c2016-11-30 20:37:29 +0100629 asm ("hlt");
630 }
631 pmcon2 |= 0x80;
632 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, pmcon2);
633}
634
Damien Zammit4b513a62015-08-20 00:37:05 +1000635/**
636 * @param boot_path: 0 = normal, 1 = reset, 2 = resume from s3
637 */
638void sdram_initialize(int boot_path, const u8 *spd_map)
639{
Arthur Heymansadc571a2017-09-25 09:40:54 +0200640 struct sysinfo s, *ctrl_cached;
Damien Zammit4b513a62015-08-20 00:37:05 +1000641 u8 reg8;
Arthur Heymansadc571a2017-09-25 09:40:54 +0200642 int fast_boot, cbmem_was_inited, cache_not_found;
643 struct region_device rdev;
Damien Zammit4b513a62015-08-20 00:37:05 +1000644
Elyes HAOUASf5a57a82019-01-08 22:15:53 +0100645 timestamp_add_now(TS_BEFORE_INITRAM);
Damien Zammit4b513a62015-08-20 00:37:05 +1000646 printk(BIOS_DEBUG, "Setting up RAM controller.\n");
647
Arthur Heymans70a1dda2017-03-09 01:58:24 +0100648 pci_write_config8(PCI_DEV(0, 0, 0), 0xdf, 0xff);
Damien Zammit4b513a62015-08-20 00:37:05 +1000649
650 memset(&s, 0, sizeof(struct sysinfo));
651
Arthur Heymansadc571a2017-09-25 09:40:54 +0200652 cache_not_found = mrc_cache_get_current(MRC_TRAINING_DATA,
653 MRC_CACHE_VERSION, &rdev);
Damien Zammit4b513a62015-08-20 00:37:05 +1000654
Arthur Heymansadc571a2017-09-25 09:40:54 +0200655 if (cache_not_found || (region_device_sz(&rdev) < sizeof(s))) {
656 if (boot_path == BOOT_PATH_RESUME) {
657 /* Failed S3 resume, reset to come up cleanly */
658 outb(0x6, 0xcf9);
659 halt();
Arthur Heymansdf946b82018-06-14 10:53:51 +0200660 } else if (boot_path == BOOT_PATH_WARM_RESET) {
661 /* On warm reset some of dram calibrations fail
662 and therefore requiring valid cached settings */
663 outb(0xe, 0xcf9);
664 halt();
Arthur Heymansadc571a2017-09-25 09:40:54 +0200665 }
666 ctrl_cached = NULL;
667 } else {
668 ctrl_cached = rdev_mmap_full(&rdev);
669 }
Arthur Heymansbb5e77c2016-11-30 20:37:29 +0100670
Arthur Heymansadc571a2017-09-25 09:40:54 +0200671 /* verify MRC cache for fast boot */
672 if (boot_path != BOOT_PATH_RESUME && ctrl_cached) {
673 /* check SPD checksum to make sure the DIMMs haven't been
674 * replaced */
675 fast_boot = verify_spds(spd_map, ctrl_cached) == CB_SUCCESS;
Arthur Heymansb0c6cff2018-09-05 20:39:39 +0200676 if (!fast_boot) {
Arthur Heymansadc571a2017-09-25 09:40:54 +0200677 printk(BIOS_DEBUG, "SPD checksums don't match,"
678 " dimm's have been replaced\n");
Arthur Heymansb0c6cff2018-09-05 20:39:39 +0200679 } else {
680 find_fsb_speed(&s);
681 fast_boot = s.max_fsb == ctrl_cached->max_fsb;
682 if (!fast_boot)
683 printk(BIOS_DEBUG,
684 "CPU FSB does not match and has been replaced\n");
685 }
Arthur Heymansadc571a2017-09-25 09:40:54 +0200686 } else {
687 fast_boot = boot_path == BOOT_PATH_RESUME;
688 }
Damien Zammit4b513a62015-08-20 00:37:05 +1000689
Arthur Heymansadc571a2017-09-25 09:40:54 +0200690 if (fast_boot) {
691 printk(BIOS_DEBUG, "Using cached raminit settings\n");
692 memcpy(&s, ctrl_cached, sizeof(s));
693 s.boot_path = boot_path;
694 mchinfo_ddr2(&s);
695 print_selected_timings(&s);
696 } else {
697 s.boot_path = boot_path;
698 s.spd_map[0] = spd_map[0];
699 s.spd_map[1] = spd_map[1];
700 s.spd_map[2] = spd_map[2];
701 s.spd_map[3] = spd_map[3];
702 checkreset_ddr2(s.boot_path);
Damien Zammit4b513a62015-08-20 00:37:05 +1000703
Arthur Heymansadc571a2017-09-25 09:40:54 +0200704 /* Detect dimms per channel */
705 reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xe9);
706 printk(BIOS_DEBUG, "Dimms per channel: %d\n",
707 (reg8 & 0x10) ? 1 : 2);
708
709 mchinfo_ddr2(&s);
710
711 find_fsb_speed(&s);
712 decode_spd_select_timings(&s);
713 print_selected_timings(&s);
714 find_dimm_config(&s);
715 }
Damien Zammit4b513a62015-08-20 00:37:05 +1000716
Arthur Heymansa2cc2312017-05-15 10:13:36 +0200717 do_raminit(&s, fast_boot);
Damien Zammit4b513a62015-08-20 00:37:05 +1000718
719 reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2);
720 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, reg8 & ~0x80);
721
Arthur Heymans70a1dda2017-03-09 01:58:24 +0100722 reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf4);
723 pci_write_config8(PCI_DEV(0, 0, 0), 0xf4, reg8 | 1);
Damien Zammit4b513a62015-08-20 00:37:05 +1000724 printk(BIOS_DEBUG, "RAM initialization finished.\n");
Arthur Heymansadc571a2017-09-25 09:40:54 +0200725
726 cbmem_was_inited = !cbmem_recovery(s.boot_path == BOOT_PATH_RESUME);
727 if (!fast_boot)
728 mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
729 &s, sizeof(s));
730 if (s.boot_path == BOOT_PATH_RESUME && !cbmem_was_inited) {
731 /* Failed S3 resume, reset to come up cleanly */
732 outb(0x6, 0xcf9);
733 halt();
734 }
Elyes HAOUASf5a57a82019-01-08 22:15:53 +0100735
736 timestamp_add_now(TS_AFTER_INITRAM);
737 quick_ram_check();
738 printk(BIOS_DEBUG, "Memory initialized\n");
Damien Zammit4b513a62015-08-20 00:37:05 +1000739}