/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2015 Damien Zammit <damien@zamaudio.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <arch/io.h>
#include <cbmem.h>
#include <console/console.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/mtrr.h>
#include <delay.h>
#include <halt.h>
#include <lib.h>
#include "iomap.h"
#if IS_ENABLED(CONFIG_SOUTHBRIDGE_INTEL_I82801GX)
#include <southbridge/intel/i82801gx/i82801gx.h> /* smbus_read_byte */
#else
#include <southbridge/intel/i82801jx/i82801jx.h> /* smbus_read_byte */
#endif
#include "x4x.h"
#include <pc80/mc146818rtc.h>
#include <spd.h>
#include <string.h>
#include <device/dram/ddr2.h>
#include <mrc_cache.h>

#define MRC_CACHE_VERSION 0

static inline int spd_read_byte(unsigned int device, unsigned int address)
{
	return smbus_read_byte(device, address);
}

static enum cb_err verify_spds(const u8 *spd_map,
			       const struct sysinfo *ctrl_cached)
{
	int i;
	u8 raw_spd[256] = {};
	u16 crc;

	for (i = 0; i < TOTAL_DIMMS; i++) {
		if (!(spd_map[i]))
			continue;
		int len = smbus_read_byte(spd_map[i], 0);
		if (len < 0 && ctrl_cached->dimms[i].card_type
				== RAW_CARD_UNPOPULATED)
			continue;
		if (len > 0 && ctrl_cached->dimms[i].card_type
				== RAW_CARD_UNPOPULATED)
			return CB_ERR;

		if (ctrl_cached->spd_type == DDR2) {
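			/*
			 * Note: per the DDR2 SPD layout, bytes 64-72 (module
			 * manufacturer data) and 93-98 (manufacturing date
			 * and serial number) are the fields that uniquely
			 * identify a module, so only those are re-read here
			 * to recompute the CRC.
			 */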
			i2c_block_read(spd_map[i], 64, 9, &raw_spd[64]);
			i2c_block_read(spd_map[i], 93, 6, &raw_spd[93]);
			crc = spd_ddr2_calc_unique_crc(raw_spd, len);
		} else { /*
			  * DDR3: TODO ddr2.h and ddr3.h
			  * cannot be included directly
			  */
			crc = 0;
			// i2c_block_read(spd_map[i], 117, 11, &raw_spd[117]);
			// crc = spd_ddr3_calc_unique_crc(raw_spd, len);
		}
		if (crc != ctrl_cached->dimms[i].spd_crc)
			return CB_ERR;
	}
	return CB_SUCCESS;
}

struct abs_timings {
	u32 min_tclk;
	u32 min_tRAS;
	u32 min_tRP;
	u32 min_tRCD;
	u32 min_tWR;
	u32 min_tRFC;
	u32 min_tWTR;
	u32 min_tRRD;
	u32 min_tRTP;
	u32 min_tCLK_cas[8];
	u32 cas_supported;
};

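/*
 * Lowest cycle time the memory controller can run: all x4x chipsets top out
 * at DDR2-800, i.e. a 400 MHz I/O clock.
 */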
#define CTRL_MIN_TCLK_DDR2 TCK_400MHZ

static void select_cas_dramfreq_ddr2(struct sysinfo *s,
			const struct abs_timings *saved_timings)
{
	u8 try_cas;
	/* Currently only these CAS are supported */
	u8 cas_mask = SPD_CAS_LATENCY_DDR2_5 | SPD_CAS_LATENCY_DDR2_6;

	cas_mask &= saved_timings->cas_supported;
	try_cas = spd_get_msbs(cas_mask);

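	/*
	 * Walk down from the highest common CAS: keep lowering CAS as long
	 * as this does not force a slower clock, and stop once the cycle
	 * time is one the controller supports (>= CTRL_MIN_TCLK_DDR2;
	 * a larger tCK means a lower frequency).
	 */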
	while (cas_mask & (1 << try_cas) && try_cas > 0) {
		s->selected_timings.CAS = try_cas;
		s->selected_timings.tclk = saved_timings->min_tCLK_cas[try_cas];
		if (s->selected_timings.tclk >= CTRL_MIN_TCLK_DDR2 &&
				saved_timings->min_tCLK_cas[try_cas] !=
				saved_timings->min_tCLK_cas[try_cas - 1])
			break;
		try_cas--;
	}

	if ((s->selected_timings.CAS < 3) || (s->selected_timings.tclk == 0))
		die("Could not find common memory frequency and CAS\n");

	switch (s->selected_timings.tclk) {
	case TCK_200MHZ:
	case TCK_266MHZ:
		/* FIXME: this works on vendor BIOS */
		die("Selected dram frequency not supported\n");
	case TCK_333MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_667MHz;
		break;
	case TCK_400MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_800MHz;
		break;
	}
}

static void mchinfo_ddr2(struct sysinfo *s)
{
	const u32 eax = cpuid_ext(0x04, 0).eax;
	printk(BIOS_WARNING, "%d CPU cores\n", ((eax >> 26) & 0x3f) + 1);

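	/*
	 * The capability bits tested below are numbered over the whole
	 * capability register: config offset 0xe8 holds bits 79:64 and
	 * offset 0xe4 holds bits 63:32, hence the offsets subtracted from
	 * the bit numbers.
	 */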
	u32 capid = pci_read_config16(PCI_DEV(0, 0, 0), 0xe8);
	if (!(capid & (1<<(79-64))))
		printk(BIOS_WARNING, "iTPM enabled\n");

	capid = pci_read_config32(PCI_DEV(0, 0, 0), 0xe4);
	if (!(capid & (1<<(57-32))))
		printk(BIOS_WARNING, "ME enabled\n");

	if (!(capid & (1<<(56-32))))
		printk(BIOS_WARNING, "AMT enabled\n");

	s->max_ddr2_mhz = 800; // All chipsets in x4x support up to 800MHz DDR2
	printk(BIOS_WARNING, "Capable of DDR2 of %d MHz or lower\n",
		s->max_ddr2_mhz);

	if (!(capid & (1<<(48-32))))
		printk(BIOS_WARNING, "VT-d enabled\n");
}

static int ddr2_save_dimminfo(u8 dimm_idx, u8 *raw_spd,
		struct abs_timings *saved_timings, struct sysinfo *s)
{
	struct dimm_attr_ddr2_st decoded_dimm;
	int i;

	if (spd_decode_ddr2(&decoded_dimm, raw_spd) != SPD_STATUS_OK) {
		printk(BIOS_DEBUG, "Problems decoding SPD\n");
		return CB_ERR;
	}

	if (IS_ENABLED(CONFIG_DEBUG_RAM_SETUP))
		dram_print_spd_ddr2(&decoded_dimm);

	if (!(decoded_dimm.width & (0x08 | 0x10))) {
		printk(BIOS_ERR,
			"DIMM%d Unsupported width: x%d. Disabling dimm\n",
				dimm_idx, decoded_dimm.width);
		return CB_ERR;
	}
	s->dimms[dimm_idx].width = (decoded_dimm.width >> 3) - 1;
	/*
	 * This boils down to:
	 * "Except for the x16 configuration, all DDR2 devices have a
	 * 1KB page size. For the x16 configuration, the page size is 2KB
	 * for all densities except the 256Mb device, which has a 1KB page
	 * size." Micron, 'TN-47-16 Designing for High-Density DDR2 Memory'
	 * The formula is pagesize in KiB = width * 2^col_bits / 8.
	 */
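	/*
	 * Worked example: an x8 device with 10 column address bits yields
	 * 8 * 2^10 / 8 = 1024 (the 1KB page quoted above); an x16 device
	 * with 10 column bits yields 16 * 2^10 / 8 = 2048 (2KB).
	 */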
	s->dimms[dimm_idx].page_size = decoded_dimm.width *
		(1 << decoded_dimm.col_bits) / 8;

	switch (decoded_dimm.banks) {
	case 4:
		s->dimms[dimm_idx].n_banks = N_BANKS_4;
		break;
	case 8:
		s->dimms[dimm_idx].n_banks = N_BANKS_8;
		break;
	default:
		printk(BIOS_ERR,
			"DIMM%d Unsupported #banks: x%d. Disabling dimm\n",
				dimm_idx, decoded_dimm.banks);
		return CB_ERR;
	}

	s->dimms[dimm_idx].ranks = decoded_dimm.ranks;
	s->dimms[dimm_idx].rows = decoded_dimm.row_bits;
	s->dimms[dimm_idx].cols = decoded_dimm.col_bits;

	saved_timings->cas_supported &= decoded_dimm.cas_supported;

	saved_timings->min_tRAS =
		MAX(saved_timings->min_tRAS, decoded_dimm.tRAS);
	saved_timings->min_tRP =
		MAX(saved_timings->min_tRP, decoded_dimm.tRP);
	saved_timings->min_tRCD =
		MAX(saved_timings->min_tRCD, decoded_dimm.tRCD);
	saved_timings->min_tWR =
		MAX(saved_timings->min_tWR, decoded_dimm.tWR);
	saved_timings->min_tRFC =
		MAX(saved_timings->min_tRFC, decoded_dimm.tRFC);
	saved_timings->min_tWTR =
		MAX(saved_timings->min_tWTR, decoded_dimm.tWTR);
	saved_timings->min_tRRD =
		MAX(saved_timings->min_tRRD, decoded_dimm.tRRD);
	saved_timings->min_tRTP =
		MAX(saved_timings->min_tRTP, decoded_dimm.tRTP);
	for (i = 0; i < 8; i++) {
		if (!(saved_timings->cas_supported & (1 << i)))
			saved_timings->min_tCLK_cas[i] = 0;
		else
			saved_timings->min_tCLK_cas[i] =
				MAX(saved_timings->min_tCLK_cas[i],
					decoded_dimm.cycle_time[i]);
	}

	s->dimms[dimm_idx].spd_crc = spd_ddr2_calc_unique_crc(raw_spd,
				spd_decode_spd_size_ddr2(raw_spd[0]));
	return CB_SUCCESS;
}

static void select_discrete_timings(struct sysinfo *s,
		const struct abs_timings *timings)
{
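	/*
	 * Convert the minimum absolute timings collected from the SPDs into
	 * whole clock cycles at the selected tclk, rounding up so that no
	 * timing is violated.
	 */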
	s->selected_timings.tRAS = DIV_ROUND_UP(timings->min_tRAS,
					s->selected_timings.tclk);
	s->selected_timings.tRP = DIV_ROUND_UP(timings->min_tRP,
					s->selected_timings.tclk);
	s->selected_timings.tRCD = DIV_ROUND_UP(timings->min_tRCD,
					s->selected_timings.tclk);
	s->selected_timings.tWR = DIV_ROUND_UP(timings->min_tWR,
					s->selected_timings.tclk);
	s->selected_timings.tRFC = DIV_ROUND_UP(timings->min_tRFC,
					s->selected_timings.tclk);
	s->selected_timings.tWTR = DIV_ROUND_UP(timings->min_tWTR,
					s->selected_timings.tclk);
	s->selected_timings.tRRD = DIV_ROUND_UP(timings->min_tRRD,
					s->selected_timings.tclk);
	s->selected_timings.tRTP = DIV_ROUND_UP(timings->min_tRTP,
					s->selected_timings.tclk);
}

static void print_selected_timings(struct sysinfo *s)
{
	printk(BIOS_DEBUG, "Selected timings:\n");
	printk(BIOS_DEBUG, "\tFSB: %dMHz\n",
		fsb2mhz(s->selected_timings.fsb_clk));
	printk(BIOS_DEBUG, "\tDDR: %dMHz\n",
		ddr2mhz(s->selected_timings.mem_clk));

	printk(BIOS_DEBUG, "\tCAS: %d\n", s->selected_timings.CAS);
	printk(BIOS_DEBUG, "\ttRAS: %d\n", s->selected_timings.tRAS);
	printk(BIOS_DEBUG, "\ttRP: %d\n", s->selected_timings.tRP);
	printk(BIOS_DEBUG, "\ttRCD: %d\n", s->selected_timings.tRCD);
	printk(BIOS_DEBUG, "\ttWR: %d\n", s->selected_timings.tWR);
	printk(BIOS_DEBUG, "\ttRFC: %d\n", s->selected_timings.tRFC);
	printk(BIOS_DEBUG, "\ttWTR: %d\n", s->selected_timings.tWTR);
	printk(BIOS_DEBUG, "\ttRRD: %d\n", s->selected_timings.tRRD);
	printk(BIOS_DEBUG, "\ttRTP: %d\n", s->selected_timings.tRTP);
}

static void find_fsb_speed(struct sysinfo *s)
{
	switch (MCHBAR32(0xc00) & 0x7) {
	case 0x0:
		s->max_fsb = FSB_CLOCK_1066MHz;
		break;
	case 0x2:
		s->max_fsb = FSB_CLOCK_800MHz;
		break;
	case 0x4:
		s->max_fsb = FSB_CLOCK_1333MHz;
		break;
	default:
		s->max_fsb = FSB_CLOCK_800MHz;
		printk(BIOS_WARNING, "Can't detect FSB, setting 800MHz\n");
		break;
	}
	s->selected_timings.fsb_clk = s->max_fsb;
}

static void decode_spd_select_timings(struct sysinfo *s)
{
	unsigned int device;
	u8 dram_type_mask = (1 << DDR2) | (1 << DDR3);
	u8 dimm_mask = 0;
	u8 raw_spd[256];
	int i, j;
	struct abs_timings saved_timings;
	memset(&saved_timings, 0, sizeof(saved_timings));
	saved_timings.cas_supported = UINT32_MAX;

	FOR_EACH_DIMM(i) {
		s->dimms[i].card_type = RAW_CARD_POPULATED;
		device = s->spd_map[i];
		if (!device) {
			s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
			continue;
		}
		switch (spd_read_byte(s->spd_map[i], SPD_MEMORY_TYPE)) {
		case DDR2SPD:
			dram_type_mask &= 1 << DDR2;
			s->spd_type = DDR2;
			break;
		case DDR3SPD:
			dram_type_mask &= 1 << DDR3;
			s->spd_type = DDR3;
			break;
		default:
			s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
			continue;
		}
		if (!dram_type_mask)
			die("Mixing up dimm types is not supported!\n");

		printk(BIOS_DEBUG, "Decoding dimm %d\n", i);
		if (s->spd_type == DDR2) {
			printk(BIOS_DEBUG,
				"Reading SPD using i2c block operation.\n");
			if (i2c_block_read(device, 0, 128, raw_spd) != 128) {
				printk(BIOS_DEBUG, "i2c block operation failed,"
					" trying smbus byte operation.\n");
				for (j = 0; j < 128; j++)
					raw_spd[j] = spd_read_byte(device, j);
			}
			if (ddr2_save_dimminfo(i, raw_spd, &saved_timings, s)) {
				printk(BIOS_WARNING,
					"Encountered problems with SPD, "
					"skipping this DIMM.\n");
				s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
				continue;
			}
		} else { /* DDR3: not implemented so don't decode */
			die("DDR3 support is not implemented\n");
		}
		dimm_mask |= (1 << i);
	}
	if (!dimm_mask)
		die("No memory installed.\n");

	if (s->spd_type == DDR2)
		select_cas_dramfreq_ddr2(s, &saved_timings);
	select_discrete_timings(s, &saved_timings);
}

static void find_dimm_config(struct sysinfo *s)
{
	int chan, i;

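	/*
	 * Pack a 2-bit configuration code per DIMM into dimm_config[chan]:
	 * 1 = single-rank x8, 2 = dual-rank x8, 3 = single-rank x16
	 * (dual-rank x16 is not supported).
	 */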
	FOR_EACH_POPULATED_CHANNEL(s->dimms, chan) {
		FOR_EACH_POPULATED_DIMM_IN_CHANNEL(s->dimms, chan, i) {
			int dimm_config;
			if (s->dimms[i].ranks == 1) {
				if (s->dimms[i].width == 0) /* x8 */
					dimm_config = 1;
				else /* x16 */
					dimm_config = 3;
			} else {
				if (s->dimms[i].width == 0) /* x8 */
					dimm_config = 2;
				else
					die("Dual-rank x16 not supported\n");
			}
			s->dimm_config[chan] |=
				dimm_config << (i % DIMMS_PER_CHANNEL) * 2;
		}
		printk(BIOS_DEBUG, " Config[CH%d] : %d\n", chan,
			s->dimm_config[chan]);
	}
}

static void checkreset_ddr2(int boot_path)
{
	u8 pmcon2;
	u32 pmsts;

	if (boot_path >= 1) {
		pmsts = MCHBAR32(PMSTS_MCHBAR);
		if (!(pmsts & 1))
			printk(BIOS_DEBUG,
				"Channel 0 possibly not in self refresh\n");
		if (!(pmsts & 2))
			printk(BIOS_DEBUG,
				"Channel 1 possibly not in self refresh\n");
	}

	pmcon2 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2);

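	/*
	 * Bit 7 of this power management register is used as a "raminit in
	 * progress" flag: it is set below and only cleared at the end of
	 * sdram_initialize(). If it is still set here, the previous memory
	 * init never completed, so force a reset to start from a clean state.
	 */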
	if (pmcon2 & 0x80) {
		pmcon2 &= ~0x80;
		pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, pmcon2);

		/* do magic 0xf0 thing. */
		u8 reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf0);
		pci_write_config8(PCI_DEV(0, 0, 0), 0xf0, reg8 & ~(1 << 2));
		reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf0);
		pci_write_config8(PCI_DEV(0, 0, 0), 0xf0, reg8 | (1 << 2));

		printk(BIOS_DEBUG, "Reset...\n");
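		/* Request a full reset through the 0xcf9 reset control port */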
		outb(0xe, 0xcf9);
		asm ("hlt");
	}
	pmcon2 |= 0x80;
	pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, pmcon2);
}

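/*
 * Minimal usage sketch (hypothetical mainboard romstage; the SMBus SPD
 * addresses 0x50-0x53 are an assumption, check the board schematics):
 *
 *	static const u8 spd_map[4] = { 0x50, 0x51, 0x52, 0x53 };
 *	sdram_initialize(boot_path, spd_map);
 */
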
/**
 * @param boot_path: 0 = normal, 1 = reset, 2 = resume from s3
 */
void sdram_initialize(int boot_path, const u8 *spd_map)
{
	struct sysinfo s, *ctrl_cached;
	u8 reg8;
	int fast_boot, cbmem_was_inited, cache_not_found;
	struct region_device rdev;

	printk(BIOS_DEBUG, "Setting up RAM controller.\n");

	pci_write_config8(PCI_DEV(0, 0, 0), 0xdf, 0xff);

	memset(&s, 0, sizeof(struct sysinfo));

	cache_not_found = mrc_cache_get_current(MRC_TRAINING_DATA,
					MRC_CACHE_VERSION, &rdev);

	if (cache_not_found || (region_device_sz(&rdev) < sizeof(s))) {
		if (boot_path == BOOT_PATH_RESUME) {
			/* Failed S3 resume, reset to come up cleanly */
			outb(0x6, 0xcf9);
			halt();
		}
		ctrl_cached = NULL;
	} else {
		ctrl_cached = rdev_mmap_full(&rdev);
	}

	/* Verify the MRC cache for fast boot */
	if (boot_path != BOOT_PATH_RESUME && ctrl_cached) {
		/* Check the SPD checksums to make sure the DIMMs haven't
		 * been replaced */
		fast_boot = verify_spds(spd_map, ctrl_cached) == CB_SUCCESS;
		if (!fast_boot)
			printk(BIOS_DEBUG, "SPD checksums don't match,"
				" DIMMs have been replaced\n");
	} else {
		fast_boot = boot_path == BOOT_PATH_RESUME;
	}

	if (fast_boot) {
		printk(BIOS_DEBUG, "Using cached raminit settings\n");
		memcpy(&s, ctrl_cached, sizeof(s));
		s.boot_path = boot_path;
		mchinfo_ddr2(&s);
		print_selected_timings(&s);
	} else {
		s.boot_path = boot_path;
		s.spd_map[0] = spd_map[0];
		s.spd_map[1] = spd_map[1];
		s.spd_map[2] = spd_map[2];
		s.spd_map[3] = spd_map[3];
		checkreset_ddr2(s.boot_path);

		/* Detect DIMMs per channel */
		reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xe9);
		printk(BIOS_DEBUG, "DIMMs per channel: %d\n",
			(reg8 & 0x10) ? 1 : 2);

		mchinfo_ddr2(&s);

		find_fsb_speed(&s);
		decode_spd_select_timings(&s);
		print_selected_timings(&s);
		find_dimm_config(&s);
	}

	switch (s.spd_type) {
	case DDR2:
		raminit_ddr2(&s, fast_boot);
		break;
	case DDR3:
		// FIXME Add: raminit_ddr3(&s);
		break;
	default:
		die("Unknown DDR type\n");
		break;
	}

	reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2);
	pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, reg8 & ~0x80);

	reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf4);
	pci_write_config8(PCI_DEV(0, 0, 0), 0xf4, reg8 | 1);
	printk(BIOS_DEBUG, "RAM initialization finished.\n");

	cbmem_was_inited = !cbmem_recovery(s.boot_path == BOOT_PATH_RESUME);
	if (!fast_boot)
		mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
					&s, sizeof(s));
	if (s.boot_path == BOOT_PATH_RESUME && !cbmem_was_inited) {
		/* Failed S3 resume, reset to come up cleanly */
		outb(0x6, 0xcf9);
		halt();
	}
}